diff --git a/linebridge b/linebridge
index ff38d45b..57d8b4be 160000
--- a/linebridge
+++ b/linebridge
@@ -1 +1 @@
-Subproject commit ff38d45b9686ccbd2e902477bde4cd7eb7d251e8
+Subproject commit 57d8b4bed14b0b35d1d9753847ac39710e0d9be5
diff --git a/packages/app/package.json b/packages/app/package.json
index 30edaa89..8da89a79 100755
--- a/packages/app/package.json
+++ b/packages/app/package.json
@@ -1,6 +1,6 @@
{
"name": "@comty/app",
- "version": "1.37.1@alpha",
+ "version": "1.38.0@alpha",
"license": "ComtyLicense",
"main": "electron/main",
"type": "module",
@@ -34,7 +34,7 @@
"bear-react-carousel": "^4.0.10-alpha.0",
"classnames": "2.3.1",
"comty.js": "^0.63.1",
- "dashjs": "^4.7.4",
+ "dashjs": "^5.0.0",
"dompurify": "^3.0.0",
"fast-average-color": "^9.2.0",
"fuse.js": "6.5.3",
@@ -49,6 +49,7 @@
"moment": "2.29.4",
"motion": "^12.4.2",
"mpegts.js": "^1.6.10",
+ "music-metadata": "^11.2.1",
"plyr": "^3.7.8",
"prop-types": "^15.8.1",
"qs": "^6.14.0",
diff --git a/packages/app/src/classes/ChunkedUpload/index.js b/packages/app/src/classes/ChunkedUpload/index.js
index 6d6b3441..5e93c5ab 100644
--- a/packages/app/src/classes/ChunkedUpload/index.js
+++ b/packages/app/src/classes/ChunkedUpload/index.js
@@ -170,7 +170,7 @@ export default class ChunkedUpload {
// check if this is the last chunk; if so, handle SSE events
if (this.chunkCount === this.totalChunks) {
- if (data.sseChannelId || data.eventChannelURL) {
+ if (data.sseChannelId || data.sseUrl) {
this.waitOnSSE(data)
} else {
this.events.emit("finish", data)
@@ -178,9 +178,8 @@ export default class ChunkedUpload {
}
this.events.emit("progress", {
- percentProgress: Math.round(
- (100 / this.totalChunks) * this.chunkCount,
- ),
+ percent: Math.round((100 / this.totalChunks) * this.chunkCount),
+ state: "Uploading",
})
} catch (error) {
this.events.emit("error", error)
@@ -196,12 +195,9 @@ export default class ChunkedUpload {
}
waitOnSSE(data) {
- console.log(
- `[UPLOADER] Connecting to SSE channel >`,
- data.eventChannelURL,
- )
+ console.log(`[UPLOADER] Connecting to SSE channel >`, data.sseUrl)
- const eventSource = new EventSource(data.eventChannelURL)
+ const eventSource = new EventSource(data.sseUrl)
eventSource.onerror = (error) => {
this.events.emit("error", error)
@@ -218,19 +214,20 @@ export default class ChunkedUpload {
console.log(`[UPLOADER] SSE Event >`, messageData)
- if (messageData.status === "done") {
+ if (messageData.event === "done") {
this.events.emit("finish", messageData.result)
eventSource.close()
}
- if (messageData.status === "error") {
+ if (messageData.event === "error") {
this.events.emit("error", messageData.result)
eventSource.close()
}
- if (messageData.status === "progress") {
+ if (messageData.state) {
this.events.emit("progress", {
- percentProgress: messageData.progress,
+ percent: messageData.percent,
+ state: messageData.state,
})
}
}
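The uploader's event payloads change shape here: chunk progress now emits `{ percent, state }` instead of `{ percentProgress }`, completion is detected via `messageData.event` rather than `messageData.status`, and the SSE channel URL moves from `data.eventChannelURL` to `data.sseUrl`. A minimal consumer sketch, assuming an already-constructed `ChunkedUpload` instance whose internal emitter exposes `.on` (constructor options are not part of this hunk):

```js
// Hypothetical wiring for the reworked uploader events.
function wireUploadEvents(upload, onDone) {
    upload.events.on("progress", ({ percent, state }) => {
        // `state` mirrors the server-side SSE state ("Uploading" while chunks are sent locally)
        console.log(`[upload] ${state ?? "Uploading"} ${percent}%`)
    })

    upload.events.on("finish", (result) => onDone(result))

    upload.events.on("error", (error) => {
        console.error("[upload] failed >", error)
    })
}
```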
diff --git a/packages/app/src/components/CoverEditor/index.jsx b/packages/app/src/components/CoverEditor/index.jsx
index b86b8c4e..38d22cbe 100644
--- a/packages/app/src/components/CoverEditor/index.jsx
+++ b/packages/app/src/components/CoverEditor/index.jsx
@@ -7,55 +7,62 @@ import UploadButton from "@components/UploadButton"
import "./index.less"
const CoverEditor = (props) => {
- const { value, onChange, defaultUrl } = props
+ const { value, onChange, defaultUrl } = props
- const [init, setInit] = React.useState(true)
- const [url, setUrl] = React.useState(value)
+ const [init, setInit] = React.useState(true)
+ const [url, setUrl] = React.useState(value)
- React.useEffect(() => {
- if (!init) {
- onChange(url)
- }
- }, [url])
+ React.useEffect(() => {
+ if (!init) {
+ onChange(url)
+ }
+ }, [url])
- React.useEffect(() => {
- if (!value) {
- setUrl(defaultUrl)
- } else {
- setUrl(value)
- }
+ React.useEffect(() => {
+ if (!value) {
+ setUrl(defaultUrl)
+ } else {
+ setUrl(value)
+ }
- setInit(false)
- }, [])
+ setInit(false)
+ }, [])
- return
-
-
-
+ // Handle when value prop change
+ React.useEffect(() => {
+ if (!value) {
+ setUrl(defaultUrl)
+ } else {
+ setUrl(value)
+ }
+ }, [value])
-
-
{
- setUrl(response.url)
- }}
- />
+ return (
+
+
+
+
-
{
- setUrl(defaultUrl)
- }}
- >
- Reset
-
+
+ {
+ setUrl(response.url)
+ }}
+ />
- {
- props.extraActions
- }
-
-
+ {
+ setUrl(defaultUrl)
+ }}
+ >
+ Reset
+
+
+ {props.extraActions}
+
+
+ )
}
export default CoverEditor
diff --git a/packages/app/src/components/Music/PlaylistView/index.jsx b/packages/app/src/components/Music/PlaylistView/index.jsx
index 096c81cf..a6cd9b20 100755
--- a/packages/app/src/components/Music/PlaylistView/index.jsx
+++ b/packages/app/src/components/Music/PlaylistView/index.jsx
@@ -392,9 +392,7 @@ const PlaylistView = (props) => {
key={item._id}
order={item._id}
track={item}
- onClickPlayBtn={() =>
- handleOnClickTrack(item)
- }
+ onPlay={() => handleOnClickTrack(item)}
changeState={(update) =>
handleTrackChangeState(
item._id,
@@ -418,7 +416,7 @@ const PlaylistView = (props) => {
+ onPlay={() =>
handleOnClickTrack(item)
}
changeState={(update) =>
diff --git a/packages/app/src/components/Music/Track/index.jsx b/packages/app/src/components/Music/Track/index.jsx
index 5152704d..e185eb4b 100755
--- a/packages/app/src/components/Music/Track/index.jsx
+++ b/packages/app/src/components/Music/Track/index.jsx
@@ -52,6 +52,10 @@ const Track = (props) => {
const isPlaying = isCurrent && playback_status === "playing"
const handleClickPlayBtn = React.useCallback(() => {
+ if (typeof props.onPlay === "function") {
+ return props.onPlay(props.track)
+ }
+
if (typeof props.onClickPlayBtn === "function") {
props.onClickPlayBtn(props.track)
}
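`Track` now prefers a dedicated `onPlay` callback and only falls back to the legacy `onClickPlayBtn`, which is what the `PlaylistView` change above relies on. A hedged usage sketch; `onPlayTrack` is a placeholder for the caller's handler (e.g. `handleOnClickTrack`), and the import path is assumed from the aliases used elsewhere in this diff:

```jsx
import React from "react"
import Track from "@components/Music/Track"

// Minimal parent sketch: the new `onPlay` prop takes precedence inside
// Track's handleClickPlayBtn, so legacy onClickPlayBtn callers keep working.
const TrackRow = ({ item, onPlayTrack }) => (
    <Track key={item._id} track={item} onPlay={() => onPlayTrack(item)} />
)

export default TrackRow
```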
diff --git a/packages/app/src/components/MusicStudio/EnhancedLyricsEditor/components/VideoEditor/index.jsx b/packages/app/src/components/MusicStudio/EnhancedLyricsEditor/components/VideoEditor/index.jsx
index f8907f5f..52020552 100644
--- a/packages/app/src/components/MusicStudio/EnhancedLyricsEditor/components/VideoEditor/index.jsx
+++ b/packages/app/src/components/MusicStudio/EnhancedLyricsEditor/components/VideoEditor/index.jsx
@@ -12,95 +12,96 @@ import "./index.less"
dayjs.extend(customParseFormat)
const VideoEditor = (props) => {
- function handleChange(key, value) {
- if (typeof props.onChange !== "function") {
- return false
- }
+ function handleChange(key, value) {
+ if (typeof props.onChange !== "function") {
+ return false
+ }
- props.onChange(key, value)
- }
+ props.onChange(key, value)
+ }
- return
-
-
- Video
-
+ return (
+
+
+
+ Video
+
- {
- (!props.videoSourceURL) &&
}
- description="No video"
- />
- }
+ {!props.videoSourceURL && (
+
}
+ description="No video"
+ />
+ )}
- {
- props.videoSourceURL &&
-
-
- }
+ {props.videoSourceURL && (
+
+
+
+ )}
-
-
-
-
- Start video sync at
-
+
+
+
+
+ Start video sync at
+
- {props.startSyncAt ?? "not set"}
-
+
{props.startSyncAt ?? "not set"}
+
-
-
Set to:
+
+
Set to:
-
{
- handleChange("startSyncAt", str)
- }}
- />
-
-
+
{
+ handleChange("startSyncAt", str)
+ }}
+ />
+
+
-
-
{
- handleChange("videoSourceURL", response.url)
- }}
- accept={[
- "video/*",
- ]}
- headers={{
- "transmux": "mq-hls",
- }}
- disabled={props.loading}
- >
- Upload video
-
-
- or
-
-
{
- handleChange("videoSourceURL", e.target.value)
- }}
- value={props.videoSourceURL}
- disabled={props.loading}
- />
-
-
+
+
{
+ handleChange("videoSourceURL", response.url)
+ }}
+ accept={["video/*"]}
+ headers={{
+ transformations: "mq-hls",
+ }}
+ disabled={props.loading}
+ >
+ Upload video
+
+ or
+
{
+ handleChange("videoSourceURL", e.target.value)
+ }}
+ value={props.videoSourceURL}
+ disabled={props.loading}
+ />
+
+
+ )
}
-export default VideoEditor
\ No newline at end of file
+export default VideoEditor
diff --git a/packages/app/src/components/MusicStudio/ReleaseEditor/index.jsx b/packages/app/src/components/MusicStudio/ReleaseEditor/index.jsx
index 2e5d604c..1aa38e05 100644
--- a/packages/app/src/components/MusicStudio/ReleaseEditor/index.jsx
+++ b/packages/app/src/components/MusicStudio/ReleaseEditor/index.jsx
@@ -1,280 +1,332 @@
import React from "react"
import * as antd from "antd"
-
import { Icons, createIconRender } from "@components/Icons"
import MusicModel from "@models/music"
-
+import compareObjectsByProperties from "@utils/compareObjectsByProperties"
import useUrlQueryActiveKey from "@hooks/useUrlQueryActiveKey"
import TrackManifest from "@cores/player/classes/TrackManifest"
-import { DefaultReleaseEditorState, ReleaseEditorStateContext } from "@contexts/MusicReleaseEditor"
+import {
+ DefaultReleaseEditorState,
+ ReleaseEditorStateContext,
+} from "@contexts/MusicReleaseEditor"
import Tabs from "./tabs"
import "./index.less"
const ReleaseEditor = (props) => {
- const { release_id } = props
+ const { release_id } = props
- const basicInfoRef = React.useRef()
+ const basicInfoRef = React.useRef()
- const [submitting, setSubmitting] = React.useState(false)
- const [loading, setLoading] = React.useState(true)
- const [submitError, setSubmitError] = React.useState(null)
+ const [submitting, setSubmitting] = React.useState(false)
+ const [loading, setLoading] = React.useState(true)
+ const [submitError, setSubmitError] = React.useState(null)
- const [loadError, setLoadError] = React.useState(null)
- const [globalState, setGlobalState] = React.useState(DefaultReleaseEditorState)
+ const [loadError, setLoadError] = React.useState(null)
+ const [globalState, setGlobalState] = React.useState(
+ DefaultReleaseEditorState,
+ )
+ const [initialValues, setInitialValues] = React.useState({})
- const [customPage, setCustomPage] = React.useState(null)
- const [customPageActions, setCustomPageActions] = React.useState([])
+ const [customPage, setCustomPage] = React.useState(null)
+ const [customPageActions, setCustomPageActions] = React.useState([])
- const [selectedTab, setSelectedTab] = useUrlQueryActiveKey({
- defaultKey: "info",
- queryKey: "tab"
- })
+ const [selectedTab, setSelectedTab] = useUrlQueryActiveKey({
+ defaultKey: "info",
+ queryKey: "tab",
+ })
- async function initialize() {
- setLoading(true)
- setLoadError(null)
+ async function initialize() {
+ setLoading(true)
+ setLoadError(null)
- if (release_id !== "new") {
- try {
- let releaseData = await MusicModel.getReleaseData(release_id)
+ if (release_id !== "new") {
+ try {
+ let releaseData = await MusicModel.getReleaseData(release_id)
- if (Array.isArray(releaseData.list)) {
- releaseData.list = releaseData.list.map((item) => {
- return new TrackManifest(item)
- })
- }
+ if (Array.isArray(releaseData.items)) {
+ releaseData.items = releaseData.items.map((item) => {
+ return new TrackManifest(item)
+ })
+ }
- setGlobalState({
- ...globalState,
- ...releaseData,
- })
- } catch (error) {
- setLoadError(error)
- }
- }
+ setGlobalState({
+ ...globalState,
+ ...releaseData,
+ })
- setLoading(false)
- }
+ setInitialValues(releaseData)
+ } catch (error) {
+ setLoadError(error)
+ }
+ }
- async function renderCustomPage(page, actions) {
- setCustomPage(page ?? null)
- setCustomPageActions(actions ?? [])
- }
+ setLoading(false)
+ }
- async function handleSubmit() {
- setSubmitting(true)
- setSubmitError(null)
+ function hasChanges() {
+ const stagedChanges = {
+ title: globalState.title,
+ type: globalState.type,
+ public: globalState.public,
+ cover: globalState.cover,
+ items: globalState.items,
+ }
- try {
- // first submit tracks
- const tracks = await MusicModel.putTrack({
- list: globalState.list,
- })
+ return !compareObjectsByProperties(
+ stagedChanges,
+ initialValues,
+ Object.keys(stagedChanges),
+ )
+ }
- // then submit release
- const result = await MusicModel.putRelease({
- _id: globalState._id,
- title: globalState.title,
- description: globalState.description,
- public: globalState.public,
- cover: globalState.cover,
- explicit: globalState.explicit,
- type: globalState.type,
- list: tracks.list.map((item) => item._id),
- })
+ async function renderCustomPage(page, actions) {
+ setCustomPage(page ?? null)
+ setCustomPageActions(actions ?? [])
+ }
- app.location.push(`/studio/music/${result._id}`)
- } catch (error) {
- console.error(error)
- app.message.error(error.message)
+ async function handleSubmit() {
+ setSubmitting(true)
+ setSubmitError(null)
- setSubmitError(error)
- setSubmitting(false)
+ try {
+ console.log("Submitting Tracks")
- return false
- }
+ // first submit tracks
+ const tracks = await MusicModel.putTrack({
+ items: globalState.items,
+ })
- setSubmitting(false)
- app.message.success("Release saved")
- }
+ console.log("Submitting release")
- async function handleDelete() {
- app.layout.modal.confirm({
- headerText: "Are you sure you want to delete this release?",
- descriptionText: "This action cannot be undone.",
- onConfirm: async () => {
- await MusicModel.deleteRelease(globalState._id)
- app.location.push(window.location.pathname.split("/").slice(0, -1).join("/"))
- },
- })
- }
+ // then submit release
+ const result = await MusicModel.putRelease({
+ _id: globalState._id,
+ title: globalState.title,
+ description: globalState.description,
+ public: globalState.public,
+ cover: globalState.cover,
+ explicit: globalState.explicit,
+ type: globalState.type,
+ items: tracks.items.map((item) => item._id),
+ })
- async function canFinish() {
- return true
- }
+ app.location.push(`/studio/music/${result._id}`)
+ } catch (error) {
+ console.error(error)
+ app.message.error(error.message)
- React.useEffect(() => {
- initialize()
- }, [])
+ setSubmitError(error)
+ setSubmitting(false)
- if (loadError) {
- return
- }
+ return false
+ }
- if (loading) {
- return
- }
+ setSubmitting(false)
+ app.message.success("Release saved")
+ }
- const Tab = Tabs.find(({ key }) => key === selectedTab)
+ async function handleDelete() {
+ app.layout.modal.confirm({
+ headerText: "Are you sure you want to delete this release?",
+ descriptionText: "This action cannot be undone.",
+ onConfirm: async () => {
+ await MusicModel.deleteRelease(globalState._id)
+ app.location.push(
+ window.location.pathname.split("/").slice(0, -1).join("/"),
+ )
+ },
+ })
+ }
- const CustomPageProps = {
- close: () => {
- renderCustomPage(null, null)
- }
- }
+ function canFinish() {
+ return hasChanges()
+ }
- return
-
- {
- customPage &&
- {
- customPage.header &&
-
-
}
- onClick={() => renderCustomPage(null, null)}
- />
+ React.useEffect(() => {
+ initialize()
+ }, [])
-
{customPage.header}
-
+ if (loadError) {
+ return (
+
+ )
+ }
- {
- Array.isArray(customPageActions) && customPageActions.map((action, index) => {
- return
{
- if (typeof action.onClick === "function") {
- await action.onClick()
- }
+ if (loading) {
+ return
+ }
- if (action.fireEvent) {
- app.eventBus.emit(action.fireEvent)
- }
- }}
- disabled={action.disabled}
- >
- {action.label}
-
- })
- }
-
- }
+ const Tab = Tabs.find(({ key }) => key === selectedTab)
- {
- customPage.content && (React.isValidElement(customPage.content) ?
- React.cloneElement(customPage.content, {
- ...CustomPageProps,
- ...customPage.props
- }) :
- React.createElement(customPage.content, {
- ...CustomPageProps,
- ...customPage.props
- })
- )
- }
-
- }
- {
- !customPage && <>
-
-
setSelectedTab(e.key)}
- selectedKeys={[selectedTab]}
- items={Tabs}
- mode="vertical"
- />
+ const CustomPageProps = {
+ close: () => {
+ renderCustomPage(null, null)
+ },
+ }
-
-
:
}
- disabled={submitting || loading || !canFinish()}
- loading={submitting}
- >
- {release_id !== "new" ? "Save" : "Release"}
-
+ return (
+
+
+ {customPage && (
+
+ {customPage.header && (
+
+
+
}
+ onClick={() =>
+ renderCustomPage(null, null)
+ }
+ />
- {
- release_id !== "new" ?
}
- disabled={loading}
- onClick={handleDelete}
- >
- Delete
- : null
- }
+
{customPage.header}
+
- {
- release_id !== "new" ?
}
- onClick={() => app.location.push(`/music/release/${globalState._id}`)}
- >
- Go to release
- : null
- }
-
-
+ {Array.isArray(customPageActions) &&
+ customPageActions.map((action, index) => {
+ return (
+
{
+ if (
+ typeof action.onClick ===
+ "function"
+ ) {
+ await action.onClick()
+ }
-
- {
- submitError &&
- }
- {
- !Tab &&
- }
- {
- Tab && React.createElement(Tab.render, {
- release: globalState,
+ if (action.fireEvent) {
+ app.eventBus.emit(
+ action.fireEvent,
+ )
+ }
+ }}
+ disabled={action.disabled}
+ >
+ {action.label}
+
+ )
+ })}
+
+ )}
- state: globalState,
- setState: setGlobalState,
+ {customPage.content &&
+ (React.isValidElement(customPage.content)
+ ? React.cloneElement(customPage.content, {
+ ...CustomPageProps,
+ ...customPage.props,
+ })
+ : React.createElement(customPage.content, {
+ ...CustomPageProps,
+ ...customPage.props,
+ }))}
+
+ )}
+ {!customPage && (
+ <>
+
+
setSelectedTab(e.key)}
+ selectedKeys={[selectedTab]}
+ items={Tabs}
+ mode="vertical"
+ />
- references: {
- basic: basicInfoRef
- }
- })
- }
-
- >
- }
-
-
+
+
+ ) : (
+
+ )
+ }
+ disabled={
+ submitting || loading || !canFinish()
+ }
+ loading={submitting}
+ >
+ {release_id !== "new" ? "Save" : "Release"}
+
+
+ {release_id !== "new" ? (
+
}
+ disabled={loading}
+ onClick={handleDelete}
+ >
+ Delete
+
+ ) : null}
+
+ {release_id !== "new" ? (
+
}
+ onClick={() =>
+ app.location.push(
+ `/music/release/${globalState._id}`,
+ )
+ }
+ >
+ Go to release
+
+ ) : null}
+
+
+
+
+ {submitError && (
+
+ )}
+ {!Tab && (
+
+ )}
+ {Tab &&
+ React.createElement(Tab.render, {
+ release: globalState,
+
+ state: globalState,
+ setState: setGlobalState,
+
+ references: {
+ basic: basicInfoRef,
+ },
+ })}
+
+ >
+ )}
+
+
+ )
}
-export default ReleaseEditor
\ No newline at end of file
+export default ReleaseEditor
diff --git a/packages/app/src/components/MusicStudio/ReleaseEditor/tabs/Tracks/components/TrackListItem/index.jsx b/packages/app/src/components/MusicStudio/ReleaseEditor/tabs/Tracks/components/TrackListItem/index.jsx
index 38a6c0ef..23f4fcec 100644
--- a/packages/app/src/components/MusicStudio/ReleaseEditor/tabs/Tracks/components/TrackListItem/index.jsx
+++ b/packages/app/src/components/MusicStudio/ReleaseEditor/tabs/Tracks/components/TrackListItem/index.jsx
@@ -11,13 +11,27 @@ import { ReleaseEditorStateContext } from "@contexts/MusicReleaseEditor"
import "./index.less"
+const stateToString = {
+ uploading: "Uploading",
+ transmuxing: "Processing...",
+ uploading_s3: "Archiving...",
+}
+
+const getTitleString = ({ track, progress }) => {
+ if (progress) {
+ return stateToString[progress.state] || progress.state
+ }
+
+ return track.title
+}
+
const TrackListItem = (props) => {
const context = React.useContext(ReleaseEditorStateContext)
const [loading, setLoading] = React.useState(false)
const [error, setError] = React.useState(null)
- const { track } = props
+ const { track, progress } = props
async function onClickEditTrack() {
context.renderCustomPage({
@@ -33,8 +47,6 @@ const TrackListItem = (props) => {
props.onDelete(track.uid)
}
- console.log("render")
-
return (
{
@@ -58,7 +70,7 @@ const TrackListItem = (props) => {
{props.index + 1}
- {props.uploading.working && }
+ {progress !== null && }
{
}}
/>
- {track.title}
+ {getTitleString({ track, progress })}
{
- if (prevState.list !== this.state.list) {
+ if (prevState.items !== this.state.items) {
if (typeof this.props.onChangeState === "function") {
this.props.onChangeState(this.state)
}
@@ -55,7 +55,7 @@ class TracksManager extends React.Component {
return false
}
- return this.state.list.find((item) => item.uid === uid)
+ return this.state.items.find((item) => item.uid === uid)
}
addTrackToList = (track) => {
@@ -64,7 +64,7 @@ class TracksManager extends React.Component {
}
this.setState({
- list: [...this.state.list, track],
+ items: [...this.state.items, track],
})
}
@@ -76,18 +76,17 @@ class TracksManager extends React.Component {
this.removeTrackUIDFromPendingUploads(uid)
this.setState({
- list: this.state.list.filter((item) => item.uid !== uid),
+ items: this.state.items.filter((item) => item.uid !== uid),
})
}
modifyTrackByUid = (uid, track) => {
- console.log("modifyTrackByUid", uid, track)
if (!uid || !track) {
return false
}
this.setState({
- list: this.state.list.map((item) => {
+ items: this.state.items.map((item) => {
if (item.uid === uid) {
return {
...item,
@@ -140,7 +139,7 @@ class TracksManager extends React.Component {
)
if (uploadProgressIndex === -1) {
- return 0
+ return null
}
return this.state.pendingUploads[uploadProgressIndex].progress
@@ -159,7 +158,7 @@ class TracksManager extends React.Component {
newData[uploadProgressIndex].progress = progress
- console.log(`Updating progress for [${uid}] to [${progress}]`)
+ console.log(`Updating progress for [${uid}] to >`, progress)
this.setState({
pendingUploads: newData,
@@ -177,8 +176,7 @@ class TracksManager extends React.Component {
const trackManifest = new TrackManifest({
uid: uid,
- file: change.file,
- onChange: this.modifyTrackByUid,
+ file: change.file.originFileObj,
})
this.addTrackToList(trackManifest)
@@ -189,7 +187,7 @@ class TracksManager extends React.Component {
// remove pending file
this.removeTrackUIDFromPendingUploads(uid)
- let trackManifest = this.state.list.find(
+ let trackManifest = this.state.items.find(
(item) => item.uid === uid,
)
@@ -206,6 +204,23 @@ class TracksManager extends React.Component {
trackManifest.source = change.file.response.url
trackManifest = await trackManifest.initialize()
+ // if the track has an embedded cover, upload it
+ if (trackManifest._coverBlob) {
+ console.log(
+ `[${trackManifest.uid}] Found cover, uploading...`,
+ )
+ const coverFile = new File(
+ [trackManifest._coverBlob],
+ "cover.jpg",
+ { type: trackManifest._coverBlob.type },
+ )
+
+ const coverUpload =
+ await app.cores.remoteStorage.uploadFile(coverFile)
+
+ trackManifest.cover = coverUpload.url
+ }
+
await this.modifyTrackByUid(uid, trackManifest)
break
@@ -231,9 +246,8 @@ class TracksManager extends React.Component {
const response = await app.cores.remoteStorage
.uploadFile(req.file, {
onProgress: this.handleTrackFileUploadProgress,
- service: "b2",
headers: {
- transmux: "a-dash",
+ transformations: "a-dash",
},
})
.catch((error) => {
@@ -258,17 +272,17 @@ class TracksManager extends React.Component {
this.setState((prev) => {
// move all list items by id
const orderedIds = orderedIdsArray.map((id) =>
- this.state.list.find((item) => item._id === id),
+ this.state.items.find((item) => item._id === id),
)
console.log("orderedIds", orderedIds)
return {
- list: orderedIds,
+ items: orderedIds,
}
})
}
render() {
- console.log(`Tracks List >`, this.state.list)
+ console.log(`Tracks List >`, this.state.items)
return (
@@ -280,7 +294,7 @@ class TracksManager extends React.Component {
accept="audio/*"
multiple
>
- {this.state.list.length === 0 ? (
+ {this.state.items.length === 0 ? (
) : (
- {this.state.list.length === 0 && (
+ {this.state.items.length === 0 && (
)}
- {this.state.list.map((track, index) => {
+ {this.state.items.map((track, index) => {
const progress = this.getUploadProgress(track.uid)
return (
@@ -310,12 +324,7 @@ class TracksManager extends React.Component {
track={track}
onEdit={this.modifyTrackByUid}
onDelete={this.removeTrackByUid}
- uploading={{
- progress: progress,
- working: this.state.pendingUploads.find(
- (item) => item.uid === track.uid,
- ),
- }}
+ progress={progress}
disabled={progress > 0}
/>
@@ -336,7 +345,7 @@ const ReleaseTracks = (props) => {
{
setState({
...state,
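`TrackListItem` now receives a single `progress` value (or `null` when idle) instead of the old `uploading` wrapper, and covers extracted by `TrackManifest.initialize()` are uploaded by the manager itself. A sketch of that cover hand-off, using the same calls as the hunk above:

```js
// TrackManifest.initialize() leaves embedded artwork in `_coverBlob`;
// the manager wraps it in a File, uploads it, and swaps the local object URL
// for the remote one before persisting the track.
async function uploadEmbeddedCover(trackManifest) {
    if (!trackManifest._coverBlob) {
        return trackManifest
    }

    const coverFile = new File([trackManifest._coverBlob], "cover.jpg", {
        type: trackManifest._coverBlob.type,
    })

    const coverUpload = await app.cores.remoteStorage.uploadFile(coverFile)

    trackManifest.cover = coverUpload.url

    return trackManifest
}
```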
diff --git a/packages/app/src/components/MusicStudio/TrackEditor/index.jsx b/packages/app/src/components/MusicStudio/TrackEditor/index.jsx
index d73f48ba..9c5fab25 100644
--- a/packages/app/src/components/MusicStudio/TrackEditor/index.jsx
+++ b/packages/app/src/components/MusicStudio/TrackEditor/index.jsx
@@ -10,158 +10,163 @@ import { ReleaseEditorStateContext } from "@contexts/MusicReleaseEditor"
import "./index.less"
const TrackEditor = (props) => {
- const context = React.useContext(ReleaseEditorStateContext)
- const [track, setTrack] = React.useState(props.track ?? {})
+ const context = React.useContext(ReleaseEditorStateContext)
+ const [track, setTrack] = React.useState(props.track ?? {})
- async function handleChange(key, value) {
- setTrack((prev) => {
- return {
- ...prev,
- [key]: value
- }
- })
- }
+ async function handleChange(key, value) {
+ setTrack((prev) => {
+ return {
+ ...prev,
+ [key]: value,
+ }
+ })
+ }
- async function openEnhancedLyricsEditor() {
- context.renderCustomPage({
- header: "Enhanced Lyrics",
- content: EnhancedLyricsEditor,
- props: {
- track: track,
- }
- })
- }
+ async function openEnhancedLyricsEditor() {
+ context.renderCustomPage({
+ header: "Enhanced Lyrics",
+ content: EnhancedLyricsEditor,
+ props: {
+ track: track,
+ },
+ })
+ }
- async function handleOnSave() {
- setTrack((prev) => {
- const listData = [...context.list]
+ async function handleOnSave() {
+ setTrack((prev) => {
+ const listData = [...context.items]
- const trackIndex = listData.findIndex((item) => item.uid === prev.uid)
+ const trackIndex = listData.findIndex(
+ (item) => item.uid === prev.uid,
+ )
- if (trackIndex === -1) {
- return prev
- }
+ if (trackIndex === -1) {
+ return prev
+ }
- listData[trackIndex] = prev
+ listData[trackIndex] = prev
- context.setGlobalState({
- ...context,
- list: listData
- })
+ context.setGlobalState({
+ ...context,
+ items: listData,
+ })
- return prev
- })
- }
+ props.close()
- React.useEffect(() => {
- context.setCustomPageActions([
- {
- label: "Save",
- icon: "FiSave",
- type: "primary",
- onClick: handleOnSave,
- disabled: props.track === track,
- },
- ])
- }, [track])
+ return prev
+ })
+ }
- return
-
-
-
- Cover
-
+ function setParentCover() {
+ handleChange("cover", context.cover)
+ }
-
handleChange("cover", url)}
- extraActions={[
-
- Use Parent
-
- ]}
- />
-
+ React.useEffect(() => {
+ context.setCustomPageActions([
+ {
+ label: "Save",
+ icon: "FiSave",
+ type: "primary",
+ onClick: handleOnSave,
+ disabled: props.track === track,
+ },
+ ])
+ }, [track])
-
-
-
- Title
-
+ return (
+
+
+
+
+ Cover
+
-
handleChange("title", e.target.value)}
- />
-
+
handleChange("cover", url)}
+ extraActions={[
+
+ Use Parent
+ ,
+ ]}
+ />
+
-
-
-
- Artist
-
+
+
+
+ Title
+
-
handleChange("artist", e.target.value)}
- />
-
+
handleChange("title", e.target.value)}
+ />
+
-
-
-
- Album
-
+
+
+
+ Artist
+
-
handleChange("album", e.target.value)}
- />
-
+
handleChange("artist", e.target.value)}
+ />
+
-
-
-
- Explicit
-
+
+
+
+ Album
+
-
handleChange("explicit", value)}
- />
-
+
handleChange("album", e.target.value)}
+ />
+
-
-
-
-
Enhanced Lyrics
+
+
+
+ Explicit
+
-
handleChange("lyrics_enabled", value)}
- disabled={!track.params._id}
- />
-
+
handleChange("explicit", value)}
+ />
+
-
-
- Edit
-
+
+
+
+ Enhanced Lyrics
+
- {
- !track.params._id &&
- You cannot edit Video and Lyrics without release first
-
- }
-
-
-
+
+
+ Edit
+
+
+ {!track.params._id && (
+
+ You cannot edit Video and Lyrics without release
+ first
+
+ )}
+
+
+
+ )
}
-export default TrackEditor
\ No newline at end of file
+export default TrackEditor
diff --git a/packages/app/src/components/Player/ExtraActions/index.jsx b/packages/app/src/components/Player/Actions/index.jsx
similarity index 51%
rename from packages/app/src/components/Player/ExtraActions/index.jsx
rename to packages/app/src/components/Player/Actions/index.jsx
index b3be128b..9b873395 100755
--- a/packages/app/src/components/Player/ExtraActions/index.jsx
+++ b/packages/app/src/components/Player/Actions/index.jsx
@@ -6,41 +6,51 @@ import LikeButton from "@components/LikeButton"
import { usePlayerStateContext } from "@contexts/WithPlayerContext"
+import "./index.less"
+
const ExtraActions = (props) => {
- const [playerState] = usePlayerStateContext()
+ const [trackInstance, setTrackInstance] = React.useState({})
+
+ const onPlayerStateChange = React.useCallback((state) => {
+ const instance = app.cores.player.track()
+
+ if (instance) {
+ setTrackInstance(instance)
+ }
+ }, [])
+
+ const [playerState] = usePlayerStateContext(onPlayerStateChange)
const handleClickLike = async () => {
- if (!playerState.track_manifest) {
+ if (!trackInstance) {
console.error("Cannot like a track if nothing is playing")
return false
}
- const track = app.cores.player.track()
-
- await track.manifest.serviceOperations.toggleItemFavourite(
+ await trackInstance.manifest.serviceOperations.toggleItemFavourite(
"track",
- playerState.track_manifest._id,
+ trackInstance.manifest._id,
)
}
return (
-
+
{app.isMobile && (
}
- disabled={!playerState.track_manifest?.lyrics_enabled}
+ disabled={!trackInstance?.manifest?.lyrics_enabled}
/>
)}
{!app.isMobile && (
)}
diff --git a/packages/app/src/components/Player/Actions/index.less b/packages/app/src/components/Player/Actions/index.less
new file mode 100644
index 00000000..7ef2ef0f
--- /dev/null
+++ b/packages/app/src/components/Player/Actions/index.less
@@ -0,0 +1,19 @@
+.player-actions {
+ display: flex;
+ flex-direction: row;
+
+ align-items: center;
+ justify-content: space-between;
+
+ width: 70%;
+ margin: auto;
+
+ padding: 2px 25px;
+
+ background-color: rgba(var(--layoutBackgroundColor), 0.7);
+
+ -webkit-backdrop-filter: blur(5px);
+ backdrop-filter: blur(5px);
+
+ border-radius: 12px;
+}
diff --git a/packages/app/src/components/Player/Controls/index.jsx b/packages/app/src/components/Player/Controls/index.jsx
index 3de4533d..8d520107 100755
--- a/packages/app/src/components/Player/Controls/index.jsx
+++ b/packages/app/src/components/Player/Controls/index.jsx
@@ -47,7 +47,17 @@ const EventsHandlers = {
}
const Controls = (props) => {
- const [playerState] = usePlayerStateContext()
+ const [trackInstance, setTrackInstance] = React.useState({})
+
+ const onPlayerStateChange = React.useCallback((state) => {
+ const instance = app.cores.player.track()
+
+ if (instance) {
+ setTrackInstance(instance)
+ }
+ }, [])
+
+ const [playerState] = usePlayerStateContext(onPlayerStateChange)
const handleAction = (event, ...args) => {
if (typeof EventsHandlers[event] !== "function") {
@@ -122,10 +132,11 @@ const Controls = (props) => {
{app.isMobile && (
handleAction("like")}
+ disabled={!trackInstance?.manifest?._id}
/>
)}
diff --git a/packages/app/src/components/Player/ToolBarPlayer/index.jsx b/packages/app/src/components/Player/ToolBarPlayer/index.jsx
index fac61db7..14f671ac 100755
--- a/packages/app/src/components/Player/ToolBarPlayer/index.jsx
+++ b/packages/app/src/components/Player/ToolBarPlayer/index.jsx
@@ -8,11 +8,10 @@ import { usePlayerStateContext } from "@contexts/WithPlayerContext"
import LiveInfo from "@components/Player/LiveInfo"
import SeekBar from "@components/Player/SeekBar"
import Controls from "@components/Player/Controls"
+import Actions from "@components/Player/Actions"
import RGBStringToValues from "@utils/rgbToValues"
-import ExtraActions from "../ExtraActions"
-
import "./index.less"
function isOverflown(parent, element) {
@@ -93,7 +92,7 @@ const Player = (props) => {
}
}
- const { title, artistStr, service, cover_analysis, cover } =
+ const { title, artist, service, cover_analysis, cover } =
playerState.track_manifest ?? {}
const playing = playerState.playback_status === "playing"
@@ -201,7 +200,7 @@ const Player = (props) => {
)}
- {artistStr ?? ""}
+ {artist ?? ""}
@@ -218,7 +217,7 @@ const Player = (props) => {
streamMode={playerState.live}
/>
-
+
{
+ if (this.state.loading === true) {
+ console.warn(`Please wait for posts to load before loading more`)
+ return
+ }
+
this.setState({
loading: true,
})
let payload = {
- trim: this.state.list.length,
+ page: this.state.pageCount,
limit: app.cores.settings.get("feed_max_fetch"),
}
@@ -164,10 +170,6 @@ export class PostsListsComponent extends React.Component {
}
}
- if (params.replace) {
- payload.trim = 0
- }
-
const result = await fn(payload).catch((err) => {
console.error(err)
@@ -186,10 +188,12 @@ export class PostsListsComponent extends React.Component {
if (params.replace) {
this.setState({
list: result,
+ pageCount: 0,
})
} else {
this.setState({
list: [...this.state.list, ...result],
+ pageCount: this.state.pageCount + 1,
})
}
}
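Feed pagination switches from trimming by the accumulated list length to a page counter that resets on replace. A condensed sketch of the resulting fetch path, assuming `pageCount` starts at `0` in the component state (its initialization is outside this hunk):

```js
// `fn` is the loader already resolved by the component; returns the next state patch.
async function fetchPage(fn, state, replace = false) {
    const payload = {
        page: state.pageCount,
        limit: app.cores.settings.get("feed_max_fetch"),
    }

    const result = await fn(payload)

    return replace
        ? { list: result, pageCount: 0 }
        : { list: [...state.list, ...result], pageCount: state.pageCount + 1 }
}
```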
diff --git a/packages/app/src/components/UploadButton/index.jsx b/packages/app/src/components/UploadButton/index.jsx
index 0a7caee1..90ca59e4 100755
--- a/packages/app/src/components/UploadButton/index.jsx
+++ b/packages/app/src/components/UploadButton/index.jsx
@@ -7,112 +7,102 @@ import { Icons } from "@components/Icons"
import "./index.less"
export default (props) => {
- const [uploading, setUploading] = React.useState(false)
- const [progess, setProgess] = React.useState(null)
+ const [uploading, setUploading] = React.useState(false)
+ const [progress, setProgress] = React.useState(null)
- const handleOnStart = (file_uid, file) => {
- if (typeof props.onStart === "function") {
- props.onStart(file_uid, file)
- }
- }
+ const handleOnStart = (file_uid, file) => {
+ if (typeof props.onStart === "function") {
+ props.onStart(file_uid, file)
+ }
+ }
- const handleOnProgress = (file_uid, progress) => {
- if (typeof props.onProgress === "function") {
- props.onProgress(file_uid, progress)
- }
- }
+ const handleOnProgress = (file_uid, progress) => {
+ if (typeof props.onProgress === "function") {
+ props.onProgress(file_uid, progress)
+ }
+ }
- const handleOnError = (file_uid, error) => {
- if (typeof props.onError === "function") {
- props.onError(file_uid, error)
- }
- }
+ const handleOnError = (file_uid, error) => {
+ if (typeof props.onError === "function") {
+ props.onError(file_uid, error)
+ }
+ }
- const handleOnSuccess = (file_uid, response) => {
- if (typeof props.onSuccess === "function") {
- props.onSuccess(file_uid, response)
- }
- }
+ const handleOnSuccess = (file_uid, response) => {
+ if (typeof props.onSuccess === "function") {
+ props.onSuccess(file_uid, response)
+ }
+ }
- const handleUpload = async (req) => {
- setUploading(true)
- setProgess(1)
+ const handleUpload = async (req) => {
+ setUploading(true)
+ setProgress(1)
- handleOnStart(req.file.uid, req.file)
+ handleOnStart(req.file.uid, req.file)
- await app.cores.remoteStorage.uploadFile(req.file, {
- headers: props.headers,
- onProgress: (file, progress) => {
- setProgess(progress)
- handleOnProgress(file.uid, progress)
- },
- onError: (file, error) => {
- setProgess(null)
- handleOnError(file.uid, error)
- setUploading(false)
- },
- onFinish: (file, response) => {
- if (typeof props.ctx?.onUpdateItem === "function") {
- props.ctx.onUpdateItem(response.url)
- }
+ await app.cores.remoteStorage.uploadFile(req.file, {
+ headers: props.headers,
+ onProgress: (file, progress) => {
+ setProgress(progress)
+ handleOnProgress(file.uid, progress)
+ },
+ onError: (file, error) => {
+ setProgress(null)
+ handleOnError(file.uid, error)
+ setUploading(false)
+ },
+ onFinish: (file, response) => {
+ if (typeof props.ctx?.onUpdateItem === "function") {
+ props.ctx.onUpdateItem(response.url)
+ }
- if (typeof props.onUploadDone === "function") {
- props.onUploadDone(response)
- }
+ if (typeof props.onUploadDone === "function") {
+ props.onUploadDone(response)
+ }
- setUploading(false)
- handleOnSuccess(req.file.uid, response)
+ setUploading(false)
+ handleOnSuccess(req.file.uid, response)
- setTimeout(() => {
- setProgess(null)
- }, 1000)
- },
- })
- }
+ setTimeout(() => {
+ setProgress(null)
+ }, 1000)
+ },
+ })
+ }
- return
-
- {
- !progess && (props.icon ??
)
- }
+ return (
+
+
+ {!progress &&
+ (props.icon ?? (
+
+ ))}
- {
- progess &&
-
-}
\ No newline at end of file
+ {props.children ?? "Upload"}
+
+
+ )
+}
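Beyond the `progess` → `progress` rename, `UploadButton` still funnels uploads through `app.cores.remoteStorage.uploadFile`. A hedged usage sketch built only from props visible in this diff (`accept`, `headers`, `onUploadDone`) and the `transformations` header that replaces `transmux` in the TracksManager/VideoEditor hunks:

```jsx
import React from "react"
import UploadButton from "@components/UploadButton"

// Hypothetical caller: request an audio DASH transformation and capture the URL.
const AudioUpload = ({ onUrl }) => (
    <UploadButton
        accept={["audio/*"]}
        headers={{ transformations: "a-dash" }}
        onUploadDone={(response) => onUrl(response.url)}
    >
        Upload audio
    </UploadButton>
)

export default AudioUpload
```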
diff --git a/packages/app/src/contexts/MusicReleaseEditor/index.js b/packages/app/src/contexts/MusicReleaseEditor/index.js
index 75046ce7..9b327be7 100644
--- a/packages/app/src/contexts/MusicReleaseEditor/index.js
+++ b/packages/app/src/contexts/MusicReleaseEditor/index.js
@@ -1,17 +1,19 @@
import React from "react"
export const DefaultReleaseEditorState = {
- cover: null,
- title: "Untitled",
- type: "single",
- public: false,
+ cover: null,
+ title: "Untitled",
+ type: "single",
+ public: false,
- list: [],
- pendingUploads: [],
+ items: [],
+ pendingUploads: [],
- setCustomPage: () => {},
+ setCustomPage: () => {},
}
-export const ReleaseEditorStateContext = React.createContext(DefaultReleaseEditorState)
+export const ReleaseEditorStateContext = React.createContext(
+ DefaultReleaseEditorState,
+)
-export default ReleaseEditorStateContext
\ No newline at end of file
+export default ReleaseEditorStateContext
diff --git a/packages/app/src/cores/player/classes/AudioBase.js b/packages/app/src/cores/player/classes/AudioBase.js
new file mode 100644
index 00000000..1818bfcf
--- /dev/null
+++ b/packages/app/src/cores/player/classes/AudioBase.js
@@ -0,0 +1,123 @@
+import { MediaPlayer } from "dashjs"
+import PlayerProcessors from "./PlayerProcessors"
+import AudioPlayerStorage from "../player.storage"
+
+export default class AudioBase {
+ constructor(player) {
+ this.player = player
+ }
+
+ audio = new Audio()
+ context = null
+ demuxer = null
+ elementSource = null
+
+ processorsManager = new PlayerProcessors(this)
+ processors = {}
+
+ waitUpdateTimeout = null
+
+ initialize = async () => {
+ // create an audio context
+ this.context = new AudioContext({
+ sampleRate:
+ AudioPlayerStorage.get("sample_rate") ??
+ this.player.constructor.defaultSampleRate,
+ latencyHint: "playback",
+ })
+
+ // configure some settings for audio
+ this.audio.crossOrigin = "anonymous"
+ this.audio.preload = "metadata"
+
+ // listen to all audio element events
+ for (const [key, value] of Object.entries(this.audioEvents)) {
+ this.audio.addEventListener(key, value)
+ }
+
+ // set up the dash.js demuxer for MPD sources
+ this.createDemuxer()
+
+ // create element source
+ this.elementSource = this.context.createMediaElementSource(this.audio)
+
+ // initialize audio processors
+ await this.processorsManager.initialize()
+ await this.processorsManager.attachAllNodes()
+ }
+
+ createDemuxer() {
+ this.demuxer = MediaPlayer().create()
+
+ this.demuxer.updateSettings({
+ streaming: {
+ buffer: {
+ resetSourceBuffersForTrackSwitch: true,
+ },
+ },
+ })
+
+ this.demuxer.initialize(this.audio, null, false)
+ }
+
+ flush() {
+ this.audio.pause()
+ this.audio.src = null
+ this.audio.currentTime = 0
+
+ this.demuxer.destroy()
+ this.createDemuxer()
+ }
+
+ audioEvents = {
+ ended: () => {
+ this.player.next()
+ },
+ loadeddata: () => {
+ this.player.state.loading = false
+ },
+ loadedmetadata: () => {
+ if (this.audio.duration === Infinity) {
+ this.player.state.live = true
+ } else {
+ this.player.state.live = false
+ }
+ },
+ play: () => {
+ this.player.state.playback_status = "playing"
+ },
+ playing: () => {
+ this.player.state.loading = false
+
+ this.player.state.playback_status = "playing"
+
+ if (typeof this.waitUpdateTimeout !== "undefined") {
+ clearTimeout(this.waitUpdateTimeout)
+ this.waitUpdateTimeout = null
+ }
+ },
+ pause: () => {
+ this.player.state.playback_status = "paused"
+ },
+ durationchange: () => {
+ this.player.eventBus.emit(
+ `player.durationchange`,
+ this.audio.duration,
+ )
+ },
+ waiting: () => {
+ if (this.waitUpdateTimeout) {
+ clearTimeout(this.waitUpdateTimeout)
+ this.waitUpdateTimeout = null
+ }
+
+ // if loading takes more than 150ms, update the loading state
+ this.waitUpdateTimeout = setTimeout(() => {
+ this.player.state.loading = true
+ }, 150)
+ },
+ seeked: () => {
+ this.player.eventBus.emit(`player.seeked`, this.audio.currentTime)
+ },
+ }
+}
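`AudioBase` centralizes what each `TrackInstance` previously owned: a single `<audio>` element, the `AudioContext`, the dash.js demuxer, and the processor chain. A lifecycle sketch using only the methods defined above; `playerCore` is a stand-in for the Player core that supplies `state`, `eventBus` and `next()`:

```js
// One AudioBase lives for the whole player core.
const base = new AudioBase(playerCore)

// Builds the AudioContext, element source, demuxer and processor nodes.
await base.initialize()

// Between tracks: pause, clear the source and rebuild the dash.js demuxer.
base.flush()
```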
diff --git a/packages/app/src/cores/player/classes/MediaSession.js b/packages/app/src/cores/player/classes/MediaSession.js
new file mode 100644
index 00000000..cd34fad7
--- /dev/null
+++ b/packages/app/src/cores/player/classes/MediaSession.js
@@ -0,0 +1,56 @@
+export default class MediaSession {
+ constructor(player) {
+ this.player = player
+ }
+
+ async initialize() {
+ for (const [action, handler] of this.handlers) {
+ navigator.mediaSession.setActionHandler(action, handler)
+ }
+ }
+
+ handlers = [
+ [
+ "play",
+ () => {
+ console.log("media session play event", "play")
+ this.player.resumePlayback()
+ },
+ ],
+ [
+ "pause",
+ () => {
+ console.log("media session pause event", "pause")
+ this.player.pausePlayback()
+ },
+ ],
+ [
+ "seekto",
+ (seek) => {
+ console.log("media session seek event", seek)
+ this.player.seek(seek.seekTime)
+ },
+ ],
+ ]
+
+ update = (manifest) => {
+ navigator.mediaSession.metadata = new MediaMetadata({
+ title: manifest.title,
+ artist: manifest.artist,
+ album: manifest.album,
+ artwork: [
+ {
+ src: manifest.cover,
+ },
+ ],
+ })
+ }
+
+ flush = () => {
+ navigator.mediaSession.metadata = null
+ }
+
+ updateIsPlaying = (isPlaying) => {
+ navigator.mediaSession.playbackState = isPlaying ? "playing" : "paused"
+ }
+}
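`MediaSession` mirrors playback into the browser's native media controls. A usage sketch against the methods defined above; the manifest fields match what `TrackManifest.toSeriableObject()` produces:

```js
// `player` is the Player core that owns this session.
const session = new MediaSession(player)

await session.initialize() // registers play / pause / seekto action handlers

// when a track starts playing:
session.update({
    title: "Untitled",
    artist: "Unknown",
    album: "Unknown",
    cover: "https://storage.ragestudio.net/comty-static-assets/default_song.png",
})
session.updateIsPlaying(true)

// when playback is torn down:
session.flush()
```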
diff --git a/packages/app/src/cores/player/classes/PlayerProcessors.js b/packages/app/src/cores/player/classes/PlayerProcessors.js
index f59db0aa..fb982159 100644
--- a/packages/app/src/cores/player/classes/PlayerProcessors.js
+++ b/packages/app/src/cores/player/classes/PlayerProcessors.js
@@ -1,83 +1,96 @@
import defaultAudioProccessors from "../processors"
export default class PlayerProcessors {
- constructor(player) {
- this.player = player
- }
+ constructor(base) {
+ this.base = base
+ }
- processors = []
+ nodes = []
+ attached = []
- public = {}
+ public = {}
- async initialize() {
- // if already exists audio processors, destroy all before create new
- if (this.processors.length > 0) {
- this.player.console.log("Destroying audio processors")
+ async initialize() {
+ // if already exists audio processors, destroy all before create new
+ if (this.nodes.length > 0) {
+ this.base.player.console.log("Destroying audio processors")
- this.processors.forEach((processor) => {
- this.player.console.log(`Destroying audio processor ${processor.constructor.name}`, processor)
- processor._destroy()
- })
+ this.nodes.forEach((node) => {
+ this.base.player.console.log(
+ `Destroying audio processor node ${node.constructor.name}`,
+ node,
+ )
+ node._destroy()
+ })
- this.processors = []
- }
+ this.nodes = []
+ }
- // instanciate default audio processors
- for await (const defaultProccessor of defaultAudioProccessors) {
- this.processors.push(new defaultProccessor(this.player))
- }
+ // instantiate default audio processors
+ for await (const defaultProccessor of defaultAudioProccessors) {
+ this.nodes.push(new defaultProccessor(this))
+ }
- // initialize audio processors
- for await (const processor of this.processors) {
- if (typeof processor._init === "function") {
- try {
- await processor._init(this.player.audioContext)
- } catch (error) {
- this.player.console.error(`Failed to initialize audio processor ${processor.constructor.name} >`, error)
- continue
- }
- }
+ // initialize audio processors
+ for await (const node of this.nodes) {
+ if (typeof node._init === "function") {
+ try {
+ await node._init()
+ } catch (error) {
+ this.base.player.console.error(
+ `Failed to initialize audio processor node ${node.constructor.name} >`,
+ error,
+ )
+ continue
+ }
+ }
- // check if processor has exposed public methods
- if (processor.exposeToPublic) {
- Object.entries(processor.exposeToPublic).forEach(([key, value]) => {
- const refName = processor.constructor.refName
+ // check if processor has exposed public methods
+ if (node.exposeToPublic) {
+ Object.entries(node.exposeToPublic).forEach(([key, value]) => {
+ const refName = node.constructor.refName
- if (typeof this.player.public[refName] === "undefined") {
- // by default create a empty object
- this.player.public[refName] = {}
- }
+ if (typeof this.base.processors[refName] === "undefined") {
+ // by default, create an empty object
+ this.base.processors[refName] = {}
+ }
- this.player.public[refName][key] = value
- })
- }
- }
- }
+ this.base.processors[refName][key] = value
+ })
+ }
+ }
+ }
- async attachProcessorsToInstance(instance) {
- for await (const [index, processor] of this.processors.entries()) {
- if (processor.constructor.node_bypass === true) {
- instance.contextElement.connect(processor.processor)
+ attachAllNodes = async () => {
+ for await (const [index, node] of this.nodes.entries()) {
+ if (node.constructor.node_bypass === true) {
+ this.base.context.elementSource.connect(node.processor)
- processor.processor.connect(this.player.audioContext.destination)
+ node.processor.connect(this.base.context.destination)
- continue
- }
+ continue
+ }
- if (typeof processor._attach !== "function") {
- this.player.console.error(`Processor ${processor.constructor.refName} not support attach`)
+ if (typeof node._attach !== "function") {
+ this.base.console.error(
+ `Processor ${node.constructor.refName} not support attach`,
+ )
- continue
- }
+ continue
+ }
- instance = await processor._attach(instance, index)
- }
+ await node._attach(index)
+ }
- const lastProcessor = instance.attachedProcessors[instance.attachedProcessors.length - 1].processor
+ const lastProcessor = this.attached[this.attached.length - 1].processor
- // now attach to destination
- lastProcessor.connect(this.player.audioContext.destination)
+ // now attach to destination
+ lastProcessor.connect(this.base.context.destination)
+ }
- return instance
- }
-}
\ No newline at end of file
+ detachAllNodes = async () => {
+ for (const [index, node] of this.attached.entries()) {
+ await node._detach()
+ }
+ }
+}
diff --git a/packages/app/src/cores/player/classes/TrackInstance.js b/packages/app/src/cores/player/classes/TrackInstance.js
index f88278c8..cae48993 100644
--- a/packages/app/src/cores/player/classes/TrackInstance.js
+++ b/packages/app/src/cores/player/classes/TrackInstance.js
@@ -1,206 +1,131 @@
import TrackManifest from "./TrackManifest"
-import { MediaPlayer } from "dashjs"
export default class TrackInstance {
- constructor(player, manifest) {
+ constructor(manifest, player) {
+ if (typeof manifest === "undefined") {
+ throw new Error("Manifest is required")
+ }
+
if (!player) {
throw new Error("Player core is required")
}
- if (typeof manifest === "undefined") {
- throw new Error("Manifest is required")
+ if (!(manifest instanceof TrackManifest)) {
+ manifest = new TrackManifest(manifest, player)
+ }
+
+ if (!manifest.source) {
+ throw new Error("Manifest must have a source")
}
this.player = player
this.manifest = manifest
this.id = this.manifest.id ?? this.manifest._id
-
- return this
}
- _initialized = false
+ play = async (params = {}) => {
+ const startTime = performance.now()
- audio = null
-
- contextElement = null
-
- abortController = new AbortController()
-
- attachedProcessors = []
-
- waitUpdateTimeout = null
-
- mediaEvents = {
- ended: () => {
- this.player.next()
- },
- loadeddata: () => {
- this.player.state.loading = false
- },
- loadedmetadata: () => {
- if (this.audio.duration === Infinity) {
- this.player.state.live = true
- } else {
- this.player.state.live = false
- }
- },
- play: () => {
- this.player.state.playback_status = "playing"
- },
- playing: () => {
- this.player.state.loading = false
-
- this.player.state.playback_status = "playing"
-
- if (typeof this.waitUpdateTimeout !== "undefined") {
- clearTimeout(this.waitUpdateTimeout)
- this.waitUpdateTimeout = null
- }
- },
- pause: () => {
- this.player.state.playback_status = "paused"
- },
- durationchange: () => {
- this.player.eventBus.emit(
- `player.durationchange`,
- this.audio.duration,
- )
- },
- waiting: () => {
- if (this.waitUpdateTimeout) {
- clearTimeout(this.waitUpdateTimeout)
- this.waitUpdateTimeout = null
- }
-
- // if takes more than 150ms to load, update loading state
- this.waitUpdateTimeout = setTimeout(() => {
- this.player.state.loading = true
- }, 150)
- },
- seeked: () => {
- this.player.eventBus.emit(`player.seeked`, this.audio.currentTime)
- },
- }
-
- initialize = async () => {
- this.manifest = await this.resolveManifest()
-
- this.audio = new Audio()
-
- this.audio.signal = this.abortController.signal
- this.audio.crossOrigin = "anonymous"
- this.audio.preload = "metadata"
-
- // support for dash audio streaming
- if (this.manifest.source.endsWith(".mpd")) {
- this.muxerPlayer = MediaPlayer().create()
- this.muxerPlayer.updateSettings({
- streaming: {
- buffer: {
- resetSourceBuffersForTrackSwitch: true,
- useChangeTypeForTrackSwitch: false,
- },
- },
- })
- this.muxerPlayer.initialize(this.audio, null, false)
-
- this.muxerPlayer.attachSource(this.manifest.source)
+ if (!this.manifest.source.endsWith(".mpd")) {
+ this.player.base.demuxer.destroy()
+ this.player.base.audio.src = this.manifest.source
} else {
- this.audio.src = this.manifest.source
- }
-
- for (const [key, value] of Object.entries(this.mediaEvents)) {
- this.audio.addEventListener(key, value)
- }
-
- this.contextElement = this.player.audioContext.createMediaElementSource(
- this.audio,
- )
-
- this._initialized = true
-
- return this
- }
-
- stop = () => {
- if (this.audio) {
- this.audio.pause()
- }
-
- if (this.muxerPlayer) {
- this.muxerPlayer.destroy()
- }
-
- const lastProcessor =
- this.attachedProcessors[this.attachedProcessors.length - 1]
-
- if (lastProcessor) {
- this.attachedProcessors[
- this.attachedProcessors.length - 1
- ]._destroy(this)
- }
-
- this.attachedProcessors = []
- }
-
- resolveManifest = async () => {
- if (typeof this.manifest === "string") {
- this.manifest = {
- src: this.manifest,
- }
- }
-
- this.manifest = new TrackManifest(this.manifest, {
- serviceProviders: this.player.serviceProviders,
- })
-
- if (this.manifest.service) {
- if (!this.player.serviceProviders.has(this.manifest.service)) {
- throw new Error(
- `Service ${this.manifest.service} is not supported`,
- )
+ if (!this.player.base.demuxer) {
+ this.player.base.createDemuxer()
}
- // try to resolve source file
- if (!this.manifest.source) {
- console.log("Resolving manifest cause no source defined")
-
- this.manifest = await this.player.serviceProviders.resolve(
- this.manifest.service,
- this.manifest,
- )
-
- console.log("Manifest resolved", this.manifest)
- }
- }
-
- if (!this.manifest.source) {
- throw new Error("Manifest `source` is required")
- }
-
- // set empty metadata if not provided
- if (!this.manifest.metadata) {
- this.manifest.metadata = {}
- }
-
- // auto name if a title is not provided
- if (!this.manifest.metadata.title) {
- this.manifest.metadata.title = this.manifest.source.split("/").pop()
- }
-
- // process overrides
- const override = await this.manifest.serviceOperations.fetchOverride()
-
- if (override) {
- console.log(
- `Override found for track ${this.manifest._id}`,
- override,
+ await this.player.base.demuxer.attachSource(
+ `${this.manifest.source}?t=${Date.now()}`,
)
-
- this.manifest.overrides = override
}
- return this.manifest
+ this.player.base.audio.currentTime = params.time ?? 0
+
+ if (this.player.base.audio.paused) {
+ await this.player.base.audio.play()
+ }
+
+ // reset audio volume and gain
+ this.player.base.audio.volume = 1
+ this.player.base.processors.gain.set(this.player.state.volume)
+
+ const endTime = performance.now()
+
+ this._loadMs = endTime - startTime
+
+ console.log(`[INSTANCE] Playing >`, this)
}
+
+ pause = async () => {
+ console.log("[INSTANCE] Pausing >", this)
+
+ this.player.base.audio.pause()
+ }
+
+ resume = async () => {
+ console.log("[INSTANCE] Resuming >", this)
+
+ this.player.base.audio.play()
+ }
+
+ // resolveManifest = async () => {
+ // if (typeof this.manifest === "string") {
+ // this.manifest = {
+ // src: this.manifest,
+ // }
+ // }
+
+ // this.manifest = new TrackManifest(this.manifest, {
+ // serviceProviders: this.player.serviceProviders,
+ // })
+
+ // if (this.manifest.service) {
+ // if (!this.player.serviceProviders.has(this.manifest.service)) {
+ // throw new Error(
+ // `Service ${this.manifest.service} is not supported`,
+ // )
+ // }
+
+ // // try to resolve source file
+ // if (!this.manifest.source) {
+ // console.log("Resolving manifest cause no source defined")
+
+ // this.manifest = await this.player.serviceProviders.resolve(
+ // this.manifest.service,
+ // this.manifest,
+ // )
+
+ // console.log("Manifest resolved", this.manifest)
+ // }
+ // }
+
+ // if (!this.manifest.source) {
+ // throw new Error("Manifest `source` is required")
+ // }
+
+ // // set empty metadata if not provided
+ // if (!this.manifest.metadata) {
+ // this.manifest.metadata = {}
+ // }
+
+ // // auto name if a title is not provided
+ // if (!this.manifest.metadata.title) {
+ // this.manifest.metadata.title = this.manifest.source.split("/").pop()
+ // }
+
+ // // process overrides
+ // const override = await this.manifest.serviceOperations.fetchOverride()
+
+ // if (override) {
+ // console.log(
+ // `Override found for track ${this.manifest._id}`,
+ // override,
+ // )
+
+ // this.manifest.overrides = override
+ // }
+
+ // return this.manifest
+ // }
}
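`TrackInstance` no longer owns its own `<audio>` element or dash.js player; `play()` just points the shared `AudioBase` at the right source. A condensed sketch of the source-selection branch above (the cache-busting query string is taken verbatim from the diff):

```js
// Direct files bypass dash.js; .mpd manifests go through the shared demuxer.
async function attachSource(base, manifest) {
    if (!manifest.source.endsWith(".mpd")) {
        base.demuxer.destroy()
        base.audio.src = manifest.source
        return
    }

    if (!base.demuxer) {
        base.createDemuxer()
    }

    await base.demuxer.attachSource(`${manifest.source}?t=${Date.now()}`)
}
```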
diff --git a/packages/app/src/cores/player/classes/TrackManifest.js b/packages/app/src/cores/player/classes/TrackManifest.js
index 1a8e404a..446f650d 100644
--- a/packages/app/src/cores/player/classes/TrackManifest.js
+++ b/packages/app/src/cores/player/classes/TrackManifest.js
@@ -1,4 +1,4 @@
-import jsmediatags from "jsmediatags/dist/jsmediatags.min.js"
+import { parseBlob } from "music-metadata"
import { FastAverageColor } from "fast-average-color"
export default class TrackManifest {
@@ -33,13 +33,6 @@ export default class TrackManifest {
this.artist = params.artist
}
- if (
- typeof params.artists !== "undefined" ||
- Array.isArray(params.artists)
- ) {
- this.artistStr = params.artists.join(", ")
- }
-
if (typeof params.source !== "undefined") {
this.source = params.source
}
@@ -48,8 +41,8 @@ export default class TrackManifest {
this.metadata = params.metadata
}
- if (typeof params.lyrics_enabled !== "undefined") {
- this.lyrics_enabled = params.lyrics_enabled
+ if (typeof params.liked !== "undefined") {
+ this.liked = params.liked
}
return this
@@ -58,87 +51,45 @@ export default class TrackManifest {
_id = null // used for api requests
uid = null // used for internal
- cover =
- "https://storage.ragestudio.net/comty-static-assets/default_song.png"
title = "Untitled"
album = "Unknown"
artist = "Unknown"
+ cover = null // set default cover url
source = null
- metadata = null
+ metadata = {}
// set default service to default
service = "default"
- // Extended from db
- lyrics_enabled = false
- liked = null
-
async initialize() {
- if (this.params.file) {
- this.metadata = await this.analyzeMetadata(
- this.params.file.originFileObj,
- )
+ if (!this.params.file) {
+ return this
+ }
- this.metadata.format = this.metadata.type.toUpperCase()
+ const analyzedMetadata = await parseBlob(this.params.file, {
+ skipPostHeaders: true,
+ }).catch(() => ({}))
- if (this.metadata.tags) {
- if (this.metadata.tags.title) {
- this.title = this.metadata.tags.title
- }
+ if (analyzedMetadata.format) {
+ this.metadata.format = analyzedMetadata.format.codec
+ }
- if (this.metadata.tags.artist) {
- this.artist = this.metadata.tags.artist
- }
+ if (analyzedMetadata.common) {
+ this.title = analyzedMetadata.common.title ?? this.title
+ this.artist = analyzedMetadata.common.artist ?? this.artist
+ this.album = analyzedMetadata.common.album ?? this.album
+ }
- if (this.metadata.tags.album) {
- this.album = this.metadata.tags.album
- }
+ if (analyzedMetadata.common.picture) {
+ const cover = analyzedMetadata.common.picture[0]
- if (this.metadata.tags.picture) {
- this.cover = app.cores.remoteStorage.binaryArrayToFile(
- this.metadata.tags.picture,
- "cover",
- )
-
- const coverUpload =
- await app.cores.remoteStorage.uploadFile(this.cover)
-
- this.cover = coverUpload.url
-
- delete this.metadata.tags.picture
- }
-
- this.handleChanges({
- cover: this.cover,
- title: this.title,
- artist: this.artist,
- album: this.album,
- })
- }
+ this._coverBlob = new Blob([cover.data], { type: cover.format })
+ this.cover = URL.createObjectURL(this._coverBlob)
}
return this
}
- handleChanges = (changes) => {
- if (typeof this.params.onChange === "function") {
- this.params.onChange(this.uid, changes)
- }
- }
-
- analyzeMetadata = async (file) => {
- return new Promise((resolve, reject) => {
- jsmediatags.read(file, {
- onSuccess: (data) => {
- return resolve(data)
- },
- onError: (error) => {
- return reject(error)
- },
- })
- })
- }
-
analyzeCoverColor = async () => {
const fac = new FastAverageColor()
@@ -169,8 +120,6 @@ export default class TrackManifest {
this,
)
- console.log(this.overrides)
-
if (this.overrides) {
return {
...result,
@@ -210,6 +159,7 @@ export default class TrackManifest {
return {
_id: this._id,
uid: this.uid,
+ cover: this.cover,
title: this.title,
album: this.album,
artist: this.artist,
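Metadata analysis moves from jsmediatags to music-metadata's `parseBlob`, which resolves to `{ format, common }`; embedded artwork arrives as `common.picture[0]` with `data` and `format` fields and is kept as a local Blob/object URL until TracksManager uploads it. A standalone sketch of that extraction:

```js
import { parseBlob } from "music-metadata"

// Read title/artist/album and embedded cover art from an audio File/Blob.
async function readTrackTags(file) {
    const { format, common } = await parseBlob(file, { skipPostHeaders: true })

    const tags = {
        title: common?.title ?? "Untitled",
        artist: common?.artist ?? "Unknown",
        album: common?.album ?? "Unknown",
        codec: format?.codec,
        coverUrl: null,
    }

    if (common?.picture?.length) {
        const cover = common.picture[0]
        const blob = new Blob([cover.data], { type: cover.format })
        tags.coverUrl = URL.createObjectURL(blob)
    }

    return tags
}
```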
diff --git a/packages/app/src/cores/player/player.core.js b/packages/app/src/cores/player/player.core.js
index 435a466b..2d0c260f 100755
--- a/packages/app/src/cores/player/player.core.js
+++ b/packages/app/src/cores/player/player.core.js
@@ -3,11 +3,11 @@ import { Core } from "@ragestudio/vessel"
import ActivityEvent from "@classes/ActivityEvent"
import QueueManager from "@classes/QueueManager"
import TrackInstance from "./classes/TrackInstance"
-//import MediaSession from "./classes/MediaSession"
+import MediaSession from "./classes/MediaSession"
import ServiceProviders from "./classes/Services"
import PlayerState from "./classes/PlayerState"
import PlayerUI from "./classes/PlayerUI"
-import PlayerProcessors from "./classes/PlayerProcessors"
+import AudioBase from "./classes/AudioBase"
import setSampleRate from "./helpers/setSampleRate"
@@ -22,27 +22,18 @@ export default class Player extends Core {
// player config
static defaultSampleRate = 48000
- static gradualFadeMs = 150
- static maxManifestPrecompute = 3
state = new PlayerState(this)
ui = new PlayerUI(this)
serviceProviders = new ServiceProviders()
- //nativeControls = new MediaSession()
- audioContext = new AudioContext({
- sampleRate:
- AudioPlayerStorage.get("sample_rate") ?? Player.defaultSampleRate,
- latencyHint: "playback",
- })
+ nativeControls = new MediaSession(this)
- audioProcessors = new PlayerProcessors(this)
+ base = new AudioBase(this)
queue = new QueueManager({
loadFunction: this.createInstance,
})
- currentTrackInstance = null
-
public = {
start: this.start,
close: this.close,
@@ -74,10 +65,11 @@ export default class Player extends Core {
eventBus: () => {
return this.eventBus
},
+ base: () => {
+ return this.base
+ },
state: this.state,
ui: this.ui.public,
- audioContext: this.audioContext,
- gradualFadeMs: Player.gradualFadeMs,
}
async afterInitialize() {
@@ -85,8 +77,8 @@ export default class Player extends Core {
this.state.volume = 1
}
- //await this.nativeControls.initialize()
- await this.audioProcessors.initialize()
+ await this.nativeControls.initialize()
+ await this.base.initialize()
}
//
@@ -100,10 +92,6 @@ export default class Player extends Core {
}
}
- async createInstance(manifest) {
- return new TrackInstance(this, manifest)
- }
-
//
// Playback methods
//
@@ -112,46 +100,21 @@ export default class Player extends Core {
throw new Error("Audio instance is required")
}
- this.console.log("Initializing instance", instance)
-
// resume audio context if needed
- if (this.audioContext.state === "suspended") {
- this.audioContext.resume()
+ if (this.base.context.state === "suspended") {
+ this.base.context.resume()
}
- // initialize instance if is not
- if (this.queue.currentItem._initialized === false) {
- this.queue.currentItem = await instance.initialize()
- }
-
- this.console.log("Instance", this.queue.currentItem)
-
// update manifest
- this.state.track_manifest = this.queue.currentItem.manifest
-
- // attach processors
- this.queue.currentItem =
- await this.audioProcessors.attachProcessorsToInstance(
- this.queue.currentItem,
- )
-
- // set audio properties
- this.queue.currentItem.audio.currentTime = params.time ?? 0
- this.queue.currentItem.audio.muted = this.state.muted
- this.queue.currentItem.audio.loop =
- this.state.playback_mode === "repeat"
- this.queue.currentItem.gainNode.gain.value = Math.pow(
- this.state.volume,
- 2,
- )
+ this.state.track_manifest =
+ this.queue.currentItem.manifest.toSeriableObject()
// play
- await this.queue.currentItem.audio.play()
-
- this.console.log(`Playing track >`, this.queue.currentItem)
+ //await this.queue.currentItem.audio.play()
+ await this.queue.currentItem.play(params)
// update native controls
- //this.nativeControls.update(this.queue.currentItem.manifest)
+ this.nativeControls.update(this.queue.currentItem.manifest)
return this.queue.currentItem
}
@@ -160,10 +123,10 @@ export default class Player extends Core {
this.ui.attachPlayerComponent()
if (this.queue.currentItem) {
- await this.queue.currentItem.stop()
+ await this.queue.currentItem.pause()
}
- await this.abortPreloads()
+ //await this.abortPreloads()
await this.queue.flush()
this.state.loading = true
@@ -187,8 +150,8 @@ export default class Player extends Core {
playlist = await this.serviceProviders.resolveMany(playlist)
}
- for await (const [index, _manifest] of playlist.entries()) {
- let instance = await this.createInstance(_manifest)
+ for await (let [index, _manifest] of playlist.entries()) {
+ let instance = new TrackInstance(_manifest, this)
this.queue.add(instance)
}
@@ -229,10 +192,6 @@ export default class Player extends Core {
}
next() {
- if (this.queue.currentItem) {
- this.queue.currentItem.stop()
- }
-
//const isRandom = this.state.playback_mode === "shuffle"
const item = this.queue.next()
@@ -244,10 +203,6 @@ export default class Player extends Core {
}
previous() {
- if (this.queue.currentItem) {
- this.queue.currentItem.stop()
- }
-
const item = this.queue.previous()
return this.play(item)
@@ -275,18 +230,14 @@ export default class Player extends Core {
return null
}
- // set gain exponentially
- this.queue.currentItem.gainNode.gain.linearRampToValueAtTime(
- 0.0001,
- this.audioContext.currentTime + Player.gradualFadeMs / 1000,
- )
+ this.base.processors.gain.fade(0)
setTimeout(() => {
- this.queue.currentItem.audio.pause()
+ this.queue.currentItem.pause()
resolve()
}, Player.gradualFadeMs)
- //this.nativeControls.updateIsPlaying(false)
+ this.nativeControls.updateIsPlaying(false)
})
}
@@ -302,19 +253,12 @@ export default class Player extends Core {
}
// ensure the audio element starts from 0 volume
- this.queue.currentItem.gainNode.gain.value = 0.0001
-
- this.queue.currentItem.audio.play().then(() => {
+ this.queue.currentItem.resume().then(() => {
resolve()
})
+ this.base.processors.gain.fade(this.state.volume)
- // set gain exponentially
- this.queue.currentItem.gainNode.gain.linearRampToValueAtTime(
- Math.pow(this.state.volume, 2),
- this.audioContext.currentTime + Player.gradualFadeMs / 1000,
- )
-
- //this.nativeControls.updateIsPlaying(true)
+ this.nativeControls.updateIsPlaying(true)
})
}
@@ -325,10 +269,7 @@ export default class Player extends Core {
this.state.playback_mode = mode
- if (this.queue.currentItem) {
- this.queue.currentItem.audio.loop =
- this.state.playback_mode === "repeat"
- }
+ this.base.audio.loop = this.state.playback_mode === "repeat"
AudioPlayerStorage.set("mode", mode)
@@ -336,22 +277,15 @@ export default class Player extends Core {
}
stopPlayback() {
- if (this.queue.currentItem) {
- this.queue.currentItem.stop()
- }
-
+ this.base.flush()
this.queue.flush()
- this.abortPreloads()
-
this.state.playback_status = "stopped"
this.state.track_manifest = null
-
this.queue.currentItem = null
- this.track_next_instances = []
- this.track_prev_instances = []
- //this.nativeControls.destroy()
+ //this.abortPreloads()
+ this.nativeControls.flush()
}
//
@@ -369,7 +303,7 @@ export default class Player extends Core {
if (typeof to === "boolean") {
this.state.muted = to
- this.queue.currentItem.audio.muted = to
+ this.base.audio.muted = to
}
return this.state.muted
@@ -395,65 +329,42 @@ export default class Player extends Core {
volume = 0
}
- this.state.volume = volume
-
AudioPlayerStorage.set("volume", volume)
- if (this.queue.currentItem) {
- if (this.queue.currentItem.gainNode) {
- this.queue.currentItem.gainNode.gain.value = Math.pow(
- this.state.volume,
- 2,
- )
- }
- }
+ this.state.volume = volume
+ this.base.processors.gain.set(volume)
return this.state.volume
}
seek(time) {
- if (!this.queue.currentItem || !this.queue.currentItem.audio) {
+ if (!this.base.audio) {
return false
}
// if time not provided, return current time
if (typeof time === "undefined") {
- return this.queue.currentItem.audio.currentTime
+ return this.base.audio.currentTime
}
// if time is provided, seek to that time
if (typeof time === "number") {
this.console.log(
- `Seeking to ${time} | Duration: ${this.queue.currentItem.audio.duration}`,
+ `Seeking to ${time} | Duration: ${this.base.audio.duration}`,
)
- this.queue.currentItem.audio.currentTime = time
+ this.base.audio.currentTime = time
return time
}
}
duration() {
- if (!this.queue.currentItem || !this.queue.currentItem.audio) {
+ if (!this.base.audio) {
return false
}
- return this.queue.currentItem.audio.duration
- }
-
- loop(to) {
- if (typeof to !== "boolean") {
- this.console.warn("Loop must be a boolean")
- return false
- }
-
- this.state.loop = to ?? !this.state.loop
-
- if (this.queue.currentItem.audio) {
- this.queue.currentItem.audio.loop = this.state.loop
- }
-
- return this.state.loop
+ return this.base.audio.duration
}
close() {
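
The player core now delegates the AudioContext, the audio element, and the processor chain to a single AudioBase instance (base.context, base.audio, base.processors, base.flush). AudioBase itself is not included in this diff, so the sketch below is only an assumption of the shape such a wrapper would need to satisfy those calls:

// Hypothetical sketch of an AudioBase-like wrapper (the real class is not in
// this diff): one shared <audio> element, one AudioContext, and a processor set.
class AudioBaseSketch {
    constructor(player) {
        this.player = player

        this.audio = new Audio()
        this.audio.crossOrigin = "anonymous"

        this.context = new AudioContext({ latencyHint: "playback" })
        this.elementSource = this.context.createMediaElementSource(this.audio)

        // processors (gain, eq, compressor, ...) are attached between
        // elementSource and context.destination by a manager
        this.processors = {}
    }

    async initialize() {
        // pass audio straight through until processors are attached
        this.elementSource.connect(this.context.destination)
    }

    flush() {
        this.audio.pause()
        this.audio.removeAttribute("src")
        this.audio.currentTime = 0
    }
}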
diff --git a/packages/app/src/cores/player/processors/compressorNode/index.js b/packages/app/src/cores/player/processors/compressorNode/index.js
index 8c372892..446e5e15 100755
--- a/packages/app/src/cores/player/processors/compressorNode/index.js
+++ b/packages/app/src/cores/player/processors/compressorNode/index.js
@@ -2,44 +2,40 @@ import ProcessorNode from "../node"
import Presets from "../../classes/Presets"
export default class CompressorProcessorNode extends ProcessorNode {
- constructor(props) {
- super(props)
+ constructor(props) {
+ super(props)
- this.presets = new Presets({
- storage_key: "compressor",
- defaultPresetValue: {
- threshold: -50,
- knee: 40,
- ratio: 12,
- attack: 0.003,
- release: 0.25,
- },
- onApplyValues: this.applyValues.bind(this),
- })
+ this.presets = new Presets({
+ storage_key: "compressor",
+ defaultPresetValue: {
+ threshold: -50,
+ knee: 40,
+ ratio: 12,
+ attack: 0.003,
+ release: 0.25,
+ },
+ onApplyValues: this.applyValues.bind(this),
+ })
- this.exposeToPublic = {
- presets: this.presets,
- detach: this._detach,
- attach: this._attach,
- }
- }
+ this.exposeToPublic = {
+ presets: this.presets,
+ detach: this._detach,
+ attach: this._attach,
+ }
+ }
- static refName = "compressor"
- static dependsOnSettings = ["player.compressor"]
+ static refName = "compressor"
+ static dependsOnSettings = ["player.compressor"]
- async init(AudioContext) {
- if (!AudioContext) {
- throw new Error("AudioContext is required")
- }
+ async init() {
+ this.processor = this.audioContext.createDynamicsCompressor()
- this.processor = AudioContext.createDynamicsCompressor()
+ this.applyValues()
+ }
- this.applyValues()
- }
-
- applyValues() {
- Object.keys(this.presets.currentPresetValues).forEach((key) => {
- this.processor[key].value = this.presets.currentPresetValues[key]
- })
- }
-}
\ No newline at end of file
+ applyValues() {
+ Object.keys(this.presets.currentPresetValues).forEach((key) => {
+ this.processor[key].value = this.presets.currentPresetValues[key]
+ })
+ }
+}
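
For reference, the reformatted compressor node boils down to the standard DynamicsCompressorNode API; a standalone sketch of the same setup:

// Standalone example of the DynamicsCompressorNode setup used above.
const context = new AudioContext()
const compressor = context.createDynamicsCompressor()

const preset = {
    threshold: -50, // dB at which compression starts
    knee: 40, // dB range over which the curve transitions smoothly
    ratio: 12, // input dB change needed for 1 dB of output change
    attack: 0.003, // seconds to reduce gain by 10 dB
    release: 0.25, // seconds to increase gain by 10 dB
}

// each property is an AudioParam, so values are applied through .value
for (const [key, value] of Object.entries(preset)) {
    compressor[key].value = value
}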
diff --git a/packages/app/src/cores/player/processors/eqNode/index.js b/packages/app/src/cores/player/processors/eqNode/index.js
index 09103971..5525a6d6 100755
--- a/packages/app/src/cores/player/processors/eqNode/index.js
+++ b/packages/app/src/cores/player/processors/eqNode/index.js
@@ -2,93 +2,98 @@ import ProcessorNode from "../node"
import Presets from "../../classes/Presets"
export default class EqProcessorNode extends ProcessorNode {
- constructor(props) {
- super(props)
+ constructor(props) {
+ super(props)
- this.presets = new Presets({
- storage_key: "eq",
- defaultPresetValue: {
- 32: 0,
- 64: 0,
- 125: 0,
- 250: 0,
- 500: 0,
- 1000: 0,
- 2000: 0,
- 4000: 0,
- 8000: 0,
- 16000: 0,
- },
- onApplyValues: this.applyValues.bind(this),
- })
+ this.presets = new Presets({
+ storage_key: "eq",
+ defaultPresetValue: {
+ 32: 0,
+ 64: 0,
+ 125: 0,
+ 250: 0,
+ 500: 0,
+ 1000: 0,
+ 2000: 0,
+ 4000: 0,
+ 8000: 0,
+ 16000: 0,
+ },
+ onApplyValues: this.applyValues.bind(this),
+ })
- this.exposeToPublic = {
- presets: this.presets,
- }
- }
+ this.exposeToPublic = {
+ presets: this.presets,
+ }
+ }
- static refName = "eq"
- static lock = true
+ static refName = "eq"
- applyValues() {
- // apply to current instance
- this.processor.eqNodes.forEach((processor) => {
- const gainValue = this.presets.currentPresetValues[processor.frequency.value]
+ applyValues() {
+ // apply to current instance
+ this.processor.eqNodes.forEach((processor) => {
+ const gainValue =
+ this.presets.currentPresetValues[processor.frequency.value]
- if (processor.gain.value !== gainValue) {
- console.debug(`[EQ] Applying values to ${processor.frequency.value} Hz frequency with gain ${gainValue}`)
- processor.gain.value = gainValue
- }
- })
- }
+ if (processor.gain.value !== gainValue) {
+ console.debug(
+ `[EQ] Applying values to ${processor.frequency.value} Hz frequency with gain ${gainValue}`,
+ )
+ processor.gain.value = gainValue
+ }
+ })
+ }
- async init() {
- if (!this.audioContext) {
- throw new Error("audioContext is required")
- }
+ async init() {
+ if (!this.audioContext) {
+ throw new Error("audioContext is required")
+ }
- this.processor = this.audioContext.createGain()
+ this.processor = this.audioContext.createGain()
- this.processor.gain.value = 1
+ this.processor.gain.value = 1
- this.processor.eqNodes = []
+ this.processor.eqNodes = []
- const values = Object.entries(this.presets.currentPresetValues).map((entry) => {
- return {
- freq: parseFloat(entry[0]),
- gain: parseFloat(entry[1]),
- }
- })
+ const values = Object.entries(this.presets.currentPresetValues).map(
+ (entry) => {
+ return {
+ freq: parseFloat(entry[0]),
+ gain: parseFloat(entry[1]),
+ }
+ },
+ )
- values.forEach((eqValue, index) => {
- // chekc if freq and gain is valid
- if (isNaN(eqValue.freq)) {
- eqValue.freq = 0
- }
- if (isNaN(eqValue.gain)) {
- eqValue.gain = 0
- }
+ values.forEach((eqValue, index) => {
+ // check if freq and gain are valid
+ if (isNaN(eqValue.freq)) {
+ eqValue.freq = 0
+ }
+ if (isNaN(eqValue.gain)) {
+ eqValue.gain = 0
+ }
- this.processor.eqNodes[index] = this.audioContext.createBiquadFilter()
- this.processor.eqNodes[index].type = "peaking"
- this.processor.eqNodes[index].frequency.value = eqValue.freq
- this.processor.eqNodes[index].gain.value = eqValue.gain
- })
+ this.processor.eqNodes[index] =
+ this.audioContext.createBiquadFilter()
+ this.processor.eqNodes[index].type = "peaking"
+ this.processor.eqNodes[index].frequency.value = eqValue.freq
+ this.processor.eqNodes[index].gain.value = eqValue.gain
+ })
- // connect nodes
- for await (let [index, eqNode] of this.processor.eqNodes.entries()) {
- const nextNode = this.processor.eqNodes[index + 1]
+ // connect nodes
+ for await (let [index, eqNode] of this.processor.eqNodes.entries()) {
+ const nextNode = this.processor.eqNodes[index + 1]
- if (index === 0) {
- this.processor.connect(eqNode)
- }
+ if (index === 0) {
+ this.processor.connect(eqNode)
+ }
- if (nextNode) {
- eqNode.connect(nextNode)
- }
- }
+ if (nextNode) {
+ eqNode.connect(nextNode)
+ }
+ }
- // set last processor for processor node can properly connect to the next node
- this.processor._last = this.processor.eqNodes.at(-1)
- }
-}
\ No newline at end of file
+ // set the last processor so the next node can connect to it properly
+ this.processor._last = this.processor.eqNodes.at(-1)
+ }
+}
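
The EQ node's init() builds a serial chain of peaking biquad filters behind a unity-gain input and exposes the tail via processor._last. A compact standalone sketch of that wiring:

// Standalone sketch of the 10-band peaking filter chain built in init().
const context = new AudioContext()
const frequencies = [32, 64, 125, 250, 500, 1000, 2000, 4000, 8000, 16000]

const input = context.createGain()
input.gain.value = 1

const bands = frequencies.map((freq) => {
    const filter = context.createBiquadFilter()
    filter.type = "peaking"
    filter.frequency.value = freq
    filter.gain.value = 0 // flat by default, adjusted by presets
    return filter
})

// connect input -> band[0] -> band[1] -> ... -> last band
bands.reduce((prev, node) => {
    prev.connect(node)
    return node
}, input)

// the tail of the chain is what the next processor (or destination) connects from
const last = bands.at(-1)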
diff --git a/packages/app/src/cores/player/processors/gainNode/index.js b/packages/app/src/cores/player/processors/gainNode/index.js
index dc00fa59..0ed6c33f 100755
--- a/packages/app/src/cores/player/processors/gainNode/index.js
+++ b/packages/app/src/cores/player/processors/gainNode/index.js
@@ -1,60 +1,49 @@
-import AudioPlayerStorage from "../../player.storage"
import ProcessorNode from "../node"
export default class GainProcessorNode extends ProcessorNode {
- static refName = "gain"
+ static refName = "gain"
+ static gradualFadeMs = 150
- static lock = true
+ exposeToPublic = {
+ set: this.setGain.bind(this),
+ linearRampToValueAtTime: this.linearRampToValueAtTime.bind(this),
+ fade: this.fade.bind(this),
+ }
- static defaultValues = {
- gain: 1,
- }
+ setGain(gain) {
+ gain = this.processGainValue(gain)
- state = {
- gain: AudioPlayerStorage.get("gain") ?? GainProcessorNode.defaultValues.gain,
- }
+ return (this.processor.gain.value = gain)
+ }
- exposeToPublic = {
- modifyValues: function (values) {
- this.state = {
- ...this.state,
- ...values,
- }
+ linearRampToValueAtTime(gain, time) {
+ gain = this.processGainValue(gain)
+ return this.processor.gain.linearRampToValueAtTime(gain, time)
+ }
- AudioPlayerStorage.set("gain", this.state.gain)
+ fade(gain) {
+ if (gain <= 0) {
+ gain = 0.0001
+ } else {
+ gain = this.processGainValue(gain)
+ }
- this.applyValues()
- }.bind(this),
- resetDefaultValues: function () {
- this.exposeToPublic.modifyValues(GainProcessorNode.defaultValues)
+ const currentTime = this.audioContext.currentTime
+ const fadeTime = currentTime + this.constructor.gradualFadeMs / 1000
- return this.state
- }.bind(this),
- values: () => this.state,
- }
+ this.processor.gain.linearRampToValueAtTime(gain, fadeTime)
+ }
- applyValues() {
- // apply to current instance
- this.processor.gain.value = app.cores.player.state.volume * this.state.gain
- }
+ processGainValue(gain) {
+ return Math.pow(gain, 2)
+ }
- async init() {
- if (!this.audioContext) {
- throw new Error("audioContext is required")
- }
+ async init() {
+ if (!this.audioContext) {
+ throw new Error("audioContext is required")
+ }
- this.processor = this.audioContext.createGain()
-
- this.applyValues()
- }
-
- mutateInstance(instance) {
- if (!instance) {
- throw new Error("instance is required")
- }
-
- instance.gainNode = this.processor
-
- return instance
- }
-}
\ No newline at end of file
+ this.processor = this.audioContext.createGain()
+ this.processor.gain.value = this.player.state.volume
+ }
+}
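
The gain processor now owns the fade logic that player.core.js previously inlined: a squared (roughly perceptual) volume curve plus a short linear ramp. A standalone sketch of the same behaviour:

// Standalone sketch of the fade behaviour exposed by the gain processor.
const context = new AudioContext()
const gainNode = context.createGain()
const gradualFadeMs = 150

function fadeTo(volume) {
    // squared volume approximates perceptual loudness; never ramp to exactly 0
    const target = volume <= 0 ? 0.0001 : Math.pow(volume, 2)

    // anchor the ramp at the current value so it starts from "now"
    gainNode.gain.setValueAtTime(gainNode.gain.value, context.currentTime)
    gainNode.gain.linearRampToValueAtTime(
        target,
        context.currentTime + gradualFadeMs / 1000,
    )
}

// fade out before pausing, then fade back in on resume
fadeTo(0)
setTimeout(() => fadeTo(1), gradualFadeMs)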
diff --git a/packages/app/src/cores/player/processors/index.js b/packages/app/src/cores/player/processors/index.js
index c266c920..4a6a87c0 100755
--- a/packages/app/src/cores/player/processors/index.js
+++ b/packages/app/src/cores/player/processors/index.js
@@ -2,13 +2,12 @@ import EqProcessorNode from "./eqNode"
import GainProcessorNode from "./gainNode"
import CompressorProcessorNode from "./compressorNode"
//import BPMProcessorNode from "./bpmNode"
-
-import SpatialNode from "./spatialNode"
+//import SpatialNode from "./spatialNode"
export default [
- //BPMProcessorNode,
- EqProcessorNode,
- GainProcessorNode,
- CompressorProcessorNode,
- SpatialNode,
-]
\ No newline at end of file
+ //BPMProcessorNode,
+ EqProcessorNode,
+ GainProcessorNode,
+ CompressorProcessorNode,
+ //SpatialNode,
+]
diff --git a/packages/app/src/cores/player/processors/node.js b/packages/app/src/cores/player/processors/node.js
index 84a6a865..ddcd1826 100755
--- a/packages/app/src/cores/player/processors/node.js
+++ b/packages/app/src/cores/player/processors/node.js
@@ -1,172 +1,147 @@
export default class ProcessorNode {
- constructor(PlayerCore) {
- if (!PlayerCore) {
- throw new Error("PlayerCore is required")
- }
+ constructor(manager) {
+ if (!manager) {
+ throw new Error("processorManager is required")
+ }
- this.PlayerCore = PlayerCore
- this.audioContext = PlayerCore.audioContext
- }
+ this.manager = manager
+ this.audioContext = manager.base.context
+ this.elementSource = manager.base.elementSource
+ this.player = manager.base.player
+ }
- async _init() {
- // check if has init method
- if (typeof this.init === "function") {
- await this.init(this.audioContext)
- }
+ async _init() {
+ // check if has init method
+ if (typeof this.init === "function") {
+ await this.init()
+ }
- // check if has declared bus events
- if (typeof this.busEvents === "object") {
- Object.entries(this.busEvents).forEach((event, fn) => {
- app.eventBus.on(event, fn)
- })
- }
+ // check if has declared bus events
+ if (typeof this.busEvents === "object") {
+ Object.entries(this.busEvents).forEach((event, fn) => {
+ app.eventBus.on(event, fn)
+ })
+ }
- if (typeof this.processor._last === "undefined") {
- this.processor._last = this.processor
- }
+ if (typeof this.processor._last === "undefined") {
+ this.processor._last = this.processor
+ }
- return this
- }
+ return this
+ }
- _attach(instance, index) {
- if (typeof instance !== "object") {
- instance = this.PlayerCore.currentAudioInstance
- }
+ _attach(index) {
+ // check if has dependsOnSettings
+ if (Array.isArray(this.constructor.dependsOnSettings)) {
+ // check if the instance has the settings
+ if (
+ !this.constructor.dependsOnSettings.every((setting) =>
+ app.cores.settings.get(setting),
+ )
+ ) {
+ console.warn(
+ `Skipping attachment for [${this.constructor.refName ?? this.constructor.name}] node because it does not satisfy the settings dependency > ${this.constructor.dependsOnSettings.join(", ")}`,
+ )
- // check if has dependsOnSettings
- if (Array.isArray(this.constructor.dependsOnSettings)) {
- // check if the instance has the settings
- if (!this.constructor.dependsOnSettings.every((setting) => app.cores.settings.get(setting))) {
- console.warn(`Skipping attachment for [${this.constructor.refName ?? this.constructor.name}] node, cause is not passing the settings dependecy > ${this.constructor.dependsOnSettings.join(", ")}`)
+ return null
+ }
+ }
- return instance
- }
- }
+ // if index is not defined, attach to the last node
+ if (!index) {
+ index = this.manager.attached.length
+ }
- // if index is not defined, attach to the last node
- if (!index) {
- index = instance.attachedProcessors.length
- }
+ const prevNode = this.manager.attached[index - 1]
+ const nextNode = this.manager.attached[index + 1]
- const prevNode = instance.attachedProcessors[index - 1]
- const nextNode = instance.attachedProcessors[index + 1]
+ const currentIndex = this._findIndex()
- const currentIndex = this._findIndex(instance)
+ // check if is already attached
+ if (currentIndex !== false) {
+ console.warn(
+ `[${this.constructor.refName ?? this.constructor.name}] node is already attached`,
+ )
- // check if is already attached
- if (currentIndex !== false) {
- console.warn(`[${this.constructor.refName ?? this.constructor.name}] node is already attached`)
+ return null
+ }
- return instance
- }
+ // first, check if there is a prevNode and whether it is connected to something
+ // if it is, disconnect it
+ // if there is none, this is the first node, so connect it to the media source
+ if (prevNode && prevNode.processor._last.numberOfOutputs > 0) {
+ //console.log(`[${this.constructor.refName ?? this.constructor.name}] node is already attached to the previous node, disconnecting...`)
+ // if has outputs, disconnect from the next node
+ prevNode.processor._last.disconnect()
- // first check if has prevNode and if is connected to something
- // if has, disconnect it
- // if it not has, its means that is the first node, so connect to the media source
- if (prevNode && prevNode.processor._last.numberOfOutputs > 0) {
- //console.log(`[${this.constructor.refName ?? this.constructor.name}] node is already attached to the previous node, disconnecting...`)
- // if has outputs, disconnect from the next node
- prevNode.processor._last.disconnect()
+ // now, connect to the processor
+ prevNode.processor._last.connect(this.processor)
+ } else {
+ //console.log(`[${this.constructor.refName ?? this.constructor.name}] node is the first node, connecting to the media source...`)
+ this.elementSource.connect(this.processor)
+ }
- // now, connect to the processor
- prevNode.processor._last.connect(this.processor)
- } else {
- //console.log(`[${this.constructor.refName ?? this.constructor.name}] node is the first node, connecting to the media source...`)
- instance.contextElement.connect(this.processor)
- }
+ // now, check if there is a next node
+ // if there is, connect to it
+ // if not, connect to the destination
+ if (nextNode) {
+ this.processor.connect(nextNode.processor)
+ }
- // now, check if it has a next node
- // if has, connect to it
- // if not, connect to the destination
- if (nextNode) {
- this.processor.connect(nextNode.processor)
- }
+ // add to the attachedProcessors
+ this.manager.attached.splice(index, 0, this)
- // add to the attachedProcessors
- instance.attachedProcessors.splice(index, 0, this)
+ // // handle instance mutation
+ // if (typeof this.mutateInstance === "function") {
+ // instance = this.mutateInstance(instance)
+ // }
- // handle instance mutation
- if (typeof this.mutateInstance === "function") {
- instance = this.mutateInstance(instance)
- }
+ return this
+ }
- return instance
- }
+ _detach() {
+ // find the index of the node within the attached processors, searching for a matching refName
+ const index = this._findIndex()
- _detach(instance) {
- if (typeof instance !== "object") {
- instance = this.PlayerCore.currentAudioInstance
- }
+ if (index === false) {
+ return null
+ }
- // find index of the node within the attachedProcessors serching for matching refName
- const index = this._findIndex(instance)
+ // retrieve the previous and next nodes
+ const prevNode = this.manager.attached[index - 1]
+ const nextNode = this.manager.attached[index + 1]
- if (!index) {
- return instance
- }
+ // check if has previous node and if has outputs
+ if (prevNode && prevNode.processor._last.numberOfOutputs > 0) {
+ // if has outputs, disconnect from the previous node
+ prevNode.processor._last.disconnect()
+ }
- // retrieve the previous and next nodes
- const prevNode = instance.attachedProcessors[index - 1]
- const nextNode = instance.attachedProcessors[index + 1]
+ // disconnect
+ this.processor.disconnect()
+ this.manager.attached.splice(index, 1)
- // check if has previous node and if has outputs
- if (prevNode && prevNode.processor._last.numberOfOutputs > 0) {
- // if has outputs, disconnect from the previous node
- prevNode.processor._last.disconnect()
- }
+ // now, connect the previous node to the next node
+ if (prevNode && nextNode) {
+ prevNode.processor._last.connect(nextNode.processor)
+ } else {
+ // it means that this is the last node, so connect to the destination
+ prevNode.processor._last.connect(this.audioContext.destination)
+ }
- // disconnect
- instance = this._destroy(instance)
+ return this
+ }
- // now, connect the previous node to the next node
- if (prevNode && nextNode) {
- prevNode.processor._last.connect(nextNode.processor)
- } else {
- // it means that this is the last node, so connect to the destination
- prevNode.processor._last.connect(this.audioContext.destination)
- }
+ _findIndex() {
+ // find the index of the node within the attached processors, searching for a matching refName
+ const index = this.manager.attached.findIndex((node) => {
+ return node.constructor.refName === this.constructor.refName
+ })
- return instance
- }
+ if (index === -1) {
+ return false
+ }
- _destroy(instance) {
- if (typeof instance !== "object") {
- instance = this.PlayerCore.currentAudioInstance
- }
-
- const index = this._findIndex(instance)
-
- if (!index) {
- return instance
- }
-
- this.processor.disconnect()
-
- instance.attachedProcessors.splice(index, 1)
-
- return instance
- }
-
- _findIndex(instance) {
- if (!instance) {
- instance = this.PlayerCore.currentAudioInstance
- }
-
- if (!instance) {
- console.warn(`Instance is not defined`)
-
- return false
- }
-
- // find index of the node within the attachedProcessors serching for matching refName
- const index = instance.attachedProcessors.findIndex((node) => {
- return node.constructor.refName === this.constructor.refName
- })
-
- if (index === -1) {
- return false
- }
-
- return index
- }
-}
\ No newline at end of file
+ return index
+ }
+}
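
ProcessorNode now talks to a processor manager (manager.base, manager.attached) instead of per-track instances. The manager itself is not part of this diff, so the sketch below is only an assumption of the contract these nodes rely on:

// Hypothetical sketch of the processors manager contract assumed by
// ProcessorNode (the real implementation lives inside AudioBase, not shown here).
class ProcessorsManagerSketch {
    constructor(base) {
        this.base = base // exposes .context, .elementSource and .player
        this.attached = [] // ordered list of attached ProcessorNode instances
    }

    async attachAll(nodes) {
        for (const node of nodes) {
            await node._init()
            node._attach() // appends itself to this.attached and rewires the graph
        }

        // make sure the tail of the chain reaches the speakers
        const last = this.attached.at(-1)

        if (last) {
            last.processor._last.connect(this.base.context.destination)
        } else {
            this.base.elementSource.connect(this.base.context.destination)
        }
    }
}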
diff --git a/packages/app/src/cores/remoteStorage/remoteStorage.core.js b/packages/app/src/cores/remoteStorage/remoteStorage.core.js
index 91bd8da0..3cca026c 100755
--- a/packages/app/src/cores/remoteStorage/remoteStorage.core.js
+++ b/packages/app/src/cores/remoteStorage/remoteStorage.core.js
@@ -84,9 +84,9 @@ export default class RemoteStorage extends Core {
_reject(message)
})
- uploader.events.on("progress", ({ percentProgress }) => {
+ uploader.events.on("progress", (data) => {
if (typeof onProgress === "function") {
- onProgress(file, percentProgress)
+ onProgress(file, data)
}
})
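
Since the uploader now emits { percent, state } rather than percentProgress, consumers receive the whole payload. A minimal sketch of subscribing to it (state values other than "Uploading" are assumptions):

// Sketch: subscribing to the chunked uploader's progress events directly,
// assuming an uploader instance like the one created in this core.
uploader.events.on("progress", (data) => {
    // data: { percent: 0-100, state: "Uploading" | "Processing" (assumed) | ... }
    console.log(`[upload] ${data.percent}% - ${data.state}`)
})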
diff --git a/packages/app/src/cores/sfx/sfx.core.js b/packages/app/src/cores/sfx/sfx.core.js
index 06c7ec63..aad78101 100755
--- a/packages/app/src/cores/sfx/sfx.core.js
+++ b/packages/app/src/cores/sfx/sfx.core.js
@@ -6,102 +6,119 @@ import store from "store"
import config from "@config"
export default class SFXCore extends Core {
- static namespace = "sfx"
+ static namespace = "sfx"
- soundsPool = {}
+ soundsPool = {}
- public = {
- loadSoundpack: this.loadSoundpack.bind(this),
- play: this.play,
- }
+ public = {
+ loadSoundpack: this.loadSoundpack.bind(this),
+ play: this.play,
+ }
- onEvents = {
- "sfx:test": (volume) => {
- // play a sound to test volume
- this.play("test", {
- volume: volume / 100,
- })
- }
- }
+ onEvents = {
+ "sfx:test": (volume) => {
+ // play a sound to test volume
+ this.play("test", {
+ volume: volume / 100,
+ })
+ },
+ }
- async loadSoundpack(soundpack) {
- if (!soundpack) {
- soundpack = store.get("soundpack")
- }
+ async loadSoundpack(soundpack) {
+ if (!soundpack) {
+ soundpack = store.get("soundpack")
+ }
- if (!soundpack) {
- soundpack = config.defaultSoundPack ?? {}
- }
+ if (!soundpack) {
+ soundpack = config.defaultSoundPack ?? {}
+ }
- // check if is valid url with regex
- const urlRegex = /^(http|https):\/\/[^ "]+$/;
+ // check if it is a valid URL using a regex
+ const urlRegex = /^(http|https):\/\/[^ "]+$/
- if (urlRegex.test(soundpack)) {
- const { data } = await axios.get(soundpack)
+ if (urlRegex.test(soundpack)) {
+ const { data } = await axios.get(soundpack)
- soundpack = data
- }
+ soundpack = data
+ }
- if (typeof soundpack.sounds !== "object") {
- this.console.error(`Soundpack [${soundpack.id}] is not a valid soundpack.`)
- return false
- }
+ if (typeof soundpack.sounds !== "object") {
+ this.console.error(
+ `Soundpack [${soundpack.id}] is not a valid soundpack.`,
+ )
+ return false
+ }
- this.console.log(`Loading soundpack [${soundpack.id} | ${soundpack.name}] by ${soundpack.author} (${soundpack.version})`)
+ this.console.log(
+ `Loading soundpack [${soundpack.id} | ${soundpack.name}] by ${soundpack.author} (${soundpack.version})`,
+ )
- for (const [name, path] of Object.entries(soundpack.sounds)) {
- this.soundsPool[name] = new Howl({
- volume: 0.5,
- src: [path],
- })
- }
- }
+ for (const [name, path] of Object.entries(soundpack.sounds)) {
+ this.soundsPool[name] = new Howl({
+ volume: 0.5,
+ src: [path],
+ })
+ }
+ }
- async play(name, options = {}) {
- if (!window.app.cores.settings.is("ui.effects", true)) {
- return false
- }
+ async play(name, options = {}) {
+ if (!window.app.cores.settings.is("ui.effects", true)) {
+ return false
+ }
- const audioInstance = this.soundsPool[name]
+ const audioInstance = this.soundsPool[name]
- if (!audioInstance) {
- return false
- }
+ if (!audioInstance) {
+ return false
+ }
- if (typeof options.volume !== "undefined") {
- audioInstance.volume(options.volume)
- } else {
- audioInstance.volume((window.app.cores.settings.get("ui.general_volume") ?? 0) / 100)
- }
+ if (typeof options.volume !== "undefined") {
+ audioInstance.volume(options.volume)
+ } else {
+ audioInstance.volume(
+ (window.app.cores.settings.get("ui.general_volume") ?? 0) / 100,
+ )
+ }
- audioInstance.play()
- }
+ audioInstance.play()
+ }
- async handleClick(event) {
- // search for closest button
- const button = event.target.closest("button") || event.target.closest(".ant-btn")
+ async handleClick(event) {
+ // search for closest button
+ const button =
+ event.target.closest("button") || event.target.closest(".ant-btn")
- // search for a slider
- const slider = event.target.closest("input[type=range]")
+ // search for a slider
+ const slider = event.target.closest("input[type=range]")
- // if button exist and has aria-checked attribute then play switch_on or switch_off
- if (button) {
- if (button.hasAttribute("aria-checked")) {
- return this.play(button.getAttribute("aria-checked") === "true" ? "component.switch_off" : "component.switch_on")
- }
+ // if the button exists and has an aria-checked attribute, play switch_on or switch_off
+ if (button) {
+ if (button.hasAttribute("aria-checked")) {
+ return this.play(
+ button.getAttribute("aria-checked") === "true"
+ ? "component.switch_off"
+ : "component.switch_on",
+ )
+ }
- return this.play("generic_click")
- }
+ return this.play("generic_click")
+ }
- if (slider) {
- // check if is up or down
- this.console.log(slider)
- }
- }
+ if (slider) {
+ // check if is up or down
+ this.console.log(slider)
+ }
+ }
- async onInitialize() {
- await this.loadSoundpack()
+ async onInitialize() {
+ await this.loadSoundpack()
- document.addEventListener("click", (...args) => { this.handleClick(...args) }, true)
- }
-}
\ No newline at end of file
+ document.addEventListener(
+ "click",
+ (...args) => {
+ this.handleClick(...args)
+ },
+ true,
+ )
+ }
+}
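
The reformatted soundpack loader is essentially: fetch a JSON manifest, then create one Howl per entry. A standalone sketch of that flow using the same field names:

import axios from "axios"
import { Howl } from "howler"

// Standalone sketch of the soundpack loading done in loadSoundpack().
async function loadSounds(soundpackUrl) {
    const { data: soundpack } = await axios.get(soundpackUrl)

    if (typeof soundpack.sounds !== "object") {
        throw new Error(`Soundpack [${soundpack.id}] is not valid`)
    }

    const pool = {}

    for (const [name, src] of Object.entries(soundpack.sounds)) {
        pool[name] = new Howl({
            src: [src],
            volume: 0.5,
        })
    }

    return pool
}

// usage: const pool = await loadSounds(url); pool["generic_click"]?.play()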
diff --git a/packages/app/src/pages/@mobile-views/player/index.jsx b/packages/app/src/pages/@mobile-views/player/index.jsx
index 305e6452..eb3c204f 100755
--- a/packages/app/src/pages/@mobile-views/player/index.jsx
+++ b/packages/app/src/pages/@mobile-views/player/index.jsx
@@ -4,7 +4,7 @@ import classnames from "classnames"
import { Icons } from "@components/Icons"
import SeekBar from "@components/Player/SeekBar"
import Controls from "@components/Player/Controls"
-import ExtraActions from "@components/Player/ExtraActions"
+import Actions from "@components/Player/Actions"
import { usePlayerStateContext } from "@contexts/WithPlayerContext"
import RGBStringToValues from "@utils/rgbToValues"
@@ -12,102 +12,96 @@ import RGBStringToValues from "@utils/rgbToValues"
import "./index.less"
const ServiceIndicator = (props) => {
- if (!props.service) {
- return null
- }
+ if (!props.service) {
+ return null
+ }
- switch (props.service) {
- case "tidal": {
- return
- Playing from Tidal
-
- }
- default: {
- return null
- }
- }
+ switch (props.service) {
+ case "tidal": {
+ return (
+
+ Playing from Tidal
+
+ )
+ }
+ default: {
+ return null
+ }
+ }
}
const AudioPlayer = (props) => {
- const [playerState] = usePlayerStateContext()
+ const [playerState] = usePlayerStateContext()
- React.useEffect(() => {
- if (app.currentDragger) {
- app.currentDragger.setBackgroundColorValues(RGBStringToValues(playerState.track_manifest?.cover_analysis?.rgb))
- }
+ React.useEffect(() => {
+ if (app.currentDragger) {
+ app.currentDragger.setBackgroundColorValues(
+ RGBStringToValues(
+ playerState.track_manifest?.cover_analysis?.rgb,
+ ),
+ )
+ }
+ }, [playerState.track_manifest?.cover_analysis])
- }, [playerState.track_manifest?.cover_analysis])
+ const {
+ title,
+ album,
+ artist,
+ service,
+ lyricsEnabled,
+ cover_analysis,
+ cover,
+ } = playerState.track_manifest ?? {}
- const {
- title,
- album,
- artist,
- service,
- lyricsEnabled,
- cover_analysis,
- cover,
- } = playerState.track_manifest ?? {}
+ const playing = playerState.playback_status === "playing"
+ const stopped = playerState.playback_status === "stopped"
- const playing = playerState.playback_status === "playing"
- const stopped = playerState.playback_status === "stopped"
+ const titleText = !playing && stopped ? "Stopped" : (title ?? "Untitled")
+ const subtitleText = `${artist} | ${album?.title ?? album}`
- const titleText = (!playing && stopped) ? "Stopped" : (title ?? "Untitled")
- const subtitleText = `${artist} | ${album?.title ?? album}`
+ return (
+
+
+
- return
-
-
+
-
+
+
+
+
{titleText}
+
+
+ {subtitleText}
+
+
+
-
-
-
-
- {
- titleText
- }
-
-
-
-
-
- {subtitleText}
-
-
-
-
-
+
-
+
-
-
-
-
-
+
+
+
+ )
}
-export default AudioPlayer
\ No newline at end of file
+export default AudioPlayer
diff --git a/packages/app/src/pages/@mobile-views/player/index.less b/packages/app/src/pages/@mobile-views/player/index.less
index 40cf24cc..880817ce 100755
--- a/packages/app/src/pages/@mobile-views/player/index.less
+++ b/packages/app/src/pages/@mobile-views/player/index.less
@@ -1,199 +1,184 @@
@top_controls_height: 55px;
-.mobile_media_player_wrapper {
- position: relative;
+.mobile-player_wrapper {
+ position: relative;
- z-index: 320;
+ z-index: 320;
- display: flex;
+ display: flex;
- flex-direction: column;
+ flex-direction: column;
- height: 100%;
- width: 100%;
+ height: 100%;
+ width: 100%;
- .mobile_media_player_background {
- position: absolute;
+ margin-bottom: 30px;
- z-index: 320;
+ .mobile-player_background {
+ position: absolute;
- top: 0;
- left: 0;
+ z-index: 320;
- width: 100%;
- height: 100%;
+ top: 0;
+ left: 0;
- background-color: rgba(var(--cover_averageValues), 0.4);
- }
+ width: 100%;
+ height: 100%;
+
+ background-color: rgba(var(--cover_averageValues), 0.4);
+ }
}
-.mobile_media_player {
- position: relative;
+.mobile-player {
+ position: relative;
- display: inline-flex;
- flex-direction: column;
+ display: inline-flex;
+ flex-direction: column;
- align-items: flex-start;
- justify-content: center;
+ align-items: flex-start;
+ justify-content: center;
- width: 100%;
- height: 100%;
+ width: 100%;
+ height: 100%;
- gap: 10px;
+ gap: 10px;
- transition: all 150ms ease-out;
+ transition: all 150ms ease-out;
- z-index: 330;
+ z-index: 330;
- .service_indicator {
- color: var(--text-color);
+ .service_indicator {
+ color: var(--text-color);
- background-color: var(--background-color-accent);
- padding: 7px;
- border-radius: 8px;
+ background-color: var(--background-color-accent);
+ padding: 7px;
+ border-radius: 8px;
- font-size: 0.9rem;
- }
+ font-size: 0.9rem;
+ }
- .cover {
- position: relative;
+ .mobile-player-cover {
+ position: relative;
- z-index: 320;
+ z-index: 320;
- margin: auto;
+ margin: auto;
- width: 100%;
- height: 100%;
+ width: 100%;
+ height: 100%;
- min-height: 40vh;
- min-width: 100%;
+ min-height: 40vh;
+ min-width: 100%;
- border-radius: 24px;
+ border-radius: 24px;
- background-position: center;
- background-size: cover;
- background-repeat: no-repeat;
+ background-position: center;
+ background-size: cover;
+ background-repeat: no-repeat;
- transition: all 0.3s ease-in-out;
+ transition: all 0.3s ease-in-out;
- img {
- width: 100%;
- height: 100%;
+ img {
+ width: 100%;
+ height: 100%;
- object-fit: cover;
- object-position: center;
- }
- }
+ object-fit: cover;
+ object-position: center;
+ }
+ }
- .header {
- position: relative;
+ .mobile-player-header {
+ position: relative;
- display: flex;
- flex-direction: row;
+ display: flex;
+ flex-direction: row;
- width: 100%;
+ width: 100%;
- .info {
- display: flex;
- flex-direction: column;
+ .mobile-player-info {
+ display: flex;
+ flex-direction: column;
- h1,
- h2,
- h3,
- h4,
- h5,
- h6,
- p,
- span {
- margin: 0;
- color: var(--text-color);
- }
+ h1,
+ h2,
+ h3,
+ h4,
+ h5,
+ h6,
+ p,
+ span {
+ margin: 0;
+ color: var(--text-color);
+ }
- width: 100%;
+ width: 100%;
- .title {
- font-size: 1rem;
- font-weight: 600;
- color: var(--text-color);
+ .mobile-player-info-title {
+ display: flex;
+ flex-direction: row;
- word-break: break-all;
+ align-items: center;
- font-family: "Space Grotesk", sans-serif;
- }
+ font-size: 1rem;
+ font-weight: 600;
- .subTitle {
- display: flex;
- flex-direction: row;
+ word-break: break-all;
- width: 100%;
+ font-family: "Space Grotesk", sans-serif;
+ }
- justify-content: space-between;
+ .mobile-player-info-subTitle {
+ display: flex;
+ flex-direction: row;
- .likeButton {
- margin-right: 20px;
- }
+ align-items: center;
+ justify-content: space-between;
- .artist {
- font-size: 0.6rem;
- font-weight: 400;
- color: var(--text-color);
- }
- }
- }
- }
+ font-size: 0.7rem;
+ font-weight: 400;
+ }
+ }
+ }
- .player-controls {
- .ant-btn {
- min-width: 40px !important;
- min-height: 40px !important;
- }
+ .player-controls {
+ .ant-btn {
+ min-width: 40px !important;
+ min-height: 40px !important;
+ }
- svg {
- font-size: 1.2rem;
- }
+ svg {
+ font-size: 1.2rem;
+ }
- .playButton {
- min-width: 50px !important;
- min-height: 50px !important;
+ .playButton {
+ min-width: 50px !important;
+ min-height: 50px !important;
- svg {
- font-size: 1.6rem;
- }
- }
- }
+ svg {
+ font-size: 1.6rem;
+ }
+ }
+ }
- .player-seek_bar {
- .progress {
- .MuiSlider-root {
- .MuiSlider-rail {
- height: 7px;
- }
+ .player-seek_bar {
+ .progress {
+ .MuiSlider-root {
+ .MuiSlider-rail {
+ height: 7px;
+ }
- .MuiSlider-track {
- height: 7px;
- }
+ .MuiSlider-track {
+ height: 7px;
+ }
- .MuiSlider-thumb {
- width: 5px;
- height: 13px;
- border-radius: 2px;
+ .MuiSlider-thumb {
+ width: 5px;
+ height: 13px;
+ border-radius: 2px;
- background-color: var(--background-color-contrast);
- }
- }
- }
- }
-
- .extra_actions {
- padding: 0 30px;
-
- .ant-btn {
- padding: 5px;
-
- svg {
- height: 23px;
- min-width: 23px;
- }
- }
- }
-}
\ No newline at end of file
+ background-color: var(--background-color-contrast);
+ }
+ }
+ }
+ }
+}
diff --git a/packages/app/src/pages/_debug/audiometadata/index.jsx b/packages/app/src/pages/_debug/audiometadata/index.jsx
new file mode 100644
index 00000000..c9aea4c8
--- /dev/null
+++ b/packages/app/src/pages/_debug/audiometadata/index.jsx
@@ -0,0 +1,47 @@
+import TrackManifest from "@cores/player/classes/TrackManifest"
+
+const D_Manifest = () => {
+ const [manifest, setManifest] = React.useState(null)
+
+ function selectLocalFile() {
+ const input = document.createElement("input")
+ input.type = "file"
+ input.accept = "audio/*"
+ input.onchange = (e) => {
+ loadManifest(e.target.files[0])
+ }
+ input.click()
+ }
+
+ async function loadManifest(file) {
+ let track = new TrackManifest({ file: file })
+
+ await track.initialize()
+
+ console.log(track)
+
+ setManifest(track)
+ }
+
+ return (
+
+
Select a local file to view & create a track manifest
+
+
+
+ {manifest?.cover && (
+

+ )}
+
+
+ {JSON.stringify(manifest)}
+
+
+ )
+}
+
+export default D_Manifest
diff --git a/packages/app/src/pages/lyrics/components/controller/index.jsx b/packages/app/src/pages/lyrics/components/controller/index.jsx
index 94265cf3..116c287d 100644
--- a/packages/app/src/pages/lyrics/components/controller/index.jsx
+++ b/packages/app/src/pages/lyrics/components/controller/index.jsx
@@ -177,7 +177,7 @@ const PlayerController = React.forwardRef((props, ref) => {
)}
- {playerState.track_manifest?.metadata.lossless && (
+ {playerState.track_manifest?.metadata?.lossless && (
{
- const [playerState] = usePlayerStateContext()
+ const [playerState] = usePlayerStateContext()
- const { lyrics } = props
+ const { lyrics } = props
- const [initialLoading, setInitialLoading] = React.useState(true)
- const [syncInterval, setSyncInterval] = React.useState(null)
- const [syncingVideo, setSyncingVideo] = React.useState(false)
- const [currentVideoLatency, setCurrentVideoLatency] = React.useState(0)
- const hls = React.useRef(new HLS())
+ const [initialLoading, setInitialLoading] = React.useState(true)
+ const [syncInterval, setSyncInterval] = React.useState(null)
+ const [syncingVideo, setSyncingVideo] = React.useState(false)
+ const [currentVideoLatency, setCurrentVideoLatency] = React.useState(0)
+ const hls = React.useRef(new HLS())
- async function seekVideoToSyncAudio() {
- if (!lyrics) {
- return null
- }
+ async function seekVideoToSyncAudio() {
+ if (!lyrics) {
+ return null
+ }
- if (!lyrics.video_source || typeof lyrics.sync_audio_at_ms === "undefined") {
- return null
- }
+ if (
+ !lyrics.video_source ||
+ typeof lyrics.sync_audio_at_ms === "undefined"
+ ) {
+ return null
+ }
- const currentTrackTime = app.cores.player.controls.seek()
+ const currentTrackTime = app.cores.player.controls.seek()
- setSyncingVideo(true)
+ setSyncingVideo(true)
- let newTime = currentTrackTime + (lyrics.sync_audio_at_ms / 1000) + app.cores.player.gradualFadeMs / 1000
+ let newTime =
+ currentTrackTime + lyrics.sync_audio_at_ms / 1000 + 150 / 1000
- // dec some ms to ensure the video seeks correctly
- newTime -= 5 / 1000
+ // subtract a few ms to ensure the video seeks correctly
+ newTime -= 5 / 1000
- videoRef.current.currentTime = newTime
- }
+ videoRef.current.currentTime = newTime
+ }
- async function syncPlayback() {
- // if something is wrong, stop syncing
- if (videoRef.current === null || !lyrics || !lyrics.video_source || typeof lyrics.sync_audio_at_ms === "undefined" || playerState.playback_status !== "playing") {
- return stopSyncInterval()
- }
+ async function syncPlayback() {
+ // if something is wrong, stop syncing
+ if (
+ videoRef.current === null ||
+ !lyrics ||
+ !lyrics.video_source ||
+ typeof lyrics.sync_audio_at_ms === "undefined" ||
+ playerState.playback_status !== "playing"
+ ) {
+ return stopSyncInterval()
+ }
- const currentTrackTime = app.cores.player.controls.seek()
- const currentVideoTime = videoRef.current.currentTime - (lyrics.sync_audio_at_ms / 1000)
+ const currentTrackTime = app.cores.player.controls.seek()
+ const currentVideoTime =
+ videoRef.current.currentTime - lyrics.sync_audio_at_ms / 1000
- //console.log(`Current track time: ${currentTrackTime}, current video time: ${currentVideoTime}`)
+ //console.log(`Current track time: ${currentTrackTime}, current video time: ${currentVideoTime}`)
- const maxOffset = maxLatencyInMs / 1000
- const currentVideoTimeDiff = Math.abs(currentVideoTime - currentTrackTime)
+ const maxOffset = maxLatencyInMs / 1000
+ const currentVideoTimeDiff = Math.abs(
+ currentVideoTime - currentTrackTime,
+ )
- setCurrentVideoLatency(currentVideoTimeDiff)
+ setCurrentVideoLatency(currentVideoTimeDiff)
- if (syncingVideo === true) {
- return false
- }
+ if (syncingVideo === true) {
+ return false
+ }
- if (currentVideoTimeDiff > maxOffset) {
- seekVideoToSyncAudio()
- }
- }
+ if (currentVideoTimeDiff > maxOffset) {
+ seekVideoToSyncAudio()
+ }
+ }
- function startSyncInterval() {
- setSyncInterval(setInterval(syncPlayback, 300))
- }
+ function startSyncInterval() {
+ setSyncInterval(setInterval(syncPlayback, 300))
+ }
- function stopSyncInterval() {
- setSyncingVideo(false)
- setSyncInterval(null)
- clearInterval(syncInterval)
- }
+ function stopSyncInterval() {
+ setSyncingVideo(false)
+ setSyncInterval(null)
+ clearInterval(syncInterval)
+ }
- //* handle when player is loading
- React.useEffect(() => {
- if (lyrics?.video_source && playerState.loading === true && playerState.playback_status === "playing") {
- videoRef.current.pause()
- }
+ //* handle when player is loading
+ React.useEffect(() => {
+ if (
+ lyrics?.video_source &&
+ playerState.loading === true &&
+ playerState.playback_status === "playing"
+ ) {
+ videoRef.current.pause()
+ }
- if (lyrics?.video_source && playerState.loading === false && playerState.playback_status === "playing") {
- videoRef.current.play()
- }
- }, [playerState.loading])
+ if (
+ lyrics?.video_source &&
+ playerState.loading === false &&
+ playerState.playback_status === "playing"
+ ) {
+ videoRef.current.play()
+ }
+ }, [playerState.loading])
- //* Handle when playback status change
- React.useEffect(() => {
- if (initialLoading === false) {
- console.log(`VIDEO:: Playback status changed to ${playerState.playback_status}`)
+ //* Handle when playback status change
+ React.useEffect(() => {
+ if (initialLoading === false) {
+ console.log(
+ `VIDEO:: Playback status changed to ${playerState.playback_status}`,
+ )
- if (lyrics && lyrics.video_source) {
- if (playerState.playback_status === "playing") {
- videoRef.current.play()
- startSyncInterval()
- } else {
- videoRef.current.pause()
- stopSyncInterval()
- }
- }
- }
- }, [playerState.playback_status])
+ if (lyrics && lyrics.video_source) {
+ if (playerState.playback_status === "playing") {
+ videoRef.current.play()
+ startSyncInterval()
+ } else {
+ videoRef.current.pause()
+ stopSyncInterval()
+ }
+ }
+ }
+ }, [playerState.playback_status])
- //* Handle when lyrics object change
- React.useEffect(() => {
- setCurrentVideoLatency(0)
- stopSyncInterval()
+ //* Handle when lyrics object change
+ React.useEffect(() => {
+ setCurrentVideoLatency(0)
+ stopSyncInterval()
- if (lyrics) {
- if (lyrics.video_source) {
- console.log("Loading video source >", lyrics.video_source)
+ if (lyrics) {
+ if (lyrics.video_source) {
+ console.log("Loading video source >", lyrics.video_source)
- if (lyrics.video_source.endsWith(".mp4")) {
- videoRef.current.src = lyrics.video_source
- } else {
- hls.current.loadSource(lyrics.video_source)
- }
+ if (lyrics.video_source.endsWith(".mp4")) {
+ videoRef.current.src = lyrics.video_source
+ } else {
+ hls.current.loadSource(lyrics.video_source)
+ }
- if (typeof lyrics.sync_audio_at_ms !== "undefined") {
- videoRef.current.loop = false
- videoRef.current.currentTime = lyrics.sync_audio_at_ms / 1000
+ if (typeof lyrics.sync_audio_at_ms !== "undefined") {
+ videoRef.current.loop = false
+ videoRef.current.currentTime =
+ lyrics.sync_audio_at_ms / 1000
- startSyncInterval()
- } else {
- videoRef.current.loop = true
- videoRef.current.currentTime = 0
- }
+ startSyncInterval()
+ } else {
+ videoRef.current.loop = true
+ videoRef.current.currentTime = 0
+ }
- if (playerState.playback_status === "playing") {
- videoRef.current.play()
- }
- }
- }
+ if (playerState.playback_status === "playing") {
+ videoRef.current.play()
+ }
+ }
+ }
- setInitialLoading(false)
- }, [lyrics])
+ setInitialLoading(false)
+ }, [lyrics])
- React.useEffect(() => {
- videoRef.current.addEventListener("seeked", (event) => {
- setSyncingVideo(false)
- })
+ React.useEffect(() => {
+ videoRef.current.addEventListener("seeked", (event) => {
+ setSyncingVideo(false)
+ })
- hls.current.attachMedia(videoRef.current)
+ hls.current.attachMedia(videoRef.current)
- return () => {
- stopSyncInterval()
- }
- }, [])
+ return () => {
+ stopSyncInterval()
+ }
+ }, [])
- return <>
- {
- props.lyrics?.sync_audio_at &&
-
-
Maximun latency
-
{maxLatencyInMs}ms
-
-
-
Video Latency
-
{(currentVideoLatency * 1000).toFixed(2)}ms
-
- {syncingVideo ?
Syncing video...
: null}
-
- }
+ return (
+ <>
+ {props.lyrics?.sync_audio_at && (
+
+
+
Maximum latency
+
{maxLatencyInMs}ms
+
+
+
Video Latency
+
{(currentVideoLatency * 1000).toFixed(2)}ms
+
+ {syncingVideo ?
Syncing video...
: null}
+
+ )}
-
- >
+
+ >
+ )
})
-export default LyricsVideo
\ No newline at end of file
+export default LyricsVideo
diff --git a/packages/app/src/pages/lyrics/index.jsx b/packages/app/src/pages/lyrics/index.jsx
index 3f0428ad..320ca842 100644
--- a/packages/app/src/pages/lyrics/index.jsx
+++ b/packages/app/src/pages/lyrics/index.jsx
@@ -89,7 +89,8 @@ const EnhancedLyricsPage = () => {
// Track manifest comparison
useEffect(() => {
- const newManifest = playerState.track_manifest?.toSeriableObject()
+ const newManifest = playerState.track_manifest
+
if (JSON.stringify(newManifest) !== JSON.stringify(trackManifest)) {
setTrackManifest(newManifest)
}
diff --git a/packages/app/src/settings/player/index.jsx b/packages/app/src/settings/player/index.jsx
index b8c3860c..f6bdb4fe 100755
--- a/packages/app/src/settings/player/index.jsx
+++ b/packages/app/src/settings/player/index.jsx
@@ -11,33 +11,33 @@ export default {
}
},
settings: [
- {
- id: "player.gain",
- title: "Gain",
- icon: "MdGraphicEq",
- group: "general",
- description: "Adjust gain for audio output",
- component: "Slider",
- props: {
- min: 1,
- max: 2,
- step: 0.1,
- marks: {
- 1: "Normal",
- 1.5: "+50%",
- 2: "+100%",
- },
- },
- defaultValue: () => {
- return app.cores.player.gain.values().gain
- },
- onUpdate: (value) => {
- app.cores.player.gain.modifyValues({
- gain: value,
- })
- },
- storaged: false,
- },
+ // {
+ // id: "player.gain",
+ // title: "Gain",
+ // icon: "MdGraphicEq",
+ // group: "general",
+ // description: "Adjust gain for audio output",
+ // component: "Slider",
+ // props: {
+ // min: 1,
+ // max: 2,
+ // step: 0.1,
+ // marks: {
+ // 1: "Normal",
+ // 1.5: "+50%",
+ // 2: "+100%",
+ // },
+ // },
+ // defaultValue: () => {
+ // return app.cores.player.gain.values().gain
+ // },
+ // onUpdate: (value) => {
+ // app.cores.player.gain.modifyValues({
+ // gain: value,
+ // })
+ // },
+ // storaged: false,
+ // },
{
id: "player.sample_rate",
title: "Sample Rate",
@@ -66,7 +66,7 @@ export default {
],
},
defaultValue: (ctx) => {
- return app.cores.player.audioContext.sampleRate
+ return app.cores.player.base().context.sampleRate
},
onUpdate: async (value) => {
const sampleRate =
@@ -94,10 +94,10 @@ export default {
onEnabledChange: (enabled) => {
if (enabled === true) {
app.cores.settings.set("player.compressor", true)
- app.cores.player.compressor.attach()
+ //app.cores.player.compressor.attach()
} else {
app.cores.settings.set("player.compressor", false)
- app.cores.player.compressor.detach()
+ //app.cores.player.compressor.detach()
}
},
extraActions: [
@@ -106,8 +106,9 @@ export default {
title: "Default",
icon: "MdRefresh",
onClick: async (ctx) => {
- const values =
- await app.cores.player.compressor.presets.setCurrentPresetToDefault()
+ const values = await app.cores.player
+ .base()
+ .processors.compressor.presets.setCurrentPresetToDefault()
ctx.updateCurrentValue(values)
},
@@ -152,13 +153,14 @@ export default {
],
},
onUpdate: (value) => {
- app.cores.player.compressor.presets.setToCurrent(value)
+ app.cores.player
+ .base()
+ .processors.compressor.presets.setToCurrent(value)
return value
},
storaged: false,
},
-
{
id: "player.eq",
title: "Equalizer",
@@ -172,8 +174,9 @@ export default {
title: "Reset",
icon: "MdRefresh",
onClick: (ctx) => {
- const values =
- app.cores.player.eq.presets.setCurrentPresetToDefault()
+ const values = app.cores.player
+ .base()
+ .processors.eq.presets.setCurrentPresetToDefault()
ctx.updateCurrentValue(values)
},
@@ -260,7 +263,9 @@ export default {
return acc
}, {})
- app.cores.player.eq.presets.setToCurrent(values)
+ app.cores.player
+ .base()
+ .processors.eq.presets.setToCurrent(values)
return value
},
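
Settings handlers now reach the audio processors through the player core's base() getter rather than the removed per-processor shortcuts. A compact sketch of the new access path used above (the preset values passed here are placeholders):

// Sketch of the access path the settings handlers use after this change.
const base = app.cores.player.base()

// the shared AudioContext now lives on the base
const sampleRate = base.context.sampleRate

// EQ / compressor presets hang off base.processors.<refName>.presets
base.processors.compressor.presets.setCurrentPresetToDefault()
base.processors.eq.presets.setToCurrent({ 32: 0, 64: -2 }) // placeholder values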
diff --git a/packages/app/src/settings/player/items/player.compressor/index.jsx b/packages/app/src/settings/player/items/player.compressor/index.jsx
index 395c5d81..b10edac8 100755
--- a/packages/app/src/settings/player/items/player.compressor/index.jsx
+++ b/packages/app/src/settings/player/items/player.compressor/index.jsx
@@ -2,13 +2,11 @@ import { Switch } from "antd"
import SlidersWithPresets from "../../../components/slidersWithPresets"
export default (props) => {
- return
- ]}
- />
-}
\ No newline at end of file
+ return (
+ ]}
+ />
+ )
+}
diff --git a/packages/app/src/settings/player/items/player.eq/index.jsx b/packages/app/src/settings/player/items/player.eq/index.jsx
index 7cc00972..060f221e 100755
--- a/packages/app/src/settings/player/items/player.eq/index.jsx
+++ b/packages/app/src/settings/player/items/player.eq/index.jsx
@@ -1,8 +1,10 @@
import SlidersWithPresets from "../../../components/slidersWithPresets"
export default (props) => {
- return
-}
\ No newline at end of file
+ return (
+
+ )
+}
diff --git a/packages/app/src/utils/compareObjectsByProperties/index.js b/packages/app/src/utils/compareObjectsByProperties/index.js
new file mode 100644
index 00000000..e8e95b63
--- /dev/null
+++ b/packages/app/src/utils/compareObjectsByProperties/index.js
@@ -0,0 +1,99 @@
+export default function compareObjectsByProperties(obj1, obj2, props) {
+ // validate that obj1 and obj2 are objects
+ if (
+ !obj1 ||
+ !obj2 ||
+ typeof obj1 !== "object" ||
+ typeof obj2 !== "object"
+ ) {
+ return false
+ }
+
+ // validate that props is an array
+ if (!Array.isArray(props)) {
+ throw new Error("The props parameter must be an array")
+ }
+
+ // iterate through each property and compare
+ for (const prop of props) {
+ // check if the property exists in both objects
+ const prop1Exists = prop in obj1
+ const prop2Exists = prop in obj2
+
+ // if the property exists in only one of the objects
+ if (prop1Exists !== prop2Exists) {
+ return false
+ }
+
+ // if the property exists in both, compare values
+ if (prop1Exists && prop2Exists) {
+ // for nested objects, perform deep comparison
+ if (
+ typeof obj1[prop] === "object" &&
+ obj1[prop] !== null &&
+ typeof obj2[prop] === "object" &&
+ obj2[prop] !== null
+ ) {
+ // compare arrays
+ if (Array.isArray(obj1[prop]) && Array.isArray(obj2[prop])) {
+ if (obj1[prop].length !== obj2[prop].length) {
+ return false
+ }
+
+ for (let i = 0; i < obj1[prop].length; i++) {
+ // if elements are objects, call recursively
+ if (
+ typeof obj1[prop][i] === "object" &&
+ typeof obj2[prop][i] === "object"
+ ) {
+ // get all properties of the object
+ const nestedProps = [
+ ...new Set([
+ ...Object.keys(obj1[prop][i]),
+ ...Object.keys(obj2[prop][i]),
+ ]),
+ ]
+
+ if (
+ !compareObjectsByProperties(
+ obj1[prop][i],
+ obj2[prop][i],
+ nestedProps,
+ )
+ ) {
+ return false
+ }
+ } else if (obj1[prop][i] !== obj2[prop][i]) {
+ return false
+ }
+ }
+ }
+ // compare objects
+ else {
+ const nestedProps = [
+ ...new Set([
+ ...Object.keys(obj1[prop]),
+ ...Object.keys(obj2[prop]),
+ ]),
+ ]
+
+ if (
+ !compareObjectsByProperties(
+ obj1[prop],
+ obj2[prop],
+ nestedProps,
+ )
+ ) {
+ return false
+ }
+ }
+ }
+ // for primitive values, compare directly
+ else if (obj1[prop] !== obj2[prop]) {
+ return false
+ }
+ }
+ }
+
+ return true
+}
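
A short usage example for the new helper (the manifest objects are illustrative):

import compareObjectsByProperties from "@utils/compareObjectsByProperties"

// Illustrative usage: compare two manifests only on the fields that matter.
const prev = { _id: "1", title: "Song", metadata: { codec: "FLAC" } }
const next = { _id: "1", title: "Song", metadata: { codec: "FLAC" } }

compareObjectsByProperties(prev, next, ["_id", "title", "metadata"]) // true
compareObjectsByProperties(prev, { ...next, title: "Other" }, ["title"]) // false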
diff --git a/packages/server/.swcrc b/packages/server/.swcrc
deleted file mode 100644
index 4d32447a..00000000
--- a/packages/server/.swcrc
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "$schema": "http://json.schemastore.org/swcrc",
- "exclude":[
- "node_modules/minio/**",
- "node_modules/@octokit/**"
- ],
- "module": {
- "type": "commonjs",
- // These are defaults.
- "strict": false,
- "strictMode": true,
- "lazy": false,
- "noInterop": false
- }
-}
\ No newline at end of file
diff --git a/packages/server/Dockerfile b/packages/server/Dockerfile
index ab684e19..0373ffe9 100755
--- a/packages/server/Dockerfile
+++ b/packages/server/Dockerfile
@@ -7,6 +7,7 @@ RUN apt install -y --no-install-recommends build-essential
RUN apt install -y --no-install-recommends git
RUN apt install -y --no-install-recommends ssh
RUN apt install -y --no-install-recommends curl
+RUN apt install -y --no-install-recommends nscd
RUN apt install -y --no-install-recommends ca-certificates
RUN apt install -y --no-install-recommends ffmpeg
diff --git a/packages/server/boot b/packages/server/boot
index 618447af..bf184b5d 100755
--- a/packages/server/boot
+++ b/packages/server/boot
@@ -8,17 +8,6 @@ const { Buffer } = require("node:buffer")
const { webcrypto: crypto } = require("node:crypto")
const { InfisicalClient } = require("@infisical/sdk")
const moduleAlias = require("module-alias")
-const { onExit } = require("signal-exit")
-const opentelemetry = require("@opentelemetry/sdk-node")
-const {
- getNodeAutoInstrumentations,
-} = require("@opentelemetry/auto-instrumentations-node")
-const { OTLPTraceExporter } = require("@opentelemetry/exporter-trace-otlp-http")
-const { OTLPLogExporter } = require("@opentelemetry/exporter-logs-otlp-http")
-const { Resource } = require("@opentelemetry/resources")
-const {
- SemanticResourceAttributes,
-} = require("@opentelemetry/semantic-conventions")
// Override file execution arg
process.argv.splice(1, 1)
@@ -164,34 +153,12 @@ async function Boot(main) {
throw new Error("main class is not defined")
}
- const service_id = process.env.lb_service.id
+ const { lb_service_id } = process.env
console.log(
- `[BOOT] Booting (${service_id}) in [${global.isProduction ? "production" : "development"}] mode...`,
+ `[BOOT] Booting in [${global.isProduction ? "production" : "development"}] mode...`,
)
- const traceExporter = new OTLPTraceExporter({
- url:
- process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT ??
- "http://fr02.ragestudio.net:4318/v1/traces",
- })
- const logExporter = new OTLPLogExporter({
- url:
- process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT ??
- "http://fr02.ragestudio.net:4318/v1/logs",
- })
-
- const sdk = new opentelemetry.NodeSDK({
- traceExporter,
- logExporter,
- instrumentations: [getNodeAutoInstrumentations()],
- resource: new Resource({
- [SemanticResourceAttributes.SERVICE_NAME]: service_id ?? "node_app",
- }),
- })
-
- sdk.start()
-
if (
process.env.INFISICAL_CLIENT_ID &&
process.env.INFISICAL_CLIENT_SECRET
@@ -204,30 +171,35 @@ async function Boot(main) {
const instance = new main()
- onExit(
- (code, signal) => {
- console.log(`[BOOT] Cleaning up...`)
-
- sdk.shutdown()
- .then(() => console.log("Tracing terminated"))
- .catch((error) =>
- console.log("Error terminating tracing", error),
- )
+ process.on("exit", (code) => {
+ console.log(`[BOOT] Closing...`)
+ if (instance._fireClose) {
+ instance._fireClose()
+ } else {
if (typeof instance.onClose === "function") {
instance.onClose()
}
instance.engine.close()
- },
- {
- alwaysLast: true,
- },
- )
+ }
+ })
+
+ process.on("SIGTERM", () => {
+ process.exit(0)
+ })
+
+ process.on("SIGINT", () => {
+ process.exit(0)
+ })
await instance.initialize()
- if (process.env.lb_service && process.send) {
+ if (lb_service_id && process.send) {
process.send({
status: "ready",
})
diff --git a/packages/server/classes/ChunkFileUpload/index.js b/packages/server/classes/ChunkFileUpload/index.js
index 98ea5a1e..e648a11c 100755
--- a/packages/server/classes/ChunkFileUpload/index.js
+++ b/packages/server/classes/ChunkFileUpload/index.js
@@ -107,8 +107,7 @@ export async function handleChunkFile(
{ tmpDir, headers, maxFileSize, maxChunkSize },
) {
return await new Promise(async (resolve, reject) => {
- const workPath = path.join(tmpDir, headers["uploader-file-id"])
- const chunksPath = path.join(workPath, "chunks")
+ const chunksPath = path.join(tmpDir, "chunks")
const chunkPath = path.join(
chunksPath,
headers["uploader-chunk-number"],
@@ -188,7 +187,7 @@ export async function handleChunkFile(
// build data
chunksPath: chunksPath,
filePath: path.resolve(
- workPath,
+ tmpDir,
`${filename}.${extension}`,
),
maxFileSize: maxFileSize,
@@ -207,38 +206,4 @@ export async function handleChunkFile(
})
}
-export async function uploadChunkFile(
- req,
- { tmpDir, maxFileSize, maxChunkSize },
-) {
- // create a readable stream from req.body data blob
- //
- const chunkData = new Blob([req.body], { type: "application/octet-stream" })
-
- console.log(chunkData)
-
- if (!checkChunkUploadHeaders(req.headers)) {
- reject(new OperationError(400, "Missing header(s)"))
- return
- }
-
- // return await new Promise(async (resolve, reject) => {
- // // create a readable node stream from "req.body" (octet-stream)
- // await req.multipart(async (field) => {
- // try {
- // const result = await handleChunkFile(field.file.stream, {
- // tmpDir: tmpDir,
- // headers: req.headers,
- // maxFileSize: maxFileSize,
- // maxChunkSize: maxChunkSize,
- // })
-
- // return resolve(result)
- // } catch (error) {
- // return reject(error)
- // }
- // })
- // })
-}
-
export default uploadChunkFile
diff --git a/packages/server/classes/FFMPEGLib/index.js b/packages/server/classes/FFMPEGLib/index.js
new file mode 100644
index 00000000..f0b67b72
--- /dev/null
+++ b/packages/server/classes/FFMPEGLib/index.js
@@ -0,0 +1,110 @@
+import { EventEmitter } from "node:events"
+import child_process from "node:child_process"
+
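+// resolves the absolute path of a binary using `which`; returns null if it is not installed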
+function getBinaryPath(name) {
+ try {
+ return child_process
+ .execSync(`which ${name}`, { encoding: "utf8" })
+ .trim()
+ } catch (error) {
+ return null
+ }
+}
+
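+// thin EventEmitter wrapper around the system ffmpeg and ffprobe binaries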
+export class FFMPEGLib extends EventEmitter {
+ constructor() {
+ super()
+ this.ffmpegBin = getBinaryPath("ffmpeg")
+ this.ffprobeBin = getBinaryPath("ffprobe")
+ }
+
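+ // parses "-progress pipe:1" output (out_time_ms / progress lines) and reports a 0-100 percentage through onProgress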
+ handleProgress(stdout, endTime, onProgress = () => {}) {
+ let currentTime = 0
+
+ stdout.on("data", (data) => {
+ for (const line of data.toString().split("\n")) {
+ if (line.startsWith("out_time_ms=")) {
+ currentTime = parseInt(line.split("=")[1]) / 1000000
+ } else if (line.startsWith("progress=")) {
+ const status = line.split("=")[1]
+
+ if (status === "end") {
+ onProgress(100)
+ } else if (endTime > 0 && currentTime > 0) {
+ onProgress(
+ Math.min(
+ 100,
+ Math.round((currentTime / endTime) * 100),
+ ),
+ )
+ }
+ }
+ }
+ })
+ }
+
+ ffmpeg(payload) {
+ return this.exec(this.ffmpegBin, payload)
+ }
+
+ ffprobe(payload) {
+ return this.exec(this.ffprobeBin, payload)
+ }
+
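+ // runs the binary with the given args, resolving with stdout or rejecting with stderr; the spawned child is exposed via onProcess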
+ exec(bin, { args, onProcess, cwd }) {
+ if (Array.isArray(args)) {
+ args = args.join(" ")
+ }
+
+ return new Promise((resolve, reject) => {
+ const process = child_process.exec(
+ `${bin} ${args}`,
+ {
+ cwd: cwd,
+ },
+ (error, stdout, stderr) => {
+ if (error) {
+ reject(stderr)
+ } else {
+ resolve(stdout.toString())
+ }
+ },
+ )
+
+ if (typeof onProcess === "function") {
+ onProcess(process)
+ }
+ })
+ }
+}
+
+export class Utils {
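+ // runs ffprobe with JSON output and returns the parsed format/stream info, or null on failure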
+ static async probe(input) {
+ const lib = new FFMPEGLib()
+
+ const result = await lib
+ .ffprobe({
+ args: [
+ "-v",
+ "error",
+ "-print_format",
+ "json",
+ "-show_format",
+ "-show_streams",
+ input,
+ ],
+ })
+ .catch((err) => {
+ console.log(err)
+ return null
+ })
+
+ if (!result) {
+ return null
+ }
+
+ return JSON.parse(result)
+ }
+}
+
+export default FFMPEGLib
diff --git a/packages/server/classes/MultiqualityHLSJob/index.js b/packages/server/classes/MultiqualityHLSJob/index.js
index 0bbf0e74..55eb1d28 100644
--- a/packages/server/classes/MultiqualityHLSJob/index.js
+++ b/packages/server/classes/MultiqualityHLSJob/index.js
@@ -1,147 +1,138 @@
import fs from "node:fs"
import path from "node:path"
-import { exec } from "node:child_process"
-import { EventEmitter } from "node:events"
-export default class MultiqualityHLSJob {
- constructor({
- input,
- outputDir,
- outputMasterName = "master.m3u8",
- levels,
- }) {
- this.input = input
- this.outputDir = outputDir
- this.levels = levels
- this.outputMasterName = outputMasterName
+import { FFMPEGLib, Utils } from "../FFMPEGLib"
- this.bin = require("ffmpeg-static")
+export default class MultiqualityHLSJob extends FFMPEGLib {
+ constructor(params = {}) {
+ super()
- return this
- }
+ this.params = {
+ outputMasterName: "master.m3u8",
+ levels: [
+ {
+ original: true,
+ codec: "libx264",
+ bitrate: "10M",
+ preset: "ultrafast",
+ },
+ ],
+ ...params,
+ }
+ }
- events = new EventEmitter()
+ buildArgs = () => {
+ const cmdStr = [
+ `-v error -hide_banner -progress pipe:1`,
+ `-i ${this.params.input}`,
+ `-filter_complex`,
+ ]
- buildCommand = () => {
- const cmdStr = [
- this.bin,
- `-v quiet -stats`,
- `-i ${this.input}`,
- `-filter_complex`,
- ]
+ // set split args
+ let splitLevels = [`[0:v]split=${this.params.levels.length}`]
- // set split args
- let splitLevels = [
- `[0:v]split=${this.levels.length}`
- ]
+ this.params.levels.forEach((level, i) => {
+ splitLevels[0] += `[v${i + 1}]`
+ })
- this.levels.forEach((level, i) => {
- splitLevels[0] += (`[v${i + 1}]`)
- })
+ for (const [index, level] of this.params.levels.entries()) {
+ if (level.original) {
+ splitLevels.push(`[v1]copy[v1out]`)
+ continue
+ }
- for (const [index, level] of this.levels.entries()) {
- if (level.original) {
- splitLevels.push(`[v1]copy[v1out]`)
- continue
- }
+ let scaleFilter = `[v${index + 1}]scale=w=${level.width}:h=trunc(ow/a/2)*2[v${index + 1}out]`
- let scaleFilter = `[v${index + 1}]scale=w=${level.width}:h=trunc(ow/a/2)*2[v${index + 1}out]`
+ splitLevels.push(scaleFilter)
+ }
- splitLevels.push(scaleFilter)
- }
+ cmdStr.push(`"${splitLevels.join(";")}"`)
- cmdStr.push(`"${splitLevels.join(";")}"`)
+ // set levels map
+ for (const [index, level] of this.params.levels.entries()) {
+ let mapArgs = [
+ `-map "[v${index + 1}out]"`,
+ `-x264-params "nal-hrd=cbr:force-cfr=1"`,
+ `-c:v:${index} ${level.codec}`,
+ `-b:v:${index} ${level.bitrate}`,
+ `-maxrate:v:${index} ${level.bitrate}`,
+ `-minrate:v:${index} ${level.bitrate}`,
+ `-bufsize:v:${index} ${level.bitrate}`,
+ `-preset ${level.preset}`,
+ `-g 48`,
+ `-sc_threshold 0`,
+ `-keyint_min 48`,
+ ]
- // set levels map
- for (const [index, level] of this.levels.entries()) {
- let mapArgs = [
- `-map "[v${index + 1}out]"`,
- `-x264-params "nal-hrd=cbr:force-cfr=1"`,
- `-c:v:${index} ${level.codec}`,
- `-b:v:${index} ${level.bitrate}`,
- `-maxrate:v:${index} ${level.bitrate}`,
- `-minrate:v:${index} ${level.bitrate}`,
- `-bufsize:v:${index} ${level.bitrate}`,
- `-preset ${level.preset}`,
- `-g 48`,
- `-sc_threshold 0`,
- `-keyint_min 48`,
- ]
+ cmdStr.push(...mapArgs)
+ }
- cmdStr.push(...mapArgs)
- }
+ // set output
+ cmdStr.push(`-f hls`)
+ cmdStr.push(`-hls_time 2`)
+ cmdStr.push(`-hls_playlist_type vod`)
+ cmdStr.push(`-hls_flags independent_segments`)
+ cmdStr.push(`-hls_segment_type mpegts`)
+ cmdStr.push(`-hls_segment_filename stream_%v/data%02d.ts`)
+ cmdStr.push(`-master_pl_name ${this.params.outputMasterName}`)
- // set output
- cmdStr.push(`-f hls`)
- cmdStr.push(`-hls_time 2`)
- cmdStr.push(`-hls_playlist_type vod`)
- cmdStr.push(`-hls_flags independent_segments`)
- cmdStr.push(`-hls_segment_type mpegts`)
- cmdStr.push(`-hls_segment_filename stream_%v/data%02d.ts`)
- cmdStr.push(`-master_pl_name ${this.outputMasterName}`)
+ cmdStr.push(`-var_stream_map`)
- cmdStr.push(`-var_stream_map`)
+ let streamMapVar = []
- let streamMapVar = []
+ for (const [index, level] of this.params.levels.entries()) {
+ streamMapVar.push(`v:${index}`)
+ }
- for (const [index, level] of this.levels.entries()) {
- streamMapVar.push(`v:${index}`)
- }
+ cmdStr.push(`"${streamMapVar.join(" ")}"`)
+ cmdStr.push(`"stream_%v/stream.m3u8"`)
- cmdStr.push(`"${streamMapVar.join(" ")}"`)
- cmdStr.push(`"stream_%v/stream.m3u8"`)
+ return cmdStr.join(" ")
+ }
- return cmdStr.join(" ")
- }
+ run = async () => {
+ const cmdStr = this.buildArgs()
- run = () => {
- const cmdStr = this.buildCommand()
+ const outputPath =
+ this.params.outputDir ??
+ path.join(path.dirname(this.params.input), "hls")
+ const outputFile = path.join(outputPath, this.params.outputMasterName)
- console.log(cmdStr)
+ this.emit("start", {
+ input: this.params.input,
+ output: outputPath,
+ params: this.params,
+ })
- const cwd = `${path.dirname(this.input)}/hls`
+ if (!fs.existsSync(outputPath)) {
+ fs.mkdirSync(outputPath, { recursive: true })
+ }
- if (!fs.existsSync(cwd)) {
- fs.mkdirSync(cwd, { recursive: true })
- }
+ const inputProbe = await Utils.probe(this.params.input)
- console.log(`[HLS] Started multiquality transcode`, {
- input: this.input,
- cwd: cwd,
- })
+ try {
+ const result = await this.ffmpeg({
+ args: cmdStr,
+ cwd: outputPath,
+ onProcess: (process) => {
+ this.handleProgress(
+ process.stdout,
+ parseFloat(inputProbe.format.duration),
+ (progress) => {
+ this.emit("progress", progress)
+ },
+ )
+ },
+ })
- const process = exec(
- cmdStr,
- {
- cwd: cwd,
- },
- (error, stdout, stderr) => {
- if (error) {
- console.log(`[HLS] Failed to transcode >`, error)
+ this.emit("end", {
+ outputPath: outputPath,
+ outputFile: outputFile,
+ })
- return this.events.emit("error", error)
- }
-
- if (stderr) {
- //return this.events.emit("error", stderr)
- }
-
- console.log(`[HLS] Finished transcode >`, cwd)
-
- return this.events.emit("end", {
- filepath: path.join(cwd, this.outputMasterName),
- isDirectory: true,
- })
- }
- )
-
- process.stdout.on("data", (data) => {
- console.log(data.toString())
- })
- }
-
- on = (key, cb) => {
- this.events.on(key, cb)
- return this
- }
-}
\ No newline at end of file
+ return result
+ } catch (err) {
+ return this.emit("error", err)
+ }
+ }
+}
diff --git a/packages/server/classes/RedisClient/index.js b/packages/server/classes/RedisClient/index.js
index 54813702..b2eecc05 100755
--- a/packages/server/classes/RedisClient/index.js
+++ b/packages/server/classes/RedisClient/index.js
@@ -1,79 +1,72 @@
import Redis from "ioredis"
-export function composeURL({
- host,
- port,
- username,
- password,
-} = {}) {
- let url = "redis://"
+export function composeURL({ host, port, username, password } = {}) {
+ let url = "redis://"
- if (username && password) {
- url += username + ":" + password + "@"
- }
+ if (username && password) {
+ url += username + ":" + password + "@"
+ }
- url += host ?? "localhost"
+ url += host ?? "localhost"
- if (port) {
- url += ":" + port
- }
+ if (port) {
+ url += ":" + port
+ }
- return url
+ return url
}
-export default () => {
- let { REDIS_HOST, REDIS_PORT, REDIS_NO_AUTH, REDIS_AUTH, REDIS_DB } = process.env
+export default (params = {}) => {
+ let { REDIS_HOST, REDIS_PORT, REDIS_NO_AUTH, REDIS_AUTH, REDIS_DB } =
+ process.env
- REDIS_NO_AUTH = ToBoolean(REDIS_NO_AUTH)
+ let clientOptions = {
+ host: REDIS_HOST ?? "localhost",
+ port: REDIS_PORT ?? 6379,
+ lazyConnect: true,
+ autoConnect: false,
+ ...params,
+ }
- let clientOptions = {
- host: REDIS_HOST,
- port: REDIS_PORT,
- lazyConnect: true,
- autoConnect: false
- }
+ // if redis auth is provided, set username and password
+ if (!ToBoolean(REDIS_NO_AUTH) && REDIS_AUTH) {
+ const [user, password] = REDIS_AUTH.split(":")
- if (!REDIS_NO_AUTH) {
- if (REDIS_AUTH) {
- const [user, password] = REDIS_AUTH.split(":")
+ clientOptions.username = user
+ clientOptions.password = password
+ } else {
+ console.log("⚠️ Redis auth is disabled")
+ }
- clientOptions.username = user
- clientOptions.password = password
- }
- } else {
- console.log("⚠️ Redis auth is disabled")
- }
+ // if redis db is provided, set db
+ if (REDIS_DB) {
+ clientOptions.db = REDIS_DB
+ }
- if (REDIS_DB) {
- clientOptions.db = REDIS_DB
- }
+ let client = new Redis(clientOptions)
- clientOptions = composeURL(clientOptions)
+ client.on("error", (error) => {
+ console.error("❌ Redis client error:", error)
+ })
- let client = new Redis(clientOptions.host, clientOptions.port, clientOptions)
+ client.on("connect", () => {
+ console.log(`✅ Redis client connected [${process.env.REDIS_HOST}]`)
+ })
- client.on("error", (error) => {
- console.error("❌ Redis client error:", error)
- })
+ client.on("reconnecting", () => {
+ console.log("🔄 Redis client reconnecting...")
+ })
- client.on("connect", () => {
- console.log(`✅ Redis client connected [${process.env.REDIS_HOST}]`)
- })
+ const initialize = async () => {
+ return await new Promise((resolve, reject) => {
+ console.log(`🔌 Connecting to Redis client [${REDIS_HOST}]`)
- client.on("reconnecting", () => {
- console.log("🔄 Redis client reconnecting...")
- })
+ client.connect(resolve)
+ })
+ }
- const initialize = async () => {
- return await new Promise((resolve, reject) => {
- console.log(`🔌 Connecting to Redis client [${REDIS_HOST}]`)
-
- client.connect(resolve)
- })
- }
-
- return {
- client,
- initialize
- }
-}
\ No newline at end of file
+ return {
+ client,
+ initialize,
+ }
+}
diff --git a/packages/server/classes/SegmentedAudioMPDJob/index.js b/packages/server/classes/SegmentedAudioMPDJob/index.js
index 55b8cdc9..365660ca 100644
--- a/packages/server/classes/SegmentedAudioMPDJob/index.js
+++ b/packages/server/classes/SegmentedAudioMPDJob/index.js
@@ -1,112 +1,108 @@
import fs from "node:fs"
import path from "node:path"
-import { exec } from "node:child_process"
-import { EventEmitter } from "node:events"
-export default class SegmentedAudioMPDJob {
- constructor({
- input,
- outputDir,
- outputMasterName = "master.mpd",
+import { FFMPEGLib, Utils } from "../FFMPEGLib"
- audioCodec = "aac",
- audioBitrate = undefined,
- audioSampleRate = undefined,
- segmentTime = 10,
- }) {
- this.input = input
- this.outputDir = outputDir
- this.outputMasterName = outputMasterName
+export default class SegmentedAudioMPDJob extends FFMPEGLib {
+ constructor(params = {}) {
+ super()
- this.audioCodec = audioCodec
- this.audioBitrate = audioBitrate
- this.segmentTime = segmentTime
- this.audioSampleRate = audioSampleRate
+ this.params = {
+ outputMasterName: "master.mpd",
+ audioCodec: "libopus",
+ audioBitrate: "320k",
+ audioSampleRate: "48000",
+ segmentTime: 10,
+ includeMetadata: true,
+ ...params,
+ }
+ }
- this.bin = require("ffmpeg-static")
+ buildSegmentationArgs = () => {
+ const args = [
+ //`-threads 1`, // limits to one thread
+ `-v error -hide_banner -progress pipe:1`,
+ `-i ${this.params.input}`,
+ `-c:a ${this.params.audioCodec}`,
+ `-map 0:a`,
+ `-f dash`,
+ `-dash_segment_type mp4`,
+ `-segment_time ${this.params.segmentTime}`,
+ `-use_template 1`,
+ `-use_timeline 1`,
+ `-init_seg_name "init.m4s"`,
+ ]
- return this
- }
+ if (this.params.includeMetadata === false) {
+ args.push(`-map_metadata -1`)
+ }
- events = new EventEmitter()
+ if (
+ typeof this.params.audioBitrate === "string" &&
+ this.params.audioBitrate !== "default"
+ ) {
+ args.push(`-b:a ${this.params.audioBitrate}`)
+ }
- buildCommand = () => {
- const cmdStr = [
- this.bin,
- `-v quiet -stats`,
- `-i ${this.input}`,
- `-c:a ${this.audioCodec}`,
- `-map 0:a`,
- `-map_metadata -1`,
- `-f dash`,
- `-dash_segment_type mp4`,
- `-segment_time ${this.segmentTime}`,
- `-use_template 1`,
- `-use_timeline 1`,
- `-init_seg_name "init.m4s"`,
- ]
+ if (
+ typeof this.params.audioSampleRate !== "undefined" &&
+ this.params.audioSampleRate !== "default"
+ ) {
+ args.push(`-ar ${this.params.audioSampleRate}`)
+ }
- if (typeof this.audioBitrate !== "undefined") {
- cmdStr.push(`-b:a ${this.audioBitrate}`)
- }
+ args.push(this.params.outputMasterName)
- if (typeof this.audioSampleRate !== "undefined") {
- cmdStr.push(`-ar ${this.audioSampleRate}`)
- }
+ return args
+ }
- cmdStr.push(this.outputMasterName)
+ run = async () => {
+ const segmentationCmd = this.buildSegmentationArgs()
+ const outputPath =
+ this.params.outputDir ?? `${path.dirname(this.params.input)}/dash`
+ const outputFile = path.join(outputPath, this.params.outputMasterName)
- return cmdStr.join(" ")
- }
+ this.emit("start", {
+ input: this.params.input,
+ output: outputPath,
+ params: this.params,
+ })
- run = () => {
- const cmdStr = this.buildCommand()
+ if (!fs.existsSync(outputPath)) {
+ fs.mkdirSync(outputPath, { recursive: true })
+ }
- console.log(cmdStr)
+ const inputProbe = await Utils.probe(this.params.input)
- const cwd = `${path.dirname(this.input)}/dash`
+ try {
+ const result = await this.ffmpeg({
+ args: segmentationCmd,
+ onProcess: (process) => {
+ this.handleProgress(
+ process.stdout,
+ parseFloat(inputProbe.format.duration),
+ (progress) => {
+ this.emit("progress", progress)
+ },
+ )
+ },
+ cwd: outputPath,
+ })
- if (!fs.existsSync(cwd)) {
- fs.mkdirSync(cwd, { recursive: true })
- }
+ let outputProbe = await Utils.probe(outputFile)
- console.log(`[DASH] Started audio segmentation`, {
- input: this.input,
- cwd: cwd,
- })
+ this.emit("end", {
+ probe: {
+ input: inputProbe,
+ output: outputProbe,
+ },
+ outputPath: outputPath,
+ outputFile: outputFile,
+ })
- const process = exec(
- cmdStr,
- {
- cwd: cwd,
- },
- (error, stdout, stderr) => {
- if (error) {
- console.log(`[DASH] Failed to segment audio >`, error)
-
- return this.events.emit("error", error)
- }
-
- if (stderr) {
- //return this.events.emit("error", stderr)
- }
-
- console.log(`[DASH] Finished segmenting audio >`, cwd)
-
- return this.events.emit("end", {
- filepath: path.join(cwd, this.outputMasterName),
- isDirectory: true,
- })
- }
- )
-
- process.stdout.on("data", (data) => {
- console.log(data.toString())
- })
- }
-
- on = (key, cb) => {
- this.events.on(key, cb)
- return this
- }
-}
\ No newline at end of file
+ return result
+ } catch (err) {
+ return this.emit("error", err)
+ }
+ }
+}
diff --git a/packages/server/classes/StorageClient/index.js b/packages/server/classes/StorageClient/index.js
index 810d54ce..c0c97a10 100755
--- a/packages/server/classes/StorageClient/index.js
+++ b/packages/server/classes/StorageClient/index.js
@@ -1,106 +1,118 @@
-const Minio = require("minio")
-import path from "path"
+import path from "node:path"
+import { Client } from "minio"
export const generateDefaultBucketPolicy = (payload) => {
- const { bucketName } = payload
+ const { bucketName } = payload
- if (!bucketName) {
- throw new Error("bucketName is required")
- }
+ if (!bucketName) {
+ throw new Error("bucketName is required")
+ }
- return {
- Version: "2012-10-17",
- Statement: [
- {
- Action: [
- "s3:GetObject"
- ],
- Effect: "Allow",
- Principal: {
- AWS: [
- "*"
- ]
- },
- Resource: [
- `arn:aws:s3:::${bucketName}/*`
- ],
- Sid: ""
- }
- ]
- }
+ return {
+ Version: "2012-10-17",
+ Statement: [
+ {
+ Action: ["s3:GetObject"],
+ Effect: "Allow",
+ Principal: {
+ AWS: ["*"],
+ },
+ Resource: [`arn:aws:s3:::${bucketName}/*`],
+ Sid: "",
+ },
+ ],
+ }
}
-export class StorageClient extends Minio.Client {
- constructor(options) {
- super(options)
+export class StorageClient extends Client {
+ constructor(options) {
+ super(options)
- this.defaultBucket = String(options.defaultBucket)
- this.defaultRegion = String(options.defaultRegion)
- }
+ this.defaultBucket = String(options.defaultBucket)
+ this.defaultRegion = String(options.defaultRegion)
+ this.setupBucket = Boolean(options.setupBucket)
+ this.cdnUrl = options.cdnUrl
+ }
- composeRemoteURL = (key, extraKey) => {
- let _path = path.join(this.defaultBucket, key)
+ composeRemoteURL = (key, extraKey) => {
+ let _path = path.join(this.defaultBucket, key)
- if (typeof extraKey === "string") {
- _path = path.join(_path, extraKey)
- }
+ if (typeof extraKey === "string") {
+ _path = path.join(_path, extraKey)
+ }
- return `${this.protocol}//${this.host}:${this.port}/${_path}`
- }
+ if (this.cdnUrl) {
+ return `${this.cdnUrl}/${_path}`
+ }
- setDefaultBucketPolicy = async (bucketName) => {
- const policy = generateDefaultBucketPolicy({ bucketName })
+ return `${this.protocol}//${this.host}:${this.port}/${_path}`
+ }
- return this.setBucketPolicy(bucketName, JSON.stringify(policy))
- }
+ setDefaultBucketPolicy = async (bucketName) => {
+ const policy = generateDefaultBucketPolicy({ bucketName })
- initialize = async () => {
- console.log("🔌 Checking if storage client have default bucket...")
+ return this.setBucketPolicy(bucketName, JSON.stringify(policy))
+ }
- try {
- const bucketExists = await this.bucketExists(this.defaultBucket)
+ initialize = async () => {
+ console.log("🔌 Checking if storage client has a default bucket...")
- if (!bucketExists) {
- console.warn("🪣 Default bucket not exists! Creating new bucket...")
+ if (this.setupBucket !== false) {
+ try {
+ const bucketExists = await this.bucketExists(this.defaultBucket)
- await this.makeBucket(this.defaultBucket, "s3")
+ if (!bucketExists) {
+ console.warn(
+ "🪣 Default bucket does not exist! Creating a new bucket...",
+ )
- // set default bucket policy
- await this.setDefaultBucketPolicy(this.defaultBucket)
- }
- } catch (error) {
- console.error(`Failed to check if default bucket exists or create default bucket >`, error)
- }
+ await this.makeBucket(this.defaultBucket, "s3")
- try {
- // check if default bucket policy exists
- const bucketPolicy = await this.getBucketPolicy(this.defaultBucket).catch(() => {
- return null
- })
+ // set default bucket policy
+ await this.setDefaultBucketPolicy(this.defaultBucket)
+ }
+ } catch (error) {
+ console.error(
+ `Failed to check if default bucket exists or create default bucket >`,
+ error,
+ )
+ }
- if (!bucketPolicy) {
- // set default bucket policy
- await this.setDefaultBucketPolicy(this.defaultBucket)
- }
- } catch (error) {
- console.error(`Failed to get or set default bucket policy >`, error)
- }
+ try {
+ // check if default bucket policy exists
+ const bucketPolicy = await this.getBucketPolicy(
+ this.defaultBucket,
+ ).catch(() => {
+ return null
+ })
- console.log("✅ Storage client is ready.")
- }
+ if (!bucketPolicy) {
+ // set default bucket policy
+ await this.setDefaultBucketPolicy(this.defaultBucket)
+ }
+ } catch (error) {
+ console.error(
+ `Failed to get or set default bucket policy >`,
+ error,
+ )
+ }
+ }
+
+ console.log("✅ Storage client is ready.")
+ }
}
export const createStorageClientInstance = (options) => {
- return new StorageClient({
- endPoint: process.env.S3_ENDPOINT,
- port: Number(process.env.S3_PORT),
- useSSL: ToBoolean(process.env.S3_USE_SSL),
- accessKey: process.env.S3_ACCESS_KEY,
- secretKey: process.env.S3_SECRET_KEY,
- defaultBucket: process.env.S3_BUCKET,
- defaultRegion: process.env.S3_REGION,
- ...options,
- })
+ return new StorageClient({
+ endPoint: process.env.S3_ENDPOINT,
+ port: Number(process.env.S3_PORT),
+ useSSL: ToBoolean(process.env.S3_USE_SSL),
+ accessKey: process.env.S3_ACCESS_KEY,
+ secretKey: process.env.S3_SECRET_KEY,
+ defaultBucket: process.env.S3_BUCKET,
+ defaultRegion: process.env.S3_REGION,
+ ...options,
+ })
}
-export default createStorageClientInstance
\ No newline at end of file
+export default createStorageClientInstance
diff --git a/packages/server/classes/TaskQueueManager/index.js b/packages/server/classes/TaskQueueManager/index.js
index 0fbf8a69..f8d446bb 100644
--- a/packages/server/classes/TaskQueueManager/index.js
+++ b/packages/server/classes/TaskQueueManager/index.js
@@ -56,14 +56,8 @@ export default class TaskQueueManager {
registerQueueEvents = (worker) => {
worker.on("progress", (job, progress) => {
try {
- console.log(`Job ${job.id} reported progress: ${progress}%`)
-
if (job.data.sseChannelId) {
- global.sse.sendToChannel(job.data.sseChannelId, {
- status: "progress",
- events: "job_progress",
- progress,
- })
+ global.sse.sendToChannel(job.data.sseChannelId, progress)
}
} catch (error) {
// handle error
@@ -76,8 +70,9 @@ export default class TaskQueueManager {
if (job.data.sseChannelId) {
global.sse.sendToChannel(job.data.sseChannelId, {
- status: "done",
- result,
+ event: "done",
+ state: "done",
+ result: result,
})
}
} catch (error) {}
@@ -89,7 +84,8 @@ export default class TaskQueueManager {
if (job.data.sseChannelId) {
global.sse.sendToChannel(job.data.sseChannelId, {
- status: "error",
+ event: "error",
+ state: "error",
result: error.message,
})
}
@@ -122,9 +118,9 @@ export default class TaskQueueManager {
)
await global.sse.sendToChannel(sseChannelId, {
- status: "progress",
- events: "job_queued",
- progress: 5,
+ event: "job_queued",
+ state: "progress",
+ percent: 5,
})
}
diff --git a/packages/server/classes/Transformation/handlers/a-dash.js b/packages/server/classes/Transformation/handlers/a-dash.js
new file mode 100644
index 00000000..b286bc45
--- /dev/null
+++ b/packages/server/classes/Transformation/handlers/a-dash.js
@@ -0,0 +1,33 @@
+import path from "node:path"
+import SegmentedAudioMPDJob from "@shared-classes/SegmentedAudioMPDJob"
+
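+// transcodes the input audio into a segmented DASH (MPD) stream under <workPath>/a-dash, reporting progress while transmuxing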
+export default async ({ filePath, workPath, onProgress }) => {
+ return new Promise(async (resolve, reject) => {
+ const outputDir = path.resolve(workPath, "a-dash")
+
+ const job = new SegmentedAudioMPDJob({
+ input: filePath,
+ outputDir: outputDir,
+
+ // set to default as raw flac
+ audioCodec: "flac",
+ audioBitrate: "default",
+ audioSampleRate: "default",
+ })
+
+ job.on("end", (data) => {
+ resolve(data)
+ })
+
+ job.on("error", (error) => {
+ reject(error)
+ })
+
+ job.on("progress", (progress) => {
+ if (typeof onProgress === "function") {
+ onProgress({
+ percent: progress,
+ state: "transmuxing",
+ })
+ }
+ })
+
+ job.run()
+ })
+}
diff --git a/packages/server/classes/Transformation/handlers/mq-hls.js b/packages/server/classes/Transformation/handlers/mq-hls.js
new file mode 100644
index 00000000..e10ea1d0
--- /dev/null
+++ b/packages/server/classes/Transformation/handlers/mq-hls.js
@@ -0,0 +1,45 @@
+import path from "node:path"
+import MultiqualityHLSJob from "@shared-classes/MultiqualityHLSJob"
+
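+// transcodes the input video into a multi-quality HLS stream (original quality plus a 1280px-wide rendition) under <workPath>/mqhls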
+export default async ({ filePath, workPath, onProgress }) => {
+ return new Promise(async (resolve, reject) => {
+ const outputDir = path.resolve(workPath, "mqhls")
+
+ const job = new MultiqualityHLSJob({
+ input: filePath,
+ outputDir: outputDir,
+
+ // set default
+ outputMasterName: "master.m3u8",
+ levels: [
+ {
+ original: true,
+ codec: "libx264",
+ bitrate: "10M",
+ preset: "ultrafast",
+ },
+ {
+ codec: "libx264",
+ width: 1280,
+ bitrate: "3M",
+ preset: "ultrafast",
+ },
+ ],
+ })
+
+ job.on("end", (data) => {
+ resolve(data)
+ })
+
+ job.on("error", (error) => {
+ reject(error)
+ })
+
+ job.on("progress", (progress) => {
+ if (typeof onProgress === "function") {
+ onProgress({
+ percent: progress,
+ state: "transmuxing",
+ })
+ }
+ })
+
+ job.run()
+ })
+}
diff --git a/packages/server/classes/Transformation/handlers/optimize.js b/packages/server/classes/Transformation/handlers/optimize.js
new file mode 100644
index 00000000..7a20f3b1
--- /dev/null
+++ b/packages/server/classes/Transformation/handlers/optimize.js
@@ -0,0 +1,63 @@
+import fs from "node:fs"
+import path from "node:path"
+import { fileTypeFromBuffer } from "file-type"
+
+import readChunk from "@shared-utils/readChunk"
+
+import Sharp from "sharp"
+
+const thresholds = {
+ size: 10 * 1024 * 1024,
+}
+
+const sharpConfigs = {
+ png: {
+ compressionLevel: 6,
+ //quality: 80,
+ },
+ jpeg: {
+ quality: 80,
+ mozjpeg: true,
+ },
+ default: {
+ quality: 80,
+ },
+}
+
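+// compresses images above the size threshold with Sharp; smaller files and non-image types are returned untouched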
+export default async ({ filePath, workPath, onProgress }) => {
+ const stat = await fs.promises.stat(filePath)
+
+ const firstBuffer = await readChunk(filePath, {
+ length: 4100,
+ })
+ const fileType = await fileTypeFromBuffer(firstBuffer)
+
+ // skip processing when the file is below the size threshold
+ if (stat.size < thresholds.size) {
+ return {
+ outputFile: filePath,
+ }
+ }
+
+ // get the type of the file mime
+ const type = fileType.mime.split("/")[0]
+
+ switch (type) {
+ case "image": {
+ let image = Sharp(filePath)
+
+ const metadata = await image.metadata()
+ const config = sharpConfigs[metadata.format] ?? sharpConfigs.default
+
+ image = await image[metadata.format](config).withMetadata()
+
+ filePath = path.resolve(workPath, `${path.basename(filePath)}_ff`)
+
+ await image.toFile(filePath)
+ }
+ }
+
+ return {
+ outputFile: filePath,
+ }
+}
diff --git a/packages/server/classes/Transformation/index.ts b/packages/server/classes/Transformation/index.ts
new file mode 100644
index 00000000..f99be3b5
--- /dev/null
+++ b/packages/server/classes/Transformation/index.ts
@@ -0,0 +1,26 @@
+const Handlers = {
+ "a-dash": require("./handlers/a-dash").default,
+ "mq-hls": require("./handlers/mq-hls").default,
+ optimize: require("./handlers/optimize").default,
+}
+
+export type TransformationPayloadType = {
+ filePath: string
+ workPath: string
+ handler: string
+ onProgress?: Function
+}
+
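+// resolves the requested handler by name and runs it against the payload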
+class Transformation {
+ static async transform(payload: TransformationPayloadType) {
+ const handler = Handlers[payload.handler]
+
+ if (typeof handler !== "function") {
+ throw new Error(`Invalid handler: ${payload.handler}`)
+ }
+
+ return await handler(payload)
+ }
+}
+
+export default Transformation
diff --git a/packages/server/classes/Upload/index.ts b/packages/server/classes/Upload/index.ts
new file mode 100644
index 00000000..b2d42eab
--- /dev/null
+++ b/packages/server/classes/Upload/index.ts
@@ -0,0 +1,154 @@
+import fs from "node:fs"
+import path from "node:path"
+import { fileTypeFromBuffer } from "file-type"
+
+import readChunk from "@shared-utils/readChunk"
+import getFileHash from "@shared-utils/readFileHash"
+
+import putObject from "./putObject"
+import Transformation from "../Transformation"
+
+export type FileHandlePayload = {
+ user_id: string
+ filePath: string
+ workPath: string
+ targetPath?: string // mostly provided by processed results
+ //uploadId?: string
+ transformations?: Array<string>
+ useCompression?: boolean
+ s3Provider?: string
+ onProgress?: Function
+}
+
+export type S3UploadPayload = {
+ filePath: string
+ basePath: string
+ targetPath?: string
+ s3Provider?: string
+ onProgress?: Function
+}
+
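+// coordinates the upload pipeline: optional transformations, upload to S3-compatible storage, then cleanup of the work directory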
+export default class Upload {
+ static fileHandle = async (payload: FileHandlePayload) => {
+ if (!payload.transformations) {
+ payload.transformations = []
+ }
+
+ // if compression is enabled and no transformations are provided, add basic transformations for images or videos
+ if (
+ payload.useCompression === true &&
+ payload.transformations.length === 0
+ ) {
+ payload.transformations.push("optimize")
+ }
+
+ // process file upload if transformations are provided
+ if (payload.transformations.length > 0) {
+ // process
+ const processed = await Upload.transform(payload)
+
+ // overwrite filePath
+ payload.filePath = processed.filePath
+ }
+
+ // upload
+ const result = await Upload.toS3({
+ filePath: payload.filePath,
+ targetPath: payload.targetPath,
+ basePath: payload.user_id,
+ onProgress: payload.onProgress,
+ s3Provider: payload.s3Provider,
+ })
+
+ // delete workpath
+ await fs.promises.rm(payload.workPath, { recursive: true, force: true })
+
+ return result
+ }
+
+ static transform = async (payload: FileHandlePayload) => {
+ if (Array.isArray(payload.transformations)) {
+ for await (const transformation of payload.transformations) {
+ const transformationResult = await Transformation.transform({
+ filePath: payload.filePath,
+ workPath: payload.workPath,
+ onProgress: payload.onProgress,
+ handler: transformation,
+ })
+
+ // if the result is a file, overwrite filePath
+ if (transformationResult.outputFile) {
+ payload.filePath = transformationResult.outputFile
+ }
+
+ // if the result is a directory, overwrite filePath to upload the entire directory
+ if (transformationResult.outputPath) {
+ payload.filePath = transformationResult.outputPath
+ payload.targetPath = transformationResult.outputFile
+ //payload.isDirectory = true
+ }
+ }
+ }
+
+ return payload
+ }
+
+ static toS3 = async (payload: S3UploadPayload) => {
+ const { filePath, basePath, targetPath, s3Provider, onProgress } =
+ payload
+
+ // if targetPath is provided, it means the upload is a directory
+ const isDirectory = !!targetPath
+
+ const metadata = await this.buildFileMetadata(
+ isDirectory ? targetPath : filePath,
+ )
+
+ let uploadPath = path.join(basePath, metadata["File-Hash"])
+
+ if (isDirectory) {
+ uploadPath = path.join(basePath, global.nanoid())
+ }
+
+ if (typeof onProgress === "function") {
+ onProgress({
+ percent: 0,
+ state: "uploading_s3",
+ })
+ }
+
+ // console.log("Uploading to S3:", {
+ // filePath: filePath,
+ // basePath: basePath,
+ // uploadPath: uploadPath,
+ // targetPath: targetPath,
+ // metadata: metadata,
+ // s3Provider: s3Provider,
+ // })
+
+ const result = await putObject({
+ filePath: filePath,
+ uploadPath: uploadPath,
+ metadata: metadata,
+ targetFilename: isDirectory ? path.basename(targetPath) : null,
+ provider: s3Provider,
+ })
+
+ return result
+ }
+
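+ // computes the file hash and MIME type used as object metadata for the upload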
+ static async buildFileMetadata(filePath: string) {
+ const firstBuffer = await readChunk(filePath, {
+ length: 4100,
+ })
+ const fileHash = await getFileHash(fs.createReadStream(filePath))
+ const fileType = await fileTypeFromBuffer(firstBuffer)
+
+ const metadata = {
+ "File-Hash": fileHash,
+ "Content-Type": fileType?.mime ?? "application/octet-stream",
+ }
+
+ return metadata
+ }
+}
diff --git a/packages/server/classes/Upload/putObject.js b/packages/server/classes/Upload/putObject.js
new file mode 100644
index 00000000..91c81f74
--- /dev/null
+++ b/packages/server/classes/Upload/putObject.js
@@ -0,0 +1,65 @@
+import fs from "node:fs"
+import path from "node:path"
+import pMap from "p-map"
+
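+// uploads a single file, or every file inside a directory, to the selected storage provider and returns its id, remote URL and metadata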
+export default async function putObject({
+ filePath,
+ uploadPath,
+ metadata = {},
+ targetFilename,
+ onFinish,
+ provider = "standard",
+}) {
+ const providerClass = global.storages[provider]
+
+ if (!providerClass) {
+ throw new Error(`Provider [${provider}] not found`)
+ }
+
+ const isDirectory = await fs.promises
+ .lstat(filePath)
+ .then((stats) => stats.isDirectory())
+
+ if (isDirectory) {
+ let files = await fs.promises.readdir(filePath)
+
+ files = files.map((file) => {
+ const newPath = path.join(filePath, file)
+
+ return {
+ filePath: newPath,
+ uploadPath: path.join(uploadPath, file),
+ provider: provider,
+ }
+ })
+
+ await pMap(files, putObject, {
+ concurrency: 3,
+ })
+
+ return {
+ id: uploadPath,
+ url: providerClass.composeRemoteURL(uploadPath, targetFilename),
+ metadata: metadata,
+ }
+ }
+
+ // upload to storage
+ await providerClass.fPutObject(
+ process.env.S3_BUCKET,
+ uploadPath,
+ filePath,
+ metadata,
+ )
+
+ const result = {
+ id: uploadPath,
+ url: providerClass.composeRemoteURL(uploadPath),
+ metadata: metadata,
+ }
+
+ if (typeof onFinish === "function") {
+ await onFinish(result)
+ }
+
+ return result
+}
diff --git a/packages/server/db_models/musicRelease/index.js b/packages/server/db_models/musicRelease/index.js
index 1182879f..0a5585f6 100755
--- a/packages/server/db_models/musicRelease/index.js
+++ b/packages/server/db_models/musicRelease/index.js
@@ -1,35 +1,36 @@
export default {
- name: "MusicRelease",
- collection: "music_releases",
- schema: {
- user_id: {
- type: String,
- required: true
- },
- title: {
- type: String,
- required: true
- },
- type: {
- type: String,
- required: true,
- },
- list: {
- type: Object,
- default: [],
- required: true
- },
- cover: {
- type: String,
- default: "https://storage.ragestudio.net/comty-static-assets/default_song.png"
- },
- created_at: {
- type: Date,
- required: true
- },
- public: {
- type: Boolean,
- default: true,
- },
- }
-}
\ No newline at end of file
+ name: "MusicRelease",
+ collection: "music_releases",
+ schema: {
+ user_id: {
+ type: String,
+ required: true,
+ },
+ title: {
+ type: String,
+ required: true,
+ },
+ type: {
+ type: String,
+ required: true,
+ },
+ items: {
+ type: Array,
+ default: [],
+ required: true,
+ },
+ cover: {
+ type: String,
+ default:
+ "https://storage.ragestudio.net/comty-static-assets/default_song.png",
+ },
+ created_at: {
+ type: Date,
+ required: true,
+ },
+ public: {
+ type: Boolean,
+ default: true,
+ },
+ },
+}
diff --git a/packages/server/db_models/track/index.js b/packages/server/db_models/track/index.js
index 57d97664..7a440e0e 100755
--- a/packages/server/db_models/track/index.js
+++ b/packages/server/db_models/track/index.js
@@ -1,46 +1,43 @@
export default {
- name: "Track",
- collection: "tracks",
- schema: {
- source: {
- type: String,
- required: true,
- },
- title: {
- type: String,
- required: true,
- },
- album: {
- type: String,
- },
- artists: {
- type: Array,
- },
- metadata: {
- type: Object,
- },
- explicit: {
- type: Boolean,
- default: false,
- },
- public: {
- type: Boolean,
- default: true,
- },
- publish_date: {
- type: Date,
- },
- cover: {
- type: String,
- default: "https://storage.ragestudio.net/comty-static-assets/default_song.png"
- },
- publisher: {
- type: Object,
- required: true,
- },
- lyrics_enabled: {
- type: Boolean,
- default: false
- }
- }
-}
\ No newline at end of file
+ name: "Track",
+ collection: "tracks",
+ schema: {
+ source: {
+ type: String,
+ required: true,
+ },
+ title: {
+ type: String,
+ required: true,
+ },
+ album: {
+ type: String,
+ },
+ artist: {
+ type: String,
+ },
+ metadata: {
+ type: Object,
+ },
+ explicit: {
+ type: Boolean,
+ default: false,
+ },
+ public: {
+ type: Boolean,
+ default: true,
+ },
+ publish_date: {
+ type: Date,
+ },
+ cover: {
+ type: String,
+ default:
+ "https://storage.ragestudio.net/comty-static-assets/default_song.png",
+ },
+ publisher: {
+ type: Object,
+ required: true,
+ },
+ },
+}
diff --git a/packages/server/db_models/user/index.js b/packages/server/db_models/user/index.js
index fa85fd6e..9a6f2156 100755
--- a/packages/server/db_models/user/index.js
+++ b/packages/server/db_models/user/index.js
@@ -1,77 +1,76 @@
export default {
- name: "User",
- collection: "accounts",
- schema: {
- username: {
- type: String,
- required: true
- },
- password: {
- type: String,
- required: true,
- select: false
- },
- email: {
- type: String,
- required: true,
- select: false
- },
- description: {
- type: String,
- default: null
- },
- created_at: {
- type: String
- },
- public_name: {
- type: String,
- default: null
- },
- cover: {
- type: String,
- default: null
- },
- avatar: {
- type:
- String,
- default: null
- },
- roles: {
- type: Array,
- default: []
- },
- verified: {
- type: Boolean,
- default: false
- },
- badges: {
- type: Array,
- default: []
- },
- links: {
- type: Array,
- default: []
- },
- location: {
- type: String,
- default: null
- },
- birthday: {
- type: Date,
- default: null,
- select: false
- },
- accept_tos: {
- type: Boolean,
- default: false
- },
- activated: {
- type: Boolean,
- default: false,
- },
- disabled: {
- type: Boolean,
- default: false
- },
- }
-}
\ No newline at end of file
+ name: "User",
+ collection: "accounts",
+ schema: {
+ username: {
+ type: String,
+ required: true,
+ },
+ password: {
+ type: String,
+ required: true,
+ select: false,
+ },
+ email: {
+ type: String,
+ required: true,
+ select: false,
+ },
+ description: {
+ type: String,
+ default: null,
+ },
+ created_at: {
+ type: String,
+ },
+ public_name: {
+ type: String,
+ default: null,
+ },
+ cover: {
+ type: String,
+ default: null,
+ },
+ avatar: {
+ type: String,
+ default: null,
+ },
+ roles: {
+ type: Array,
+ default: [],
+ },
+ verified: {
+ type: Boolean,
+ default: false,
+ },
+ badges: {
+ type: Array,
+ default: [],
+ },
+ links: {
+ type: Array,
+ default: [],
+ },
+ location: {
+ type: String,
+ default: null,
+ },
+ birthday: {
+ type: Date,
+ default: null,
+ select: false,
+ },
+ accept_tos: {
+ type: Boolean,
+ default: false,
+ },
+ activated: {
+ type: Boolean,
+ default: false,
+ },
+ disabled: {
+ type: Boolean,
+ default: false,
+ },
+ },
+}
diff --git a/packages/server/dev.sh b/packages/server/dev.sh
new file mode 100755
index 00000000..0d78dc56
--- /dev/null
+++ b/packages/server/dev.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+node --run dev
diff --git a/packages/server/gateway/index.js b/packages/server/gateway/index.js
index 559f5ec7..fba8d70c 100755
--- a/packages/server/gateway/index.js
+++ b/packages/server/gateway/index.js
@@ -181,19 +181,11 @@ export default class Gateway {
serviceId: id,
path: path,
target: `${http.proto}://${listen.ip}:${listen.port}${path}`,
+ websocket: !!websocket,
})
}
}
- if (websocket && websocket.enabled === true) {
- await this.gateway.register({
- serviceId: id,
- websocket: true,
- path: websocket.path,
- target: `${http.proto}://${listen.ip}:${listen.port}${websocket.path}`,
- })
- }
-
if (this.state.allReady) {
if (typeof this.gateway.applyConfiguration === "function") {
await this.gateway.applyConfiguration()
diff --git a/packages/server/gateway/managers/nginx/index.js b/packages/server/gateway/managers/nginx/index.js
index 3b500e3b..ec9d6127 100755
--- a/packages/server/gateway/managers/nginx/index.js
+++ b/packages/server/gateway/managers/nginx/index.js
@@ -6,6 +6,7 @@ import defaults from "linebridge/dist/defaults"
const localNginxBinary = path.resolve(process.cwd(), "nginx-bin")
const serverPkg = require("../../../package.json")
+
/**
* NginxManager - Optimized version that batches configurations
* Waits for all services to register before applying configuration
@@ -253,7 +254,7 @@ http {
if (debugFlag) {
console.log(
- `🔍 Registering route for [${serviceId}]: ${normalizedPath} -> ${target} (${websocket ? "WebSocket" : "HTTP"})`,
+ `🔍 Registering route for [${serviceId}]: ${normalizedPath} -> ${target}`,
)
}
@@ -261,8 +262,8 @@ http {
const effectivePathRewrite = pathRewrite || {}
this.routes.set(normalizedPath, {
- serviceId,
- target,
+ serviceId: serviceId,
+ target: target,
pathRewrite: effectivePathRewrite,
websocket: !!websocket,
})
diff --git a/packages/server/gateway/services/service.js b/packages/server/gateway/services/service.js
index a4666db2..ee2e9a17 100755
--- a/packages/server/gateway/services/service.js
+++ b/packages/server/gateway/services/service.js
@@ -55,7 +55,8 @@ export default class Service {
this.instance = await spawnService({
id: this.id,
- service: this.path,
+ service: this,
+ path: this.path,
cwd: this.cwd,
onClose: this.handleClose.bind(this),
onError: this.handleError.bind(this),
@@ -140,8 +141,7 @@ export default class Service {
// Kill the current process if is running
if (this.instance.exitCode === null) {
- console.log(`[${this.id}] Killing current process...`)
- await this.instance.kill("SIGKILL")
+ await this.instance.kill()
}
// Start a new process
@@ -153,17 +153,13 @@ export default class Service {
/**
* Stop the service
*/
- async stop() {
+ stop() {
console.log(`[${this.id}] Stopping service...`)
- if (this.fileWatcher) {
- await this.fileWatcher.close()
- this.fileWatcher = null
- }
+ this.instance.kill()
- if (this.instance) {
- await this.instance.kill("SIGKILL")
- this.instance = null
+ if (this.fileWatcher) {
+ this.fileWatcher.close()
}
}
diff --git a/packages/server/gateway/utils/spawnService.js b/packages/server/gateway/utils/spawnService.js
index ea1268b2..70212704 100755
--- a/packages/server/gateway/utils/spawnService.js
+++ b/packages/server/gateway/utils/spawnService.js
@@ -3,21 +3,29 @@ import createServiceLogTransformer from "./createServiceLogTransformer"
import Vars from "../vars"
-export default async ({ id, service, cwd, onClose, onError, onIPCData }) => {
+export default async ({
+ id,
+ service,
+ path,
+ cwd,
+ onClose,
+ onError,
+ onIPCData,
+}) => {
const instanceEnv = {
...process.env,
- lb_service: {
- id: service.id,
- index: service.index,
- },
+ lb_service_id: service.id,
+ lb_service_path: service.path,
+ lb_service_version: service.version,
+ lb_service_cwd: service.cwd,
+ lb_service: true,
}
- let instance = ChildProcess.fork(Vars.bootloaderBin, [service], {
+ let instance = ChildProcess.fork(Vars.bootloaderBin, [path], {
detached: false,
silent: true,
cwd: cwd,
env: instanceEnv,
- killSignal: "SIGTERM",
})
instance.logs = {
diff --git a/packages/server/package.json b/packages/server/package.json
index 4c8a1e98..989d16bd 100755
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -1,6 +1,6 @@
{
"name": "@comty/server",
- "version": "1.31.0@alpha",
+ "version": "1.38.0@alpha",
"license": "ComtyLicense",
"private": true,
"workspaces": [
diff --git a/packages/server/scripts/installLatestFfmpeg.sh b/packages/server/scripts/installLatestFfmpeg.sh
new file mode 100755
index 00000000..81f5abc7
--- /dev/null
+++ b/packages/server/scripts/installLatestFfmpeg.sh
@@ -0,0 +1,165 @@
+#!/bin/bash
+
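+# Downloads the latest static FFmpeg build for Linux (amd64/arm64/armhf) from johnvansickle.com and installs ffmpeg/ffprobe into ~/.local/bin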
+GREEN='\033[0;32m'
+RED='\033[0;31m'
+YELLOW='\033[0;33m'
+NC='\033[0m'
+
+detect_os() {
+ if [[ "$OSTYPE" == "linux-gnu"* ]]; then
+ echo "linux"
+ else
+ echo "unsupported"
+ fi
+}
+
+detect_arch() {
+ ARCH=$(uname -m)
+ if [[ "$ARCH" == "x86_64" ]]; then
+ echo "amd64"
+ elif [[ "$ARCH" == "aarch64" ]] || [[ "$ARCH" == "arm64" ]]; then
+ echo "arm64"
+ elif [[ "$ARCH" == "armv7l" ]]; then
+ echo "armhf"
+ else
+ echo "unsupported"
+ fi
+}
+
+OS=$(detect_os)
+ARCH=$(detect_arch)
+
+if [[ "$OS" == "unsupported" ]] || [[ "$ARCH" == "unsupported" ]]; then
+ echo -e "${RED}Operating system or architecture not supported. This script only supports Linux on amd64, arm64, or armhf architectures.${NC}"
+ exit 1
+fi
+
+INSTALL_DIR="$HOME/.local/bin"
+TEMP_DIR="/tmp/ffmpegdl"
+
+if [[ -d "$TEMP_DIR" ]]; then
+ rm -rf "$TEMP_DIR"
+fi
+
+mkdir -p "$TEMP_DIR"
+
+if [[ ! -d "$INSTALL_DIR" ]]; then
+ echo -e "${RED}$INSTALL_DIR is not a directory.${NC}"
+ exit 1
+fi
+
+download_ffmpeg() {
+ echo -e "${YELLOW}Downloading the latest stable version of FFmpeg...${NC}"
+
+ # Base URL for downloads from John van Sickle's FFmpeg builds
+ BASE_URL="https://johnvansickle.com/ffmpeg/releases"
+
+ # Map architecture to the expected format in the URL
+ if [[ "$ARCH" == "amd64" ]]; then
+ URL_ARCH="amd64"
+ elif [[ "$ARCH" == "arm64" ]]; then
+ URL_ARCH="arm64"
+ elif [[ "$ARCH" == "armhf" ]]; then
+ URL_ARCH="armhf"
+ fi
+
+ # Create the download URL for the latest release
+ FFMPEG_URL="$BASE_URL/ffmpeg-release-$URL_ARCH-static.tar.xz"
+
+ if [[ -z "$FFMPEG_URL" ]]; then
+ echo -e "${RED}Could not determine the download URL for your system.${NC}"
+ exit 1
+ fi
+
+ # Download the file
+ ARCHIVE_FILE="$TEMP_DIR/ffmpeg.tar.xz"
+ echo -e "${YELLOW}Downloading from: $FFMPEG_URL${NC}"
+
+ if command -v wget > /dev/null; then
+ wget -q --show-progress -O "$ARCHIVE_FILE" "$FFMPEG_URL"
+ elif command -v curl > /dev/null; then
+ curl -L -o "$ARCHIVE_FILE" "$FFMPEG_URL"
+ else
+ echo -e "${RED}wget or curl is required to download FFmpeg.${NC}"
+ exit 1
+ fi
+
+ if [[ $? -ne 0 ]]; then
+ echo -e "${RED}Error downloading FFmpeg.${NC}"
+ exit 1
+ fi
+
+ echo -e "${GREEN}Download completed.${NC}"
+
+ # Extract the file
+ echo -e "${YELLOW}Extracting files...${NC}"
+ cd "$TEMP_DIR"
+ tar -xf "$ARCHIVE_FILE"
+
+ if [[ $? -ne 0 ]]; then
+ echo -e "${RED}Error extracting the file.${NC}"
+ exit 1
+ fi
+
+ echo -e "${GREEN}Extraction completed.${NC}"
+
+ # Clean up downloaded file
+ rm "$ARCHIVE_FILE"
+}
+
+install_binaries() {
+ echo -e "${YELLOW}Installing binaries...${NC}"
+
+ # Find the extracted directory
+ EXTRACTED_DIR=$(find "$TEMP_DIR" -maxdepth 1 -type d -name "ffmpeg-*" | head -n 1)
+
+ if [[ -z "$EXTRACTED_DIR" ]]; then
+ echo -e "${RED}FFmpeg extracted directory not found.${NC}"
+ exit 1
+ fi
+
+ # Find the binaries
+ FFMPEG_BIN="$EXTRACTED_DIR/ffmpeg"
+ FFPROBE_BIN="$EXTRACTED_DIR/ffprobe"
+
+ # Verify binaries exist
+ if [[ ! -f "$FFMPEG_BIN" ]] || [[ ! -f "$FFPROBE_BIN" ]]; then
+ echo -e "${RED}FFmpeg and FFprobe binaries not found.${NC}"
+ exit 1
+ fi
+
+ # Copy binaries to the bin folder
+ mv "$FFMPEG_BIN" "$INSTALL_DIR/ffmpeg"
+ mv "$FFPROBE_BIN" "$INSTALL_DIR/ffprobe"
+
+ # Make binaries executable
+ chmod +x "$INSTALL_DIR/ffmpeg"
+ chmod +x "$INSTALL_DIR/ffprobe"
+
+ echo -e "${GREEN}Binaries installed in $INSTALL_DIR${NC}"
+
+ # Clean up extracted directory
+ rm -rf "$EXTRACTED_DIR"
+ rm -rf "$TEMP_DIR"
+}
+
+show_versions() {
+ echo -e "${YELLOW}Verifying the installation...${NC}"
+
+ FFMPEG_PATH="$INSTALL_DIR/ffmpeg"
+ FFPROBE_PATH="$INSTALL_DIR/ffprobe"
+
+ echo -e "${GREEN}FFmpeg installed at: $FFMPEG_PATH${NC}"
+ if [[ -x "$FFMPEG_PATH" ]]; then
+ "$FFMPEG_PATH" -version | head -n 1
+ fi
+
+ echo -e "${GREEN}FFprobe installed at: $FFPROBE_PATH${NC}"
+ if [[ -x "$FFPROBE_PATH" ]]; then
+ "$FFPROBE_PATH" -version | head -n 1
+ fi
+}
+
+download_ffmpeg
+install_binaries
+show_versions
diff --git a/packages/server/services/auth/classes/account/methods/loginStrategy.js b/packages/server/services/auth/classes/account/methods/loginStrategy.js
index 7655d1a2..537a2a27 100644
--- a/packages/server/services/auth/classes/account/methods/loginStrategy.js
+++ b/packages/server/services/auth/classes/account/methods/loginStrategy.js
@@ -2,31 +2,25 @@ import bcrypt from "bcrypt"
import { User } from "@db_models"
export default async ({ username, password, hash }, user) => {
- if (typeof user === "undefined") {
- let isEmail = username.match(/^[^\s@]+@[^\s@]+\.[^\s@]+$/)
+ if (typeof user === "undefined") {
+ let isEmail = username.match(/^[^\s@]+@[^\s@]+\.[^\s@]+$/)
- let query = isEmail ? { email: username } : { username: username }
+ let query = isEmail ? { email: username } : { username: username }
- user = await User.findOne(query).select("+email").select("+password")
- }
+ user = await User.findOne(query).select("+email").select("+password")
+ }
- if (!user) {
- throw new OperationError(401, "User not found")
- }
+ if (!user) {
+ throw new OperationError(401, "User not found")
+ }
- if (user.disabled == true) {
- throw new OperationError(401, "User is disabled")
- }
+ if (user.disabled == true) {
+ throw new OperationError(401, "User is disabled")
+ }
- if (typeof hash !== "undefined") {
- if (user.password !== hash) {
- throw new OperationError(401, "Invalid credentials")
- }
- } else {
- if (!bcrypt.compareSync(password, user.password)) {
- throw new OperationError(401, "Invalid credentials")
- }
- }
+ if (!bcrypt.compareSync(password, user.password)) {
+ throw new OperationError(401, "Invalid credentials")
+ }
- return user
-}
\ No newline at end of file
+ return user
+}
diff --git a/packages/server/services/auth/package.json b/packages/server/services/auth/package.json
index 932e2eb2..2c943a0e 100644
--- a/packages/server/services/auth/package.json
+++ b/packages/server/services/auth/package.json
@@ -1,4 +1,3 @@
{
- "name": "auth",
- "version": "1.0.0"
+ "name": "auth"
}
diff --git a/packages/server/services/chats/package.json b/packages/server/services/chats/package.json
index 4e136e1b..9a34bc34 100755
--- a/packages/server/services/chats/package.json
+++ b/packages/server/services/chats/package.json
@@ -1,4 +1,3 @@
{
- "name": "chats",
- "version": "0.60.2"
-}
\ No newline at end of file
+ "name": "chats"
+}
diff --git a/packages/server/services/ems/ems.service.js b/packages/server/services/ems/ems.service.js
index 3efc85af..42eaf6c4 100644
--- a/packages/server/services/ems/ems.service.js
+++ b/packages/server/services/ems/ems.service.js
@@ -3,40 +3,42 @@ import nodemailer from "nodemailer"
import DbManager from "@shared-classes/DbManager"
import SharedMiddlewares from "@shared-middlewares"
+
export default class API extends Server {
- static refName = "ems"
- static useEngine = "hyper-express"
- static routesPath = `${__dirname}/routes`
- static listen_port = process.env.HTTP_LISTEN_PORT ?? 3007
+ static refName = "ems"
+ static useEngine = "hyper-express"
+ static routesPath = `${__dirname}/routes`
+ static listen_port = process.env.HTTP_LISTEN_PORT ?? 3007
- middlewares = {
- ...SharedMiddlewares
- }
+ middlewares = {
+ ...SharedMiddlewares,
+ }
- contexts = {
- db: new DbManager(),
- mailTransporter: nodemailer.createTransport({
- host: process.env.SMTP_HOSTNAME,
- port: process.env.SMTP_PORT ?? 587,
- secure: ToBoolean(process.env.SMTP_SECURE) ?? false,
- auth: {
- user: process.env.SMTP_USERNAME,
- pass: process.env.SMTP_PASSWORD,
- },
- }),
- }
+ contexts = {
+ db: new DbManager(),
+ mailTransporter: nodemailer.createTransport({
+ host: process.env.SMTP_HOSTNAME,
+ port: process.env.SMTP_PORT ?? 587,
+ secure: ToBoolean(process.env.SMTP_SECURE) ?? false,
+ auth: {
+ user: process.env.SMTP_USERNAME,
+ pass: process.env.SMTP_PASSWORD,
+ },
+ }),
+ }
- ipcEvents = {
- "account:activation:send": require("./ipcEvents/accountActivation").default,
- "new:login": require("./ipcEvents/newLogin").default,
- "mfa:send": require("./ipcEvents/mfaSend").default,
- "apr:send": require("./ipcEvents/aprSend").default,
- "password:changed": require("./ipcEvents/passwordChanged").default,
- }
+ ipcEvents = {
+ "account:activation:send": require("./ipcEvents/accountActivation")
+ .default,
+ "new:login": require("./ipcEvents/newLogin").default,
+ "mfa:send": require("./ipcEvents/mfaSend").default,
+ "apr:send": require("./ipcEvents/aprSend").default,
+ "password:changed": require("./ipcEvents/passwordChanged").default,
+ }
- async onInitialize() {
- await this.contexts.db.initialize()
- }
+ async onInitialize() {
+ await this.contexts.db.initialize()
+ }
}
-Boot(API)
\ No newline at end of file
+Boot(API)
diff --git a/packages/server/services/ems/package.json b/packages/server/services/ems/package.json
index 0f28a259..142583aa 100644
--- a/packages/server/services/ems/package.json
+++ b/packages/server/services/ems/package.json
@@ -1,10 +1,7 @@
{
- "name": "ems",
- "description": "External Messaging Service (SMS, EMAIL, PUSH)",
- "version": "0.1.0",
- "dependencies": {
- "handlebars": "^4.7.8",
- "nodemailer": "^6.9.11",
- "web-push": "^3.6.7"
- }
+ "name": "ems",
+ "dependencies": {
+ "handlebars": "^4.7.8",
+ "nodemailer": "^6.9.11"
+ }
}
diff --git a/packages/server/services/files/classes/ChunkFileUpload/index.js b/packages/server/services/files/classes/ChunkFile/index.js
similarity index 81%
rename from packages/server/services/files/classes/ChunkFileUpload/index.js
rename to packages/server/services/files/classes/ChunkFile/index.js
index 8c677970..b480016d 100755
--- a/packages/server/services/files/classes/ChunkFileUpload/index.js
+++ b/packages/server/services/files/classes/ChunkFile/index.js
@@ -104,11 +104,9 @@ export function createAssembleChunksPromise({
export async function handleChunkFile(
fileStream,
- { tmpDir, headers, maxFileSize, maxChunkSize },
+ { chunksPath, outputDir, headers, maxFileSize, maxChunkSize },
) {
return await new Promise(async (resolve, reject) => {
- const workPath = path.join(tmpDir, headers["uploader-file-id"])
- const chunksPath = path.join(workPath, "chunks")
const chunkPath = path.join(
chunksPath,
headers["uploader-chunk-number"],
@@ -125,17 +123,6 @@ export async function handleChunkFile(
return reject(new OperationError(500, "Chunk is out of range"))
}
- // if is the first chunk check if dir exists before write things
- if (chunkCount === 0) {
- try {
- if (!(await fs.promises.stat(chunksPath).catch(() => false))) {
- await fs.promises.mkdir(chunksPath, { recursive: true })
- }
- } catch (error) {
- return reject(new OperationError(500, error.message))
- }
- }
-
let dataWritten = 0
let writeStream = fs.createWriteStream(chunkPath)
@@ -172,25 +159,18 @@ export async function handleChunkFile(
}
if (isLast) {
- const mimetype = mimetypes.lookup(
- headers["uploader-original-name"],
- )
- const extension = mimetypes.extension(mimetype)
+ // const mimetype = mimetypes.lookup(
+ // headers["uploader-original-name"],
+ // )
+ // const extension = mimetypes.extension(mimetype)
- let filename = headers["uploader-file-id"]
-
- if (headers["uploader-use-date"] === "true") {
- filename = `${filename}_${Date.now()}`
- }
+ let filename = nanoid()
return resolve(
createAssembleChunksPromise({
// build data
chunksPath: chunksPath,
- filePath: path.resolve(
- workPath,
- `${filename}.${extension}`,
- ),
+ filePath: path.resolve(outputDir, filename),
maxFileSize: maxFileSize,
}),
)
diff --git a/packages/server/services/files/file.service.js b/packages/server/services/files/file.service.js
index 0e0db8b2..7c2ce7f1 100755
--- a/packages/server/services/files/file.service.js
+++ b/packages/server/services/files/file.service.js
@@ -1,21 +1,21 @@
import { Server } from "linebridge"
-import B2 from "backblaze-b2"
-
import DbManager from "@shared-classes/DbManager"
+import RedisClient from "@shared-classes/RedisClient"
import StorageClient from "@shared-classes/StorageClient"
import CacheService from "@shared-classes/CacheService"
import SSEManager from "@shared-classes/SSEManager"
-import SharedMiddlewares from "@shared-middlewares"
import LimitsClass from "@shared-classes/Limits"
import TaskQueueManager from "@shared-classes/TaskQueueManager"
+import SharedMiddlewares from "@shared-middlewares"
+
class API extends Server {
static refName = "files"
- static useEngine = "hyper-express"
+ static useEngine = "hyper-express-ng"
static routesPath = `${__dirname}/routes`
static listen_port = process.env.HTTP_LISTEN_PORT ?? 3002
- static enableWebsockets = true
+ //static enableWebsockets = true
middlewares = {
...SharedMiddlewares,
@@ -24,10 +24,13 @@ class API extends Server {
contexts = {
db: new DbManager(),
cache: new CacheService(),
+ SSEManager: new SSEManager(),
+ redis: RedisClient({
+ maxRetriesPerRequest: null,
+ }),
+ limits: {},
storage: StorageClient(),
b2Storage: null,
- SSEManager: new SSEManager(),
- limits: {},
}
queuesManager = new TaskQueueManager(
@@ -41,27 +44,35 @@ class API extends Server {
global.sse = this.contexts.SSEManager
if (process.env.B2_KEY_ID && process.env.B2_APP_KEY) {
- this.contexts.b2Storage = new B2({
- applicationKeyId: process.env.B2_KEY_ID,
- applicationKey: process.env.B2_APP_KEY,
+ this.contexts.b2Storage = StorageClient({
+ endPoint: process.env.B2_ENDPOINT,
+ cdnUrl: process.env.B2_CDN_ENDPOINT,
+ defaultBucket: process.env.B2_BUCKET,
+ accessKey: process.env.B2_KEY_ID,
+ secretKey: process.env.B2_APP_KEY,
+ port: 443,
+ useSSL: true,
+ setupBucket: false,
})
- global.b2Storage = this.contexts.b2Storage
-
- await this.contexts.b2Storage.authorize()
+ await this.contexts.b2Storage.initialize()
} else {
console.warn(
"B2 storage not configured on environment, skipping...",
)
}
+ await this.contexts.redis.initialize()
await this.queuesManager.initialize({
- redisOptions: this.engine.ws.redis.options,
+ redisOptions: this.contexts.redis.client,
})
await this.contexts.db.initialize()
await this.contexts.storage.initialize()
- global.storage = this.contexts.storage
+ global.storages = {
+ standard: this.contexts.storage,
+ b2: this.contexts.b2Storage,
+ }
global.queues = this.queuesManager
this.contexts.limits = await LimitsClass.get()
diff --git a/packages/server/services/files/package.json b/packages/server/services/files/package.json
index a2a06202..c9a52227 100755
--- a/packages/server/services/files/package.json
+++ b/packages/server/services/files/package.json
@@ -1,20 +1,10 @@
{
- "name": "files",
- "version": "0.60.2",
- "dependencies": {
- "backblaze-b2": "^1.7.0",
- "busboy": "^1.6.0",
- "content-range": "^2.0.2",
- "ffmpeg-static": "^5.2.0",
- "fluent-ffmpeg": "^2.1.2",
- "merge-files": "^0.1.2",
- "mime-types": "^2.1.35",
- "minio": "^7.0.32",
- "normalize-url": "^8.0.0",
- "p-map": "4",
- "p-queue": "^7.3.4",
- "redis": "^4.6.6",
- "sharp": "0.32.6",
- "split-chunk-merge": "^1.0.0"
- }
+ "name": "files",
+ "dependencies": {
+ "file-type": "^20.4.1",
+ "fluent-ffmpeg": "^2.1.2",
+ "mime-types": "^2.1.35",
+ "p-map": "4",
+ "sharp": "0.32.6"
+ }
}
diff --git a/packages/server/services/files/queues/TransmuxedUpload/index.js b/packages/server/services/files/queues/TransmuxedUpload/index.js
deleted file mode 100644
index 18b2d029..00000000
--- a/packages/server/services/files/queues/TransmuxedUpload/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-import path from "node:path"
-
-import fs from "node:fs"
-import RemoteUpload from "@services/remoteUpload"
-
-export default {
- id: "remote_upload",
- maxJobs: 10,
- process: async (job) => {
- const {
- filePath,
- parentDir,
- service,
- useCompression,
- cachePath,
- transmux,
- transmuxOptions,
- } = job.data
-
- console.log("[JOB][remote_upload] Processing job >", job.data)
-
- try {
- const result = await RemoteUpload({
- parentDir: parentDir,
- source: filePath,
- service: service,
- useCompression: useCompression,
- transmux: transmux,
- transmuxOptions: transmuxOptions,
- cachePath: cachePath,
- onProgress: (progress) => {
- job.updateProgress(progress)
- },
- })
-
- await fs.promises
- .rm(filePath, { recursive: true, force: true })
- .catch(() => null)
-
- return result
- } catch (error) {
- await fs.promises
- .rm(filePath, { recursive: true, force: true })
- .catch(() => null)
-
- throw error
- }
- },
-}
diff --git a/packages/server/services/files/queues/TransmuxedUpload/worker.js b/packages/server/services/files/queues/TransmuxedUpload/worker.js
deleted file mode 100644
index 39bf8728..00000000
--- a/packages/server/services/files/queues/TransmuxedUpload/worker.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import RemoteUpload from "@services/remoteUpload"
-import fs from "node:fs"
-
-module.exports = async (job) => {
- const {
- filePath,
- parentDir,
- service,
- useCompression,
- cachePath,
- transmux,
- transmuxOptions,
- } = job.data
-
- console.log("[JOB][remote_upload] Processing job >", job.data)
-
- try {
- const result = await RemoteUpload({
- parentDir: parentDir,
- source: filePath,
- service: service,
- useCompression: useCompression,
- transmux: transmux,
- transmuxOptions: transmuxOptions,
- cachePath: cachePath,
- onProgress: (progress) => {
- job.progress(progress)
- },
- })
-
- await fs.promises
- .rm(filePath, { recursive: true, force: true })
- .catch(() => null)
-
- return result
- } catch (error) {
- await fs.promises
- .rm(filePath, { recursive: true, force: true })
- .catch(() => null)
-
- throw error
- }
-}
diff --git a/packages/server/services/files/queues/fileProcess/index.js b/packages/server/services/files/queues/fileProcess/index.js
new file mode 100644
index 00000000..663a0d10
--- /dev/null
+++ b/packages/server/services/files/queues/fileProcess/index.js
@@ -0,0 +1,29 @@
+import path from "node:path"
+import fs from "node:fs"
+
+import Upload from "@shared-classes/Upload"
+
+export default {
+ id: "file-process",
+ maxJobs: 2,
+ process: async (job) => {
+ console.log("[JOB][file-process] starting... >", job.data)
+
+ try {
+ const result = await Upload.fileHandle({
+ ...job.data,
+ onProgress: (progress) => {
+ job.updateProgress(progress)
+ },
+ })
+
+ return result
+ } catch (error) {
+ await fs.promises
+ .rm(job.data.workPath, { recursive: true, force: true })
+ .catch(() => null)
+
+ throw error
+ }
+ },
+}
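
Note (editor's sketch, not part of the patch): the "file-process" queue defined above is fed from routes/upload/chunk/post.js later in this diff. The sketch below condenses how a job ends up here, using only the payload fields and the global.queues.createJob call visible in that route; the concrete transformation id is an example value and is not guaranteed by this diff.

    // Editor's sketch (not part of the patch): mirrors how routes/upload/chunk/post.js
    // enqueues work for the "file-process" processor defined above.
    async function enqueueFileProcess({ user_id, uploadId, filePath, workPath }) {
        const payload = {
            user_id, // owner of the upload
            uploadId, // taken from the uploader-file-id header
            filePath, // assembled file on disk
            workPath, // temp dir, removed if the job fails
            transformations: ["a-dash"], // example value; the route reads it from the `transformations` header
            s3Provider: "standard", // or "b2", validated against availableProviders
            useCompression: true,
        }

        // useSSE exposes job progress on an SSE channel (job.sseChannelId),
        // which the route turns into the sseUrl returned to the client
        return await global.queues.createJob("file-process", payload, {
            useSSE: true,
        })
    }
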
diff --git a/packages/server/services/files/routes/stream/[$]/get.js b/packages/server/services/files/routes/stream/[$]/get.js
deleted file mode 100644
index 8ecf5ffc..00000000
--- a/packages/server/services/files/routes/stream/[$]/get.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import mimetypes from "mime-types"
-
-export default {
- useContext: ["storage"],
- fn: async (req, res) => {
- const streamPath = req.path.replace(req.route.pattern.replace("*", ""), "/")
-
- this.default.contexts.storage.getObject(process.env.S3_BUCKET, streamPath, (err, dataStream) => {
- if (err) {
- return res.status(404).end()
- }
-
- const extname = mimetypes.lookup(streamPath)
-
- // send chunked response
- res.status(200)
-
- // set headers
- res.setHeader("Content-Type", extname)
- res.setHeader("Accept-Ranges", "bytes")
-
- return dataStream.pipe(res)
- })
- }
-}
\ No newline at end of file
diff --git a/packages/server/services/files/routes/transcode/get.js b/packages/server/services/files/routes/transcode/get.js
deleted file mode 100644
index b358bf41..00000000
--- a/packages/server/services/files/routes/transcode/get.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import path from "node:path"
-import fs from "node:fs"
-import axios from "axios"
-
-import MultiqualityHLSJob from "@shared-classes/MultiqualityHLSJob"
-import { standardUpload } from "@services/remoteUpload"
-
-export default {
- useContext: ["cache", "limits"],
- middlewares: ["withAuthentication"],
- fn: async (req, res) => {
- const { url } = req.query
-
- const userPath = path.join(this.default.contexts.cache.constructor.cachePath, req.auth.session.user_id)
-
- const jobId = String(new Date().getTime())
- const jobPath = path.resolve(userPath, "jobs", jobId)
-
- const sourcePath = path.resolve(jobPath, `${jobId}.source`)
-
- if (!fs.existsSync(jobPath)) {
- fs.mkdirSync(jobPath, { recursive: true })
- }
-
- const sourceStream = fs.createWriteStream(sourcePath)
-
- const response = await axios({
- method: "get",
- url,
- responseType: "stream",
- })
-
- response.data.pipe(sourceStream)
-
- await new Promise((resolve, reject) => {
- sourceStream.on("finish", () => {
- resolve()
- })
- sourceStream.on("error", (err) => {
- reject(err)
- })
- })
-
- const job = new MultiqualityHLSJob({
- input: sourcePath,
- outputDir: jobPath,
- levels: [
- {
- original: true,
- codec: "libx264",
- bitrate: "10M",
- preset: "ultrafast",
- },
- {
- codec: "libx264",
- width: 1280,
- bitrate: "3M",
- preset: "ultrafast",
- }
- ]
- })
-
- await new Promise((resolve, reject) => {
- job
- .on("error", (err) => {
- console.error(`[TRANSMUX] Transmuxing failed`, err)
- reject(err)
- })
- .on("end", () => {
- console.debug(`[TRANSMUX] Finished transmuxing > ${sourcePath}`)
- resolve()
- })
- .run()
- })
-
- const result = await standardUpload({
- isDirectory: true,
- source: path.join(jobPath, "hls"),
- remotePath: `${req.auth.session.user_id}/jobs/${jobId}`,
- })
-
- fs.rmSync(jobPath, { recursive: true, force: true })
-
- return {
- result
- }
- }
-}
\ No newline at end of file
diff --git a/packages/server/services/files/routes/upload/chunk/post.js b/packages/server/services/files/routes/upload/chunk/post.js
index 63ff90fd..9fd08b53 100644
--- a/packages/server/services/files/routes/upload/chunk/post.js
+++ b/packages/server/services/files/routes/upload/chunk/post.js
@@ -1,21 +1,12 @@
-import { Duplex } from "node:stream"
import path from "node:path"
import fs from "node:fs"
-import RemoteUpload from "@services/remoteUpload"
-import {
- checkChunkUploadHeaders,
- handleChunkFile,
-} from "@classes/ChunkFileUpload"
+
+import { checkChunkUploadHeaders, handleChunkFile } from "@classes/ChunkFile"
+import Upload from "@shared-classes/Upload"
+import bufferToStream from "@shared-utils/bufferToStream"
const availableProviders = ["b2", "standard"]
-function bufferToStream(bf) {
- let tmp = new Duplex()
- tmp.push(bf)
- tmp.push(null)
- return tmp
-}
-
export default {
useContext: ["cache", "limits"],
middlewares: ["withAuthentication"],
@@ -25,14 +16,16 @@ export default {
return
}
- const uploadId = `${req.headers["uploader-file-id"]}_${Date.now()}`
+ const uploadId = `${req.headers["uploader-file-id"]}`
- const tmpPath = path.resolve(
+ const workPath = path.resolve(
this.default.contexts.cache.constructor.cachePath,
- req.auth.session.user_id,
+ `${req.auth.session.user_id}-${uploadId}`,
)
+ const chunksPath = path.join(workPath, "chunks")
+ const assembledPath = path.join(workPath, "assembled")
- const limits = {
+ const config = {
maxFileSize:
parseInt(this.default.contexts.limits.maxFileSizeInMB) *
1024 *
@@ -42,93 +35,91 @@ export default {
1024 *
1024,
useCompression: true,
- useProvider: "standard",
+ useProvider: req.headers["use-provider"] ?? "standard",
}
// const user = await req.auth.user()
-
// if (user.roles.includes("admin")) {
// // maxFileSize for admins 100GB
// limits.maxFileSize = 100 * 1024 * 1024 * 1024
-
// // optional compression for admins
// limits.useCompression = req.headers["use-compression"] ?? false
-
// limits.useProvider = req.headers["provider-type"] ?? "b2"
// }
// check if provider is valid
- if (!availableProviders.includes(limits.useProvider)) {
+ if (!availableProviders.includes(config.useProvider)) {
throw new OperationError(400, "Invalid provider")
}
- // create a readable stream from req.body(buffer)
+ await fs.promises.mkdir(workPath, { recursive: true })
+ await fs.promises.mkdir(chunksPath, { recursive: true })
+ await fs.promises.mkdir(assembledPath, { recursive: true })
+
+ // create a readable stream
const dataStream = bufferToStream(await req.buffer())
- let result = await handleChunkFile(dataStream, {
- tmpDir: tmpPath,
+ let assemble = await handleChunkFile(dataStream, {
+ chunksPath: chunksPath,
+ outputDir: assembledPath,
headers: req.headers,
- maxFileSize: limits.maxFileSize,
- maxChunkSize: limits.maxChunkSize,
+ maxFileSize: config.maxFileSize,
+ maxChunkSize: config.maxChunkSize,
})
- if (typeof result === "function") {
+ if (typeof assemble === "function") {
try {
- result = await result()
+ assemble = await assemble()
- if (req.headers["transmux"] || limits.useCompression === true) {
- // add a background task
+ let transformations = req.headers["transformations"]
+
+ if (transformations) {
+ transformations = transformations
+ .split(",")
+ .map((t) => t.trim())
+ }
+
+ const payload = {
+ user_id: req.auth.session.user_id,
+ uploadId: uploadId,
+ filePath: assemble.filePath,
+ workPath: workPath,
+ transformations: transformations,
+ s3Provider: config.useProvider,
+ useCompression: config.useCompression,
+ }
+
+ // if there are transformations or compression is enabled, use a background job
+ if (
+ (transformations && transformations.length > 0) ||
+ config.useCompression
+ ) {
const job = await global.queues.createJob(
- "remote_upload",
- {
- filePath: result.filePath,
- parentDir: req.auth.session.user_id,
- service: limits.useProvider,
- useCompression: limits.useCompression,
- transmux: req.headers["transmux"] ?? false,
- transmuxOptions: req.headers["transmux-options"],
- cachePath: tmpPath,
- },
+ "file-process",
+ payload,
{
useSSE: true,
},
)
- const sseChannelId = job.sseChannelId
-
return {
- uploadId: uploadId,
- sseChannelId: sseChannelId,
- eventChannelURL: `${req.headers["x-forwarded-proto"] || req.protocol}://${req.get("host")}/upload/sse_events/${sseChannelId}`,
+ uploadId: payload.uploadId,
+ sseChannelId: job.sseChannelId,
+ sseUrl: `${req.headers["x-forwarded-proto"] || req.protocol}://${req.get("host")}/upload/sse_events/${job.sseChannelId}`,
}
- } else {
- const result = await RemoteUpload({
- source: result.filePath,
- parentDir: req.auth.session.user_id,
- service: limits.useProvider,
- useCompression: limits.useCompression,
- cachePath: tmpPath,
- })
-
- return result
}
- } catch (error) {
- await fs.promises
- .rm(tmpPath, { recursive: true, force: true })
- .catch(() => {
- return false
- })
- throw new OperationError(
- error.code ?? 500,
- error.message ?? "Failed to upload file",
- )
+ return await Upload.fileHandle(payload)
+ } catch (error) {
+ await fs.promises.rm(workPath, { recursive: true })
+ throw error
}
}
return {
- ok: 1,
+ next: true,
chunkNumber: req.headers["uploader-chunk-number"],
+ config: config,
}
},
}
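
Note (editor's sketch, not part of the patch): when the final chunk triggers a background job, the route above now answers with { uploadId, sseChannelId, sseUrl } instead of the old eventChannelURL. Below is a minimal client-side sketch that consumes that channel with the standard EventSource API; the message fields (event, result) are assumptions about what the job publishes, not something this diff guarantees.

    // Editor's sketch (not part of the patch): waits on the SSE channel returned
    // by the final chunk response of /upload/chunk.
    function waitForProcessing(finalChunkResponse) {
        return new Promise((resolve, reject) => {
            const source = new EventSource(finalChunkResponse.sseUrl)

            source.onerror = (error) => {
                source.close()
                reject(error)
            }

            source.onmessage = (message) => {
                const payload = JSON.parse(message.data)

                // assumed terminal events; anything else is treated as progress
                if (payload.event === "done") {
                    source.close()
                    resolve(payload.result)
                } else if (payload.event === "error") {
                    source.close()
                    reject(payload.result)
                } else {
                    console.log("[upload] processing >", payload)
                }
            }
        })
    }
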
diff --git a/packages/server/services/files/routes/upload/file/post.js b/packages/server/services/files/routes/upload/file/post.js
index 37a0a687..b4c79071 100644
--- a/packages/server/services/files/routes/upload/file/post.js
+++ b/packages/server/services/files/routes/upload/file/post.js
@@ -1,48 +1,50 @@
import path from "node:path"
import fs from "node:fs"
-import RemoteUpload from "@services/remoteUpload"
+import Upload from "@shared-classes/Upload"
export default {
- useContext: ["cache"],
- middlewares: [
- "withAuthentication",
- ],
- fn: async (req, res) => {
- const { cache } = this.default.contexts
+ useContext: ["cache"],
+ middlewares: ["withAuthentication"],
+ fn: async (req, res) => {
+ const workPath = path.resolve(
+ this.default.contexts.cache.constructor.cachePath,
+ `${req.auth.session.user_id}-${nanoid()}`,
+ )
- const providerType = req.headers["provider-type"] ?? "standard"
+ await fs.promises.mkdir(workPath, { recursive: true })
- const userPath = path.join(cache.constructor.cachePath, req.auth.session.user_id)
+ let localFilepath = null
- let localFilepath = null
- let tmpPath = path.resolve(userPath, `${Date.now()}`)
+ await req.multipart(async (field) => {
+ if (!field.file) {
+ throw new OperationError(400, "Missing file")
+ }
- await req.multipart(async (field) => {
- if (!field.file) {
- throw new OperationError(400, "Missing file")
- }
+ localFilepath = path.join(workPath, "file")
- localFilepath = path.join(tmpPath, field.file.name)
+ await field.write(localFilepath)
+ })
- const existTmpDir = await fs.promises.stat(tmpPath).then(() => true).catch(() => false)
+ let transformations = req.headers["transformations"]
- if (!existTmpDir) {
- await fs.promises.mkdir(tmpPath, { recursive: true })
- }
+ if (transformations) {
+ transformations = transformations.split(",").map((t) => t.trim())
+ }
- await field.write(localFilepath)
- })
+ const result = await Upload.fileHandle({
+ user_id: req.auth.session.user_id,
+ filePath: localFilepath,
+ workPath: workPath,
+ transformations: transformations,
+ })
- const result = await RemoteUpload({
- parentDir: req.auth.session.user_id,
- source: localFilepath,
- service: providerType,
- useCompression: ToBoolean(req.headers["use-compression"]) ?? true,
- })
+ res.header("deprecated", "true")
+ res.header(
+ "deprecation-replacement",
+ "Use the new chunked upload API endpoint",
+ )
- fs.promises.rm(tmpPath, { recursive: true, force: true })
-
- return result
- }
+ return result
+ },
}
diff --git a/packages/server/services/files/services/post-process/audio/index.js b/packages/server/services/files/services/post-process/audio/index.js
deleted file mode 100755
index 5794e567..00000000
--- a/packages/server/services/files/services/post-process/audio/index.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const ffmpeg = require("fluent-ffmpeg")
-
-export default async (file) => {
- // analize metadata
- let metadata = await new Promise((resolve, reject) => {
- ffmpeg.ffprobe(file.filepath, (err, data) => {
- if (err) {
- return reject(err)
- }
-
- resolve(data)
- })
- }).catch((err) => {
- console.error(err)
-
- return {}
- })
-
- if (metadata.format) {
- metadata = metadata.format
- }
-
- file.metadata = {
- duration: metadata.duration,
- bitrate: metadata.bit_rate,
- size: metadata.size,
- }
-
- return file
-}
\ No newline at end of file
diff --git a/packages/server/services/files/services/post-process/image/index.js b/packages/server/services/files/services/post-process/image/index.js
deleted file mode 100755
index 9ba1aacc..00000000
--- a/packages/server/services/files/services/post-process/image/index.js
+++ /dev/null
@@ -1,59 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-import Sharp from "sharp"
-
-const imageProcessingConf = {
- // TODO: Get image sizeThreshold from DB
- sizeThreshold: 10 * 1024 * 1024,
- // TODO: Get image quality from DB
- imageQuality: 80,
-}
-
-const imageTypeToConfig = {
- png: {
- compressionLevel: Math.floor(imageProcessingConf.imageQuality / 100),
- },
- default: {
- quality: imageProcessingConf.imageQuality
- }
-}
-
-/**
- * Processes an image file and transforms it if it's above a certain size threshold.
- *
- * @async
- * @function
- * @param {Object} file - The file to be processed.
- * @param {string} file.filepath - The path of the file to be processed.
- * @param {string} file.hash - The hash of the file to be processed.
- * @param {string} file.cachePath - The cache path of the file to be processed.
- * @throws {Error} If the file parameter is not provided.
- * @return {Object} The processed file object.
- */
-async function processImage(file) {
- if (!file) {
- throw new Error("file is required")
- }
-
- const stat = await fs.promises.stat(file.filepath)
-
- if (stat.size < imageProcessingConf.sizeThreshold) {
- return file
- }
-
- let image = await Sharp(file.filepath)
-
- const { format } = await image.metadata()
-
- image = await image[format](imageTypeToConfig[format] ?? imageTypeToConfig.default).withMetadata()
-
- const outputFilepath = path.resolve(file.cachePath, `${file.hash}_transformed.${format}`)
-
- await image.toFile(outputFilepath)
-
- file.filepath = outputFilepath
-
- return file
-}
-
-export default processImage
\ No newline at end of file
diff --git a/packages/server/services/files/services/post-process/index.js b/packages/server/services/files/services/post-process/index.js
deleted file mode 100755
index 17726714..00000000
--- a/packages/server/services/files/services/post-process/index.js
+++ /dev/null
@@ -1,53 +0,0 @@
-import fs from "node:fs"
-import mimetypes from "mime-types"
-
-import processVideo from "./video"
-import processImage from "./image"
-import processAudio from "./audio"
-
-const fileTransformer = {
- // video
- "video/avi": processVideo,
- "video/quicktime": processVideo,
- "video/mp4": processVideo,
- "video/webm": processVideo,
- //image
- "image/jpeg": processImage,
- "image/png": processImage,
- "image/gif": processImage,
- "image/bmp": processImage,
- "image/tiff": processImage,
- "image/webp": processImage,
- "image/jfif": processImage,
- // audio
- "audio/flac": processAudio,
- "audio/x-flac": processAudio,
- "audio/mp3": processAudio,
- "audio/x-mp3": processAudio,
- "audio/mpeg": processAudio,
- "audio/x-mpeg": processAudio,
- "audio/ogg": processAudio,
- "audio/x-ogg": processAudio,
- "audio/wav": processAudio,
- "audio/x-wav": processAudio,
-}
-
-export default async (file) => {
- if (!file) {
- throw new Error("file is required")
- }
-
- if (!fs.existsSync(file.filepath)) {
- throw new Error(`File ${file.filepath} not found`)
- }
-
- const fileMimetype = mimetypes.lookup(file.filepath)
-
- if (typeof fileTransformer[fileMimetype] !== "function") {
- console.debug(`File (${file.filepath}) has mimetype ${fileMimetype} and will not be processed`)
-
- return file
- }
-
- return await fileTransformer[fileMimetype](file)
-}
\ No newline at end of file
diff --git a/packages/server/services/files/services/post-process/video/index.js b/packages/server/services/files/services/post-process/video/index.js
deleted file mode 100755
index b4f1b644..00000000
--- a/packages/server/services/files/services/post-process/video/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import videoTranscode from "@services/videoTranscode"
-
-/**
- * Processes a video file based on the specified options.
- *
- * @async
- * @param {Object} file - The video file to process.
- * @param {Object} [options={}] - The options object to use for processing.
- * @param {string} [options.videoCodec="libx264"] - The video codec to use.
- * @param {string} [options.format="mp4"] - The format to use.
- * @param {number} [options.audioBitrate=128] - The audio bitrate to use.
- * @param {number} [options.videoBitrate=2024] - The video bitrate to use.
- * @throws {Error} Throws an error if file parameter is not provided.
- * @return {Object} The processed video file object.
- */
-async function processVideo(file, options = {}) {
- if (!file) {
- throw new Error("file is required")
- }
-
- // TODO: Get values from db
- const {
- videoCodec = "libx264",
- format = "mp4",
- audioBitrate = 128,
- videoBitrate = 3000,
- } = options
-
- const result = await videoTranscode(file.filepath, {
- videoCodec,
- format,
- audioBitrate,
- videoBitrate: [videoBitrate, true],
- extraOptions: ["-threads 2"],
- })
-
- file.filepath = result.filepath
- file.filename = result.filename
-
- return file
-}
-
-export default processVideo
diff --git a/packages/server/services/files/services/remoteUpload/index.js b/packages/server/services/files/services/remoteUpload/index.js
deleted file mode 100644
index d2c904e0..00000000
--- a/packages/server/services/files/services/remoteUpload/index.js
+++ /dev/null
@@ -1,162 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-import mimeTypes from "mime-types"
-import getFileHash from "@shared-utils/readFileHash"
-
-import PostProcess from "../post-process"
-import Transmux from "../transmux"
-
-import StandardUpload from "./providers/standard"
-import B2Upload from "./providers/b2"
-
-export default async ({
- source,
- parentDir,
- service,
- useCompression,
- cachePath,
- transmux,
- transmuxOptions,
- isDirectory,
- onProgress,
-}) => {
- if (!source) {
- throw new OperationError(500, "source is required")
- }
-
- if (!service) {
- service = "standard"
- }
-
- if (!parentDir) {
- parentDir = "/"
- }
-
- if (transmuxOptions) {
- transmuxOptions = JSON.parse(transmuxOptions)
- }
-
- if (useCompression) {
- if (typeof onProgress === "function") {
- onProgress(10, {
- event: "post_processing",
- })
- }
-
- try {
- const processOutput = await PostProcess({
- filepath: source,
- cachePath: cachePath,
- })
-
- if (processOutput) {
- if (processOutput.filepath) {
- source = processOutput.filepath
- }
- }
- } catch (error) {
- console.error(error)
- throw new OperationError(500, `Failed to process file`)
- }
- }
-
- if (transmux) {
- if (typeof onProgress === "function") {
- onProgress(30, {
- event: "transmuxing",
- })
- }
-
- try {
- const processOutput = await Transmux({
- transmuxer: transmux,
- transmuxOptions: transmuxOptions,
- filepath: source,
- cachePath: cachePath,
- })
-
- if (processOutput) {
- if (processOutput.filepath) {
- source = processOutput.filepath
- }
-
- if (processOutput.isDirectory) {
- isDirectory = true
- }
- }
- } catch (error) {
- console.error(error)
- throw new OperationError(500, `Failed to transmux file`)
- }
- }
-
- const type = mimeTypes.lookup(path.basename(source))
- const hash = await getFileHash(fs.createReadStream(source))
-
- let fileId = `${hash}`
-
- // FIXME: This is a walkaround to avoid to hashing the entire directories
- if (isDirectory) {
- fileId = global.nanoid()
- }
-
- let remotePath = path.join(parentDir, fileId)
-
- let result = {}
-
- const metadata = {
- "Content-Type": type,
- "File-Hash": hash,
- }
-
- if (typeof onProgress === "function") {
- onProgress(80, {
- event: "uploading_s3",
- service: service,
- })
- }
-
- try {
- switch (service) {
- case "b2":
- if (!global.b2Storage) {
- throw new OperationError(
- 500,
- "B2 storage not configured on environment, unsupported service. Please use `standard` service.",
- )
- }
-
- result = await B2Upload({
- source: isDirectory ? path.dirname(source) : source,
- remotePath: remotePath,
- metadata: metadata,
- isDirectory: isDirectory,
- targetFilename: isDirectory ? path.basename(source) : null,
- })
- break
- case "standard":
- result = await StandardUpload({
- source: isDirectory ? path.dirname(source) : source,
- remotePath: remotePath,
- metadata: metadata,
- isDirectory: isDirectory,
- targetFilename: isDirectory ? path.basename(source) : null,
- })
- break
- default:
- throw new OperationError(500, "Unsupported service")
- }
- } catch (error) {
- console.error(error)
- throw new OperationError(500, "Failed to upload to storage")
- }
-
- if (typeof onProgress === "function") {
- onProgress(100, {
- event: "done",
- result: result,
- })
- }
-
- return result
-}
diff --git a/packages/server/services/files/services/remoteUpload/providers/b2/index.js b/packages/server/services/files/services/remoteUpload/providers/b2/index.js
deleted file mode 100644
index fb0c7c0e..00000000
--- a/packages/server/services/files/services/remoteUpload/providers/b2/index.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-import pMap from "p-map"
-
-export default async function b2Upload({
- source,
- remotePath,
- metadata = {},
- targetFilename,
- isDirectory,
- retryNumber = 0
-}) {
- if (isDirectory) {
- let files = await fs.promises.readdir(source)
-
- files = files.map((file) => {
- const filePath = path.join(source, file)
-
- const isTargetDirectory = fs.lstatSync(filePath).isDirectory()
-
- return {
- source: filePath,
- remotePath: path.join(remotePath, file),
- isDirectory: isTargetDirectory,
- }
- })
-
- await pMap(
- files,
- b2Upload,
- {
- concurrency: 5
- }
- )
-
- return {
- id: remotePath,
- url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}/${targetFilename}`,
- metadata: metadata,
- }
- }
-
- try {
- await global.b2Storage.authorize()
-
- if (!fs.existsSync(source)) {
- throw new OperationError(500, "File not found")
- }
-
- const uploadUrl = await global.b2Storage.getUploadUrl({
- bucketId: process.env.B2_BUCKET_ID,
- })
-
- console.debug(`Uploading object to B2 Storage >`, {
- source: source,
- remote: remotePath,
- })
-
- const data = await fs.promises.readFile(source)
-
- await global.b2Storage.uploadFile({
- uploadUrl: uploadUrl.data.uploadUrl,
- uploadAuthToken: uploadUrl.data.authorizationToken,
- fileName: remotePath,
- data: data,
- info: metadata
- })
- } catch (error) {
- console.error(error)
-
- if (retryNumber < 5) {
- return await b2Upload({
- source,
- remotePath,
- metadata,
- targetFilename,
- isDirectory,
- retryNumber: retryNumber + 1
- })
- }
-
- throw new OperationError(500, "B2 upload failed")
- }
-
- return {
- id: remotePath,
- url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}`,
- metadata: metadata,
- }
-}
\ No newline at end of file
diff --git a/packages/server/services/files/services/remoteUpload/providers/standard/index.js b/packages/server/services/files/services/remoteUpload/providers/standard/index.js
deleted file mode 100644
index 3dfa3fcf..00000000
--- a/packages/server/services/files/services/remoteUpload/providers/standard/index.js
+++ /dev/null
@@ -1,58 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-import pMap from "p-map"
-
-export default async function standardUpload({
- source,
- remotePath,
- metadata = {},
- targetFilename,
- isDirectory,
-}) {
- if (isDirectory) {
- let files = await fs.promises.readdir(source)
-
- files = files.map((file) => {
- const filePath = path.join(source, file)
-
- const isTargetDirectory = fs.lstatSync(filePath).isDirectory()
-
- return {
- source: filePath,
- remotePath: path.join(remotePath, file),
- isDirectory: isTargetDirectory,
- }
- })
-
- await pMap(
- files,
- standardUpload,
- {
- concurrency: 3
- }
- )
-
- return {
- id: remotePath,
- url: global.storage.composeRemoteURL(remotePath, targetFilename),
- metadata: metadata,
- }
- }
-
- console.debug(`Uploading object to S3 Minio >`, {
- source: source,
- remote: remotePath,
- })
-
- // upload to storage
- await global.storage.fPutObject(process.env.S3_BUCKET, remotePath, source, metadata)
-
- // compose url
- const url = global.storage.composeRemoteURL(remotePath)
-
- return {
- id: remotePath,
- url: url,
- metadata: metadata,
- }
-}
\ No newline at end of file
diff --git a/packages/server/services/files/services/transmux/index.js b/packages/server/services/files/services/transmux/index.js
deleted file mode 100644
index 1b4d3cae..00000000
--- a/packages/server/services/files/services/transmux/index.js
+++ /dev/null
@@ -1,108 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-
-import MultiqualityHLSJob from "@shared-classes/MultiqualityHLSJob"
-import SegmentedAudioMPDJob from "@shared-classes/SegmentedAudioMPDJob"
-
-const transmuxers = [
- {
- id: "mq-hls",
- container: "hls",
- extension: "m3u8",
- multipleOutput: true,
- buildCommand: (input, outputDir) => {
- return new MultiqualityHLSJob({
- input: input,
- outputDir: outputDir,
- outputMasterName: "master.m3u8",
- levels: [
- {
- original: true,
- codec: "libx264",
- bitrate: "10M",
- preset: "ultrafast",
- },
- {
- codec: "libx264",
- width: 1280,
- bitrate: "3M",
- preset: "ultrafast",
- }
- ]
- })
- },
- },
- {
- id: "a-dash",
- container: "dash",
- extension: "mpd",
- multipleOutput: true,
- buildCommand: (input, outputDir) => {
- return new SegmentedAudioMPDJob({
- input: input,
- outputDir: outputDir,
- outputMasterName: "master.mpd",
-
- audioCodec: "flac",
- //audioBitrate: "1600k",
- //audioSampleRate: 96000,
- segmentTime: 10,
- })
- }
- },
-]
-
-export default async (params) => {
- if (!params) {
- throw new Error("params is required")
- }
-
- if (!params.filepath) {
- throw new Error("filepath is required")
- }
-
- if (!params.cachePath) {
- throw new Error("cachePath is required")
- }
-
- if (!params.transmuxer) {
- throw new Error("transmuxer is required")
- }
-
- if (!fs.existsSync(params.filepath)) {
- throw new Error(`File ${params.filepath} not found`)
- }
-
- const transmuxer = transmuxers.find((item) => item.id === params.transmuxer)
-
- if (!transmuxer) {
- throw new Error(`Transmuxer ${params.transmuxer} not found`)
- }
-
- const jobPath = path.dirname(params.filepath)
-
- if (!fs.existsSync(path.dirname(jobPath))) {
- fs.mkdirSync(path.dirname(jobPath), { recursive: true })
- }
-
- return await new Promise((resolve, reject) => {
- try {
- const command = transmuxer.buildCommand(params.filepath, jobPath)
-
- command
- .on("progress", function (progress) {
- console.log("Processing: " + progress.percent + "% done")
- })
- .on("error", (err) => {
- reject(err)
- })
- .on("end", (data) => {
- resolve(data)
- })
- .run()
- } catch (error) {
- console.error(`[TRANSMUX] Transmuxing failed`, error)
- reject(error)
- }
- })
-}
\ No newline at end of file
diff --git a/packages/server/services/files/services/videoTranscode/index.js b/packages/server/services/files/services/videoTranscode/index.js
deleted file mode 100755
index 5d74f2a4..00000000
--- a/packages/server/services/files/services/videoTranscode/index.js
+++ /dev/null
@@ -1,98 +0,0 @@
-import path from "path"
-
-const ffmpeg = require("fluent-ffmpeg")
-
-const defaultParams = {
- audioBitrate: 128,
- videoBitrate: 1024,
- videoCodec: "libvpx",
- audioCodec: "libvorbis",
- format: "mp4",
-}
-
-const maxTasks = 5
-
-export default (input, params = defaultParams) => {
- return new Promise((resolve, reject) => {
- if (!global.ffmpegTasks) {
- global.ffmpegTasks = []
- }
-
- if (global.ffmpegTasks.length >= maxTasks) {
- return reject(new Error("Too many transcoding tasks"))
- }
-
- const outputFilename = `${path.basename(input).split(".")[0]}_ff.${params.format ?? "webm"}`
- const outputFilepath = `${path.dirname(input)}/${outputFilename}`
-
- console.debug(`[TRANSCODING] Transcoding ${input} to ${outputFilepath}`)
-
- const onEnd = async () => {
- console.debug(
- `[TRANSCODING] Finished transcode ${input} to ${outputFilepath}`,
- )
-
- return resolve({
- filename: outputFilename,
- filepath: outputFilepath,
- })
- }
-
- const onError = (err) => {
- console.error(
- `[TRANSCODING] Transcoding ${input} to ${outputFilepath} failed`,
- err,
- )
-
- return reject(err)
- }
-
- let exec = null
-
- const commands = {
- input: input,
- ...params,
- output: outputFilepath,
- outputOptions: ["-preset veryfast"],
- }
-
- // chain methods
- for (let key in commands) {
- if (exec === null) {
- exec = ffmpeg(commands[key])
- continue
- }
-
- if (key === "extraOptions" && Array.isArray(commands[key])) {
- for (const option of commands[key]) {
- exec = exec.inputOptions(option)
- }
-
- continue
- }
-
- if (key === "outputOptions" && Array.isArray(commands[key])) {
- for (const option of commands[key]) {
- exec = exec.outputOptions(option)
- }
-
- continue
- }
-
- if (typeof exec[key] !== "function") {
- console.warn(`[TRANSCODING] Method ${key} is not a function`)
- return false
- }
-
- if (Array.isArray(commands[key])) {
- exec = exec[key](...commands[key])
- } else {
- exec = exec[key](commands[key])
- }
-
- continue
- }
-
- exec.on("error", onError).on("end", onEnd).run()
- })
-}
diff --git a/packages/server/services/files/utils/downloadFFMPEG/index.js b/packages/server/services/files/utils/downloadFFMPEG/index.js
deleted file mode 100644
index 030a68d1..00000000
--- a/packages/server/services/files/utils/downloadFFMPEG/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-import fs from "node:fs"
-import os from "node:os"
-import axios from "axios"
-
-export default async (outputDir) => {
- const arch = os.arch()
-
- console.log(`Downloading ffmpeg for ${arch}...`)
- const baseURL = `https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-${arch}-static.tar.xz`
-
-
- const response = await axios.get(baseURL, {
- responseType: "stream"
- })
-
- const ffmpegPath = path.join(outputDir, `ffmpeg-${arch}.tar.xz`)
- const ffmpegFile = fs.createWriteStream(ffmpegPath)
-
- response.data.pipe(ffmpegFile)
-}
\ No newline at end of file
diff --git a/packages/server/services/files/utils/resolveUrl/index.js b/packages/server/services/files/utils/resolveUrl/index.js
deleted file mode 100755
index a9a33785..00000000
--- a/packages/server/services/files/utils/resolveUrl/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-export default (from, to) => {
- const resolvedUrl = new URL(to, new URL(from, "resolve://"))
-
- if (resolvedUrl.protocol === "resolve:") {
- let { pathname, search, hash } = resolvedUrl
-
- if (to.includes("@")) {
- const fromUrl = new URL(from)
- const toUrl = new URL(to, fromUrl.origin)
-
- pathname = toUrl.pathname
- search = toUrl.search
- hash = toUrl.hash
- }
-
- return pathname + search + hash
- }
-
- return resolvedUrl.toString()
-}
\ No newline at end of file
diff --git a/packages/server/services/files/utils/syncDirToRemote/index.js b/packages/server/services/files/utils/syncDirToRemote/index.js
deleted file mode 100755
index 7481c2a7..00000000
--- a/packages/server/services/files/utils/syncDirToRemote/index.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import fs from "fs"
-import path from "path"
-
-async function syncFolder(dir, destPath) {
- const files = await fs.promises.readdir(dir)
-
- for await (const file of files) {
- const filePath = path.resolve(dir, file)
- const desitinationFilePath = `${destPath}/${file}`
-
- const stat = fs.statSync(filePath)
-
- if (stat.isDirectory()) {
- await syncFolder(filePath, desitinationFilePath)
- } else {
- const fileContent = await fs.promises.readFile(filePath)
-
- await global.storage.putObject(process.env.S3_BUCKET, desitinationFilePath, fileContent)
- }
- }
-}
-
-export default syncFolder
\ No newline at end of file
diff --git a/packages/server/services/main/main.service.js b/packages/server/services/main/main.service.js
index 896a090d..817f6a22 100755
--- a/packages/server/services/main/main.service.js
+++ b/packages/server/services/main/main.service.js
@@ -7,9 +7,10 @@ import SharedMiddlewares from "@shared-middlewares"
export default class API extends Server {
static refName = "main"
- static enableWebsockets = true
+ static useEngine = "hyper-express-ng"
static routesPath = `${__dirname}/routes`
static listen_port = process.env.HTTP_LISTEN_PORT || 3000
+ static enableWebsockets = false
middlewares = {
...require("@middlewares").default,
@@ -26,8 +27,6 @@ export default class API extends Server {
await this.contexts.db.initialize()
await StartupDB()
}
-
- handleWsAuth = require("@shared-lib/handleWsAuth").default
}
Boot(API)
diff --git a/packages/server/services/main/package.json b/packages/server/services/main/package.json
index aaa6e4ae..0baaaa2d 100755
--- a/packages/server/services/main/package.json
+++ b/packages/server/services/main/package.json
@@ -1,7 +1,6 @@
{
- "name": "main",
- "version": "0.60.2",
- "dependencies": {
- "@octokit/rest": "^20.0.2"
- }
+ "name": "main",
+ "dependencies": {
+ "@octokit/rest": "^20.0.2"
+ }
}
diff --git a/packages/server/services/marketplace/classes/extension/methods/resolve.js b/packages/server/services/marketplace/classes/extension/methods/resolve.js
index cc61dcc5..ee542f04 100644
--- a/packages/server/services/marketplace/classes/extension/methods/resolve.js
+++ b/packages/server/services/marketplace/classes/extension/methods/resolve.js
@@ -1,24 +1,27 @@
import { Extension } from "@db_models"
export default async function resolve(payload) {
- let { user_id, pkg } = payload
+ let { user_id, pkg } = payload
- const [pkgName, pkgVersion] = pkg.split("@")
+ let [pkgName, pkgVersion] = pkg.split("@")
- if (!pkgVersion) {
- pkgVersion = "latest"
- }
+ if (!pkgVersion) {
+ pkgVersion = "latest"
+ }
- if (pkgVersion === "latest") {
- return await Extension.findOne({
- user_id,
- name: pkgName,
- }).sort({ version: -1 }).limit(1).exec()
- }
+ if (pkgVersion === "latest") {
+ return await Extension.findOne({
+ user_id,
+ name: pkgName,
+ })
+ .sort({ version: -1 })
+ .limit(1)
+ .exec()
+ }
- return await Extension.findOne({
- user_id,
- name: pkgName,
- version: pkgVersion,
- })
-}
\ No newline at end of file
+ return await Extension.findOne({
+ user_id,
+ name: pkgName,
+ version: pkgVersion,
+ })
+}
diff --git a/packages/server/services/marketplace/marketplace.service.js b/packages/server/services/marketplace/marketplace.service.js
index a01cd9da..a9d7b338 100755
--- a/packages/server/services/marketplace/marketplace.service.js
+++ b/packages/server/services/marketplace/marketplace.service.js
@@ -1,41 +1,47 @@
import { Server } from "linebridge"
-import B2 from "backblaze-b2"
import DbManager from "@shared-classes/DbManager"
import CacheService from "@shared-classes/CacheService"
+import StorageClient from "@shared-classes/StorageClient"
import SharedMiddlewares from "@shared-middlewares"
class API extends Server {
- static refName = "marketplace"
- static wsRoutesPath = `${__dirname}/ws_routes`
- static routesPath = `${__dirname}/routes`
- static listen_port = process.env.HTTP_LISTEN_PORT ?? 3005
+ static refName = "marketplace"
+ static useEngine = "hyper-express-ng"
+ static routesPath = `${__dirname}/routes`
+ static listen_port = process.env.HTTP_LISTEN_PORT ?? 3005
- middlewares = {
- ...SharedMiddlewares
- }
+ middlewares = {
+ ...SharedMiddlewares,
+ }
- contexts = {
- db: new DbManager(),
- b2: new B2({
- applicationKeyId: process.env.B2_KEY_ID,
- applicationKey: process.env.B2_APP_KEY,
- }),
- cache: new CacheService({
- fsram: false
- }),
- }
+ contexts = {
+ db: new DbManager(),
+ cache: new CacheService({
+ fsram: false,
+ }),
+ storage: StorageClient({
+ endPoint: process.env.B2_ENDPOINT,
+ cdnUrl: process.env.B2_CDN_ENDPOINT,
+ defaultBucket: process.env.B2_BUCKET,
+ accessKey: process.env.B2_KEY_ID,
+ secretKey: process.env.B2_APP_KEY,
+ port: 443,
+ useSSL: true,
+ setupBucket: false,
+ }),
+ }
- async onInitialize() {
- await this.contexts.db.initialize()
- await this.contexts.b2.authorize()
+ async onInitialize() {
+ await this.contexts.db.initialize()
+ await this.contexts.storage.initialize()
- global.cache = this.contexts.cache
- global.b2 = this.contexts.b2
- }
-
- handleWsAuth = require("@shared-lib/handleWsAuth").default
+ global.cache = this.contexts.cache
+ global.storages = {
+ standard: this.contexts.storage,
+ }
+ }
}
-Boot(API)
\ No newline at end of file
+Boot(API)
diff --git a/packages/server/services/marketplace/package.json b/packages/server/services/marketplace/package.json
index cff27833..9f272677 100755
--- a/packages/server/services/marketplace/package.json
+++ b/packages/server/services/marketplace/package.json
@@ -1,9 +1,6 @@
{
"name": "marketplace",
"dependencies": {
- "7zip-min": "^1.4.4",
- "backblaze-b2": "^1.7.0",
- "sucrase": "^3.32.0",
- "uglify-js": "^3.17.4"
+ "7zip-min": "^1.4.4"
}
}
diff --git a/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/get.js b/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/get.js
index a24347b4..f02f43c7 100644
--- a/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/get.js
+++ b/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/get.js
@@ -1,10 +1,10 @@
import ExtensionClass from "@classes/extension"
export default async (req) => {
- const { user_id, pkg } = req.params
+ const { user_id, pkg } = req.params
- return await ExtensionClass.resolveManifest({
- user_id,
- pkg,
- })
-}
\ No newline at end of file
+ return await ExtensionClass.resolve({
+ user_id,
+ pkg,
+ })
+}
diff --git a/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/main/get.js b/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/main/get.js
deleted file mode 100644
index 70a190ba..00000000
--- a/packages/server/services/marketplace/routes/extensions/[user_id]/[pkg]/main/get.js
+++ /dev/null
@@ -1,12 +0,0 @@
-import ExtensionClass from "@classes/extension"
-
-export default async (req, res) => {
- const { user_id, pkg } = req.params
-
- const manifest = await ExtensionClass.resolve({
- user_id,
- pkg,
- })
-
- return manifest
-}
\ No newline at end of file
diff --git a/packages/server/services/marketplace/routes/extensions/list/get.js b/packages/server/services/marketplace/routes/extensions/list/get.js
index 679325da..ae6c5cc9 100644
--- a/packages/server/services/marketplace/routes/extensions/list/get.js
+++ b/packages/server/services/marketplace/routes/extensions/list/get.js
@@ -1,7 +1,33 @@
import { Extension } from "@db_models"
export default async (req) => {
- const extensions = await Extension.find()
+ const { limit = 10, offset = 0 } = req.query
- return extensions
+ const totalItems = await Extension.countDocuments()
+
+ const extensions = await Extension.aggregate([
+ {
+ $sort: { registryId: 1, version: -1 },
+ },
+ {
+ $group: {
+ _id: "$registryId",
+ doc: { $first: "$$ROOT" },
+ },
+ },
+ {
+ $replaceRoot: { newRoot: "$doc" },
+ },
+ {
+ $skip: parseInt(offset),
+ },
+ {
+ $limit: parseInt(limit),
+ },
+ ])
+
+ return {
+ items: extensions,
+ total_items: totalItems,
+ }
}
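
Note (editor's sketch, not part of the patch): the new pipeline sorts by registryId and descending version, groups on registryId keeping only the first document, and then applies offset/limit, so each extension is listed once with its highest version. Roughly the same behaviour over an in-memory array is shown below for reference; comparing version strings this way is lexicographic, matching a $sort on a plain string field.

    // Editor's sketch (not part of the patch): plain-JavaScript equivalent of the
    // aggregation in routes/extensions/list/get.js.
    function latestPerRegistry(extensions, { limit = 10, offset = 0 } = {}) {
        const newest = new Map()

        // $sort { registryId: 1, version: -1 } + $group/$first:
        // keep the document with the greatest version per registryId
        for (const ext of extensions) {
            const current = newest.get(ext.registryId)

            if (!current || ext.version > current.version) {
                newest.set(ext.registryId, ext)
            }
        }

        // $skip / $limit over the deduplicated set
        const items = [...newest.values()].slice(offset, offset + limit)

        // total_items mirrors Extension.countDocuments(): it counts every stored version
        return { items, total_items: extensions.length }
    }
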
diff --git a/packages/server/services/marketplace/routes/extensions/publish/put.js b/packages/server/services/marketplace/routes/extensions/publish/put.js
index 18337084..e070e038 100644
--- a/packages/server/services/marketplace/routes/extensions/publish/put.js
+++ b/packages/server/services/marketplace/routes/extensions/publish/put.js
@@ -4,47 +4,7 @@ import fs from "node:fs"
import path from "node:path"
import sevenzip from "7zip-min"
-async function uploadFolderToB2(bucketId, folderPath, b2Directory) {
- try {
- const uploadFiles = async (dir) => {
- const files = fs.readdirSync(dir)
-
- for (const file of files) {
- const fullPath = path.join(dir, file)
- const stats = fs.statSync(fullPath)
-
- if (stats.isDirectory()) {
- await uploadFiles(fullPath)
- } else {
- const fileData = fs.readFileSync(fullPath)
- const b2FileName = path
- .join(b2Directory, path.relative(folderPath, fullPath))
- .replace(/\\/g, "/")
-
- console.log(`Uploading ${b2FileName}...`)
-
- const uploadUrl = await b2.getUploadUrl({
- bucketId: bucketId,
- })
-
- await b2.uploadFile({
- uploadUrl: uploadUrl.data.uploadUrl,
- uploadAuthToken: uploadUrl.data.authorizationToken,
- fileName: b2FileName,
- data: fileData,
- })
-
- console.log(`Uploaded ${b2FileName}`)
- }
- }
- }
-
- await uploadFiles(folderPath)
- console.log("All files uploaded successfully.")
- } catch (error) {
- console.error("Error uploading folder:", error)
- }
-}
+import putObject from "@shared-classes/Upload/putObject"
export default {
middlewares: ["withAuthentication"],
@@ -62,7 +22,7 @@ export default {
pkg = JSON.parse(pkg)
const { user_id } = req.auth.session
- const registryId = `${user_id}/${pkg.name}@${pkg.version}`
+ const registryId = `${user_id}/${pkg.name}`
const s3Path = `extensions/${pkg.name}/${pkg.version}`
const workPath = path.resolve(
@@ -84,7 +44,7 @@ export default {
let extensionRegistry = await Extension.findOne({
user_id: user_id,
- registryId: registryId,
+ name: pkg.name,
version: pkg.version,
})
@@ -116,16 +76,20 @@ export default {
})
})
- await uploadFolderToB2(process.env.B2_BUCKET_ID, pkgPath, s3Path)
+ await putObject({
+ filePath: pkgPath,
+ uploadPath: s3Path,
+ })
fs.promises.rm(workPath, { recursive: true, force: true })
- const assetsUrl = `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${s3Path}`
+ const assetsUrl = `${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${s3Path}`
extensionRegistry = await Extension.create({
user_id: user_id,
name: pkg.name,
version: pkg.version,
+ description: pkg.description,
registryId: registryId,
assetsUrl: assetsUrl,
srcUrl: `${assetsUrl}/src`,
diff --git a/packages/server/services/marketplace/utils/b2Upload/index.js b/packages/server/services/marketplace/utils/b2Upload/index.js
deleted file mode 100644
index 82f80239..00000000
--- a/packages/server/services/marketplace/utils/b2Upload/index.js
+++ /dev/null
@@ -1,86 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-import pMap from "p-map"
-
-export default async function b2Upload({
- source,
- remotePath,
- metadata = {},
- targetFilename,
- isDirectory,
- retryNumber = 0,
-}) {
- if (isDirectory) {
- let files = await fs.promises.readdir(source)
-
- files = files.map((file) => {
- const filePath = path.join(source, file)
-
- const isTargetDirectory = fs.lstatSync(filePath).isDirectory()
-
- return {
- source: filePath,
- remotePath: path.join(remotePath, file),
- isDirectory: isTargetDirectory,
- }
- })
-
- await pMap(files, b2Upload, {
- concurrency: 5,
- })
-
- return {
- id: remotePath,
- url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}/${targetFilename}`,
- metadata: metadata,
- }
- }
-
- try {
- //await global.b2.authorize()
-
- if (!fs.existsSync(source)) {
- throw new OperationError(500, "File not found")
- }
-
- const uploadUrl = await global.b2.getUploadUrl({
- bucketId: process.env.B2_BUCKET_ID,
- })
-
- console.debug(`Uploading object to B2 Storage >`, {
- source: source,
- remote: remotePath,
- })
-
- const data = await fs.promises.readFile(source)
-
- await global.b2.uploadFile({
- uploadUrl: uploadUrl.data.uploadUrl,
- uploadAuthToken: uploadUrl.data.authorizationToken,
- fileName: remotePath,
- data: data,
- info: metadata,
- })
- } catch (error) {
- console.error(error)
-
- if (retryNumber < 5) {
- return await b2Upload({
- source,
- remotePath,
- metadata,
- targetFilename,
- isDirectory,
- retryNumber: retryNumber + 1,
- })
- }
-
- throw new OperationError(500, "B2 upload failed")
- }
-
- return {
- id: remotePath,
- url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}`,
- metadata: metadata,
- }
-}
diff --git a/packages/server/services/marketplace/utils/syncDirToRemote/index.js b/packages/server/services/marketplace/utils/syncDirToRemote/index.js
deleted file mode 100755
index 7481c2a7..00000000
--- a/packages/server/services/marketplace/utils/syncDirToRemote/index.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import fs from "fs"
-import path from "path"
-
-async function syncFolder(dir, destPath) {
- const files = await fs.promises.readdir(dir)
-
- for await (const file of files) {
- const filePath = path.resolve(dir, file)
- const desitinationFilePath = `${destPath}/${file}`
-
- const stat = fs.statSync(filePath)
-
- if (stat.isDirectory()) {
- await syncFolder(filePath, desitinationFilePath)
- } else {
- const fileContent = await fs.promises.readFile(filePath)
-
- await global.storage.putObject(process.env.S3_BUCKET, desitinationFilePath, fileContent)
- }
- }
-}
-
-export default syncFolder
\ No newline at end of file
diff --git a/packages/server/services/music/classes/release/index.js b/packages/server/services/music/classes/release/index.js
index d1c10676..b4740bbb 100644
--- a/packages/server/services/music/classes/release/index.js
+++ b/packages/server/services/music/classes/release/index.js
@@ -1,86 +1,144 @@
-import { MusicRelease, User } from "@db_models"
+import { MusicRelease, Track } from "@db_models"
+import TrackClass from "../track"
const AllowedUpdateFields = [
- "title",
- "cover",
- "album",
- "artist",
- "type",
- "public",
- "list",
+ "title",
+ "cover",
+ "album",
+ "artist",
+ "type",
+ "public",
+ "items",
]
export default class Release {
- static async create(payload) {
- console.log(payload)
- if (!payload.title) {
- throw new OperationError(400, "Release title is required")
- }
+ // TODO: implement pagination
+ static async data(id, { user_id = null, limit = 10, offset = 0 } = {}) {
+ let release = await MusicRelease.findOne({
+ _id: id,
+ })
- if (!payload.list) {
- throw new OperationError(400, "Release list is required")
- }
+ if (!release) {
+ throw new OperationError(404, "Release not found")
+ }
- // ensure list is an array of strings with tracks ids only
- payload.list = payload.list.map((item) => {
- if (typeof item !== "string") {
- item = item._id
- }
+ release = release.toObject()
- return item
- })
+ const items = release.items ?? release.list
- const release = new MusicRelease({
- user_id: payload.user_id,
- created_at: Date.now(),
- title: payload.title,
- cover: payload.cover,
- explicit: payload.explicit,
- type: payload.type,
- public: payload.public,
- list: payload.list,
- public: payload.public,
- })
+ const totalTracks = await Track.countDocuments({
+ _id: items,
+ })
- await release.save()
+ const tracks = await TrackClass.get(items, {
+ user_id: user_id,
+ onlyList: true,
+ })
- return release
- }
+ release.total_items = totalTracks
+ release.items = tracks
- static async update(id, payload) {
- let release = await MusicRelease.findById(id).catch((err) => {
- return false
- })
+ return release
+ }
- if (!release) {
- throw new OperationError(404, "Release not found")
- }
+ static async create(payload) {
+ if (!payload.title) {
+ throw new OperationError(400, "Release title is required")
+ }
- if (release.user_id !== payload.user_id) {
- throw new PermissionError(403, "You dont have permission to edit this release")
- }
+ if (!payload.items) {
+ throw new OperationError(400, "Release items is required")
+ }
- for (const field of AllowedUpdateFields) {
- if (payload[field]) {
- release[field] = payload[field]
- }
- }
+ // ensure list is an array of strings with tracks ids only
+ payload.items = payload.items.map((item) => {
+ return item._id ?? item
+ })
- // ensure list is an array of strings with tracks ids only
- release.list = release.list.map((item) => {
- if (typeof item !== "string") {
- item = item._id
- }
+ const release = new MusicRelease({
+ user_id: payload.user_id,
+ created_at: Date.now(),
+ title: payload.title,
+ cover: payload.cover,
+ explicit: payload.explicit,
+ type: payload.type,
+ public: payload.public,
+ items: payload.items,
+ public: payload.public,
+ })
- return item
- })
+ await release.save()
- release = await MusicRelease.findByIdAndUpdate(id, release)
+ return release
+ }
- return release
- }
+ static async update(id, payload) {
+ let release = await MusicRelease.findById(id).catch((err) => {
+ return false
+ })
- static async fullfillItemData(release) {
- return release
- }
-}
\ No newline at end of file
+ if (!release) {
+ throw new OperationError(404, "Release not found")
+ }
+
+ if (release.user_id !== payload.user_id) {
+ throw new PermissionError(
+ 403,
+ "You dont have permission to edit this release",
+ )
+ }
+
+ for (const field of AllowedUpdateFields) {
+ if (typeof payload[field] !== "undefined") {
+ release[field] = payload[field]
+ }
+ }
+
+		// ensure items is an array of track ids (strings) only
+ release.items = release.items.map((item) => {
+ return item._id ?? item
+ })
+
+ await MusicRelease.findByIdAndUpdate(id, release)
+
+ return release
+ }
+
+ static async delete(id, payload = {}) {
+ let release = await MusicRelease.findById(id).catch((err) => {
+ return false
+ })
+
+ if (!release) {
+ throw new OperationError(404, "Release not found")
+ }
+
+ // check permission
+ if (release.user_id !== payload.user_id) {
+ throw new PermissionError(
+ 403,
+ "You dont have permission to edit this release",
+ )
+ }
+
+ const items = release.items ?? release.list
+
+ const items_ids = items.map((item) => item._id)
+
+		// delete all related tracks
+ await Track.deleteMany({
+ _id: { $in: items_ids },
+ })
+
+ // delete release
+ await MusicRelease.deleteOne({
+ _id: id,
+ })
+
+ return release
+ }
+
+ static async fullfillItemData(release) {
+ return release
+ }
+}
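A minimal usage sketch of the refactored Release class above, assuming it is imported through the service's @classes/release alias as the routes later in this diff do; the title, type, and track ids are illustrative placeholders, not values from the diff.

import ReleaseClass from "@classes/release"

async function publishRelease(user_id, trackIds) {
	// create a release whose items are track ids (plain strings or { _id } objects)
	const release = await ReleaseClass.create({
		user_id: user_id,
		title: "Demo EP",
		type: "ep",
		public: true,
		items: trackIds,
	})

	// read it back with resolved track objects and total_items populated
	return await ReleaseClass.data(release._id, { user_id })
}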
diff --git a/packages/server/services/music/classes/track/methods/create.js b/packages/server/services/music/classes/track/methods/create.js
index 2faca6aa..86d7daae 100644
--- a/packages/server/services/music/classes/track/methods/create.js
+++ b/packages/server/services/music/classes/track/methods/create.js
@@ -1,58 +1,52 @@
import { Track } from "@db_models"
import requiredFields from "@shared-utils/requiredFields"
-import MusicMetadata from "music-metadata"
-import axios from "axios"
+import * as FFMPEGLib from "@shared-classes/FFMPEGLib"
import ModifyTrack from "./modify"
export default async (payload = {}) => {
- requiredFields(["title", "source", "user_id"], payload)
+ if (typeof payload.title !== "string") {
+ payload.title = undefined
+ }
- let stream = null
- let headers = null
+ if (typeof payload.album !== "string") {
+ payload.album = undefined
+ }
+
+ if (typeof payload.artist !== "string") {
+ payload.artist = undefined
+ }
+
+ if (typeof payload.cover !== "string") {
+ payload.cover = undefined
+ }
+
+ if (typeof payload.source !== "string") {
+ payload.source = undefined
+ }
+
+ if (typeof payload.user_id !== "string") {
+ payload.user_id = undefined
+ }
+
+ requiredFields(["title", "source", "user_id"], payload)
if (typeof payload._id === "string") {
return await ModifyTrack(payload._id, payload)
}
- let metadata = Object()
+ const probe = await FFMPEGLib.Utils.probe(payload.source)
- try {
- const sourceStream = await axios({
- url: payload.source,
- method: "GET",
- responseType: "stream",
- })
-
- stream = sourceStream.data
- headers = sourceStream.headers
-
- const streamMetadata = await MusicMetadata.parseStream(stream, {
- mimeType: headers["content-type"],
- })
-
- metadata = {
- ...metadata,
- format: streamMetadata.format.codec,
- channels: streamMetadata.format.numberOfChannels,
- sampleRate: streamMetadata.format.sampleRate,
- bits: streamMetadata.format.bitsPerSample,
- lossless: streamMetadata.format.lossless,
- duration: streamMetadata.format.duration,
-
- title: streamMetadata.common.title,
- artists: streamMetadata.common.artists,
- album: streamMetadata.common.album,
- }
- } catch (error) {
- // sowy :(
- }
-
- if (typeof payload.metadata === "object") {
- metadata = {
- ...metadata,
- ...payload.metadata,
- }
+ let metadata = {
+ format: probe.streams[0].codec_name,
+ channels: probe.streams[0].channels,
+ bitrate: probe.streams[0].bit_rate ?? probe.format.bit_rate,
+ sampleRate: probe.streams[0].sample_rate,
+ bits:
+ probe.streams[0].bits_per_sample ??
+ probe.streams[0].bits_per_raw_sample,
+ duration: probe.format.duration,
+ tags: probe.format.tags ?? {},
}
if (metadata.format) {
@@ -68,53 +62,28 @@ export default async (payload = {}) => {
}
const obj = {
- title: payload.title,
- album: payload.album,
- cover: payload.cover,
- artists: [],
+ title: payload.title ?? metadata.tags["Title"],
+ album: payload.album ?? metadata.tags["Album"],
+ artist: payload.artist ?? metadata.tags["Artist"],
+ cover:
+ payload.cover ??
+ "https://storage.ragestudio.net/comty-static-assets/default_song.png",
source: payload.source,
metadata: metadata,
- lyrics_enabled: payload.lyrics_enabled,
}
if (Array.isArray(payload.artists)) {
- obj.artists = payload.artists
+ obj.artist = payload.artists.join(", ")
}
- if (typeof payload.artists === "string") {
- obj.artists.push(payload.artists)
- }
+ let track = new Track({
+ ...obj,
+ publisher: {
+ user_id: payload.user_id,
+ },
+ })
- if (typeof payload.artist === "string") {
- obj.artists.push(payload.artist)
- }
+ await track.save()
- if (obj.artists.length === 0 || !obj.artists) {
- obj.artists = metadata.artists
- }
-
- let track = null
-
- if (payload._id) {
- track = await Track.findById(payload._id)
-
- if (!track) {
- throw new OperationError(404, "Track not found, cannot update")
- }
-
- throw new OperationError(501, "Not implemented")
- } else {
- track = new Track({
- ...obj,
- publisher: {
- user_id: payload.user_id,
- },
- })
-
- await track.save()
- }
-
- track = track.toObject()
-
- return track
+ return track.toObject()
}
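A hedged sketch of calling the rewritten create method through the track class, as the routes in this diff do: the source is now probed with FFMPEGLib instead of being streamed through music-metadata, and FFMPEGLib.Utils.probe is assumed to return ffprobe-style JSON ({ streams, format }) as the mapping above implies. The URL and user id are placeholders.

import TrackClass from "@classes/track"

const track = await TrackClass.create({
	title: "Demo",
	source: "https://cdn.example.com/audio.flac",
	user_id: "someUserId",
	// album, artist and cover fall back to the probe tags or a default cover when omitted
})

console.log(track.metadata.format, track.metadata.duration)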
diff --git a/packages/server/services/music/classes/track/methods/modify.js b/packages/server/services/music/classes/track/methods/modify.js
index d8714f91..ba7eea50 100644
--- a/packages/server/services/music/classes/track/methods/modify.js
+++ b/packages/server/services/music/classes/track/methods/modify.js
@@ -1,25 +1,32 @@
import { Track } from "@db_models"
+const allowedFields = ["title", "artist", "album", "cover"]
+
export default async (track_id, payload) => {
- if (!track_id) {
- throw new OperationError(400, "Missing track_id")
- }
+ if (!track_id) {
+ throw new OperationError(400, "Missing track_id")
+ }
- const track = await Track.findById(track_id)
+ const track = await Track.findById(track_id)
- if (!track) {
- throw new OperationError(404, "Track not found")
- }
+ if (!track) {
+ throw new OperationError(404, "Track not found")
+ }
- if (track.publisher.user_id !== payload.user_id) {
- throw new PermissionError(403, "You dont have permission to edit this track")
- }
+ if (track.publisher.user_id !== payload.user_id) {
+ throw new PermissionError(
+ 403,
+ "You dont have permission to edit this track",
+ )
+ }
- for (const field of Object.keys(payload)) {
- track[field] = payload[field]
- }
+ for (const field of allowedFields) {
+ if (payload[field] !== undefined) {
+ track[field] = payload[field]
+ }
+ }
- track.modified_at = Date.now()
+ track.modified_at = Date.now()
- return await track.save()
-}
\ No newline at end of file
+ return await track.save()
+}
diff --git a/packages/server/services/music/classes/track/methods/toggleFavourite.js b/packages/server/services/music/classes/track/methods/toggleFavourite.js
index 3f60fff9..4c7051a3 100644
--- a/packages/server/services/music/classes/track/methods/toggleFavourite.js
+++ b/packages/server/services/music/classes/track/methods/toggleFavourite.js
@@ -1,62 +1,65 @@
import { Track, TrackLike } from "@db_models"
export default async (user_id, track_id, to) => {
- if (!user_id) {
- throw new OperationError(400, "Missing user_id")
- }
+ if (!user_id) {
+ throw new OperationError(400, "Missing user_id")
+ }
- if (!track_id) {
- throw new OperationError(400, "Missing track_id")
- }
+ if (!track_id) {
+ throw new OperationError(400, "Missing track_id")
+ }
- const track = await Track.findById(track_id)
+ const track = await Track.findById(track_id)
- if (!track) {
- throw new OperationError(404, "Track not found")
- }
+ if (!track) {
+ throw new OperationError(404, "Track not found")
+ }
- let trackLike = await TrackLike.findOne({
- user_id: user_id,
- track_id: track_id,
- }).catch(() => null)
+ let trackLike = await TrackLike.findOne({
+ user_id: user_id,
+ track_id: track_id,
+ }).catch(() => null)
- if (typeof to === "undefined") {
- to = !!!trackLike
- }
+ if (typeof to === "undefined") {
+		to = !trackLike
+ }
- if (to) {
- if (!trackLike) {
- trackLike = new TrackLike({
- user_id: user_id,
- track_id: track_id,
- created_at: Date.now(),
- })
+ if (to) {
+ if (!trackLike) {
+ trackLike = new TrackLike({
+ user_id: user_id,
+ track_id: track_id,
+ created_at: Date.now(),
+ })
- await trackLike.save()
- }
- } else {
- if (trackLike) {
- await TrackLike.deleteOne({
- user_id: user_id,
- track_id: track_id,
- })
+ await trackLike.save()
+ }
+ } else {
+ if (trackLike) {
+ await TrackLike.deleteOne({
+ user_id: user_id,
+ track_id: track_id,
+ })
- trackLike = null
- }
- }
+ trackLike = null
+ }
+ }
- const targetSocket = await global.websocket.find.socketByUserId(user_id)
+ if (global.websockets) {
+ const targetSocket =
+ await global.websockets.find.clientsByUserId(user_id)
- if (targetSocket) {
- await targetSocket.emit("music:track:toggle:like", {
- track_id: track_id,
- action: trackLike ? "liked" : "unliked"
- })
- }
+ if (targetSocket) {
+ await targetSocket.emit("music:track:toggle:like", {
+ track_id: track_id,
+ action: trackLike ? "liked" : "unliked",
+ })
+ }
+ }
- return {
- liked: trackLike ? true : false,
- track_like_id: trackLike ? trackLike._id : null,
- track_id: track._id.toString(),
- }
-}
\ No newline at end of file
+ return {
+ liked: trackLike ? true : false,
+ track_like_id: trackLike ? trackLike._id : null,
+ track_id: track._id.toString(),
+ }
+}
diff --git a/packages/server/services/music/music.service.js b/packages/server/services/music/music.service.js
index 4e24f38a..15f1fec2 100755
--- a/packages/server/services/music/music.service.js
+++ b/packages/server/services/music/music.service.js
@@ -2,12 +2,14 @@ import { Server } from "linebridge"
import DbManager from "@shared-classes/DbManager"
import SSEManager from "@shared-classes/SSEManager"
+import RedisClient from "@shared-classes/RedisClient"
import SharedMiddlewares from "@shared-middlewares"
import LimitsClass from "@shared-classes/Limits"
export default class API extends Server {
static refName = "music"
+ static useEngine = "hyper-express-ng"
static enableWebsockets = true
static routesPath = `${__dirname}/routes`
static listen_port = process.env.HTTP_LISTEN_PORT ?? 3003
@@ -19,12 +21,15 @@ export default class API extends Server {
contexts = {
db: new DbManager(),
SSEManager: new SSEManager(),
+ redis: RedisClient(),
}
async onInitialize() {
global.sse = this.contexts.SSEManager
+ global.redis = this.contexts.redis.client
await this.contexts.db.initialize()
+ await this.contexts.redis.initialize()
this.contexts.limits = await LimitsClass.get()
}
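The radio routes below rely on this initialization order: exposing the client as global.redis is what lets handlers reference a bare redis. A condensed sketch of the same pattern outside the Server class, assuming RedisClient() returns { client, initialize } as the contexts block implies.

import RedisClient from "@shared-classes/RedisClient"

const redisContext = RedisClient()
await redisContext.initialize()
global.redis = redisContext.client

// any handler in this service can now do:
const radioData = await redis.hgetall("radio-someRadioId").catch(() => null)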
diff --git a/packages/server/services/music/package.json b/packages/server/services/music/package.json
index e230c3e0..07e7f330 100755
--- a/packages/server/services/music/package.json
+++ b/packages/server/services/music/package.json
@@ -1,9 +1,3 @@
{
- "name": "music",
- "version": "0.60.2",
- "dependencies": {
- "ms": "^2.1.3",
- "music-metadata": "^7.14.0",
- "openai": "^4.47.2"
- }
+ "name": "music"
}
diff --git a/packages/server/services/music/routes/music/radio/[radio_id]/get.js b/packages/server/services/music/routes/music/radio/[radio_id]/get.js
index 90969443..4fc39c16 100644
--- a/packages/server/services/music/routes/music/radio/[radio_id]/get.js
+++ b/packages/server/services/music/routes/music/radio/[radio_id]/get.js
@@ -1,9 +1,7 @@
export default async (req, res) => {
const radioId = req.params.radio_id
- let redisData = await global.websocket.redis
- .hgetall(`radio-${radioId}`)
- .catch(() => null)
+ let redisData = await redis.hgetall(`radio-${radioId}`).catch(() => null)
return redisData
}
diff --git a/packages/server/services/music/routes/music/radio/list/get.js b/packages/server/services/music/routes/music/radio/list/get.js
index c10a069c..186bf1ed 100644
--- a/packages/server/services/music/routes/music/radio/list/get.js
+++ b/packages/server/services/music/routes/music/radio/list/get.js
@@ -1,19 +1,13 @@
import { RadioProfile } from "@db_models"
async function scanKeysWithPagination(pattern, count = 10, cursor = "0") {
- const result = await global.websocket.redis.scan(
- cursor,
- "MATCH",
- pattern,
- "COUNT",
- count,
- )
+ const result = await redis.scan(cursor, "MATCH", pattern, "COUNT", count)
return result[1]
}
async function getHashData(hashKey) {
- const hashData = await global.websocket.redis.hgetall(hashKey)
+ const hashData = await redis.hgetall(hashKey)
return hashData
}
diff --git a/packages/server/services/music/routes/music/radio/sse/[channel_id]/get.js b/packages/server/services/music/routes/music/radio/sse/[channel_id]/get.js
index 6fc963bb..5e1be783 100644
--- a/packages/server/services/music/routes/music/radio/sse/[channel_id]/get.js
+++ b/packages/server/services/music/routes/music/radio/sse/[channel_id]/get.js
@@ -3,9 +3,7 @@ export default async (req, res) => {
const radioId = channel_id.split("radio:")[1]
- let redisData = await global.websocket.redis
- .hgetall(`radio-${radioId}`)
- .catch(() => null)
+ let redisData = await redis.hgetall(`radio-${radioId}`).catch(() => null)
global.sse.connectToChannelStream(channel_id, req, res, {
initialData: {
diff --git a/packages/server/services/music/routes/music/radio/webhook/post.js b/packages/server/services/music/routes/music/radio/webhook/post.js
index 0a44b1d9..24827732 100644
--- a/packages/server/services/music/routes/music/radio/webhook/post.js
+++ b/packages/server/services/music/routes/music/radio/webhook/post.js
@@ -63,20 +63,17 @@ export default async (req) => {
const redis_id = `radio-${data.radio_id}`
- const existMember = await global.websocket.redis.hexists(
- redis_id,
- "radio_id",
- )
+ const existMember = await redis.hexists(redis_id, "radio_id")
if (data.online) {
- await global.websocket.redis.hset(redis_id, {
+ await redis.hset(redis_id, {
...data,
now_playing: JSON.stringify(data.now_playing),
})
}
if (!data.online && existMember) {
- await global.websocket.redis.hdel(redis_id)
+ await redis.hdel(redis_id)
}
console.log(`[${data.radio_id}] Updating radio data`)
@@ -85,7 +82,6 @@ export default async (req) => {
event: "update",
data: data,
})
- global.websocket.io.to(`radio:${data.radio_id}`).emit(`update`, data)
return data
}
diff --git a/packages/server/services/music/routes/music/releases/[release_id]/data/get.js b/packages/server/services/music/routes/music/releases/[release_id]/data/get.js
index 257daebc..4e0caca5 100644
--- a/packages/server/services/music/routes/music/releases/[release_id]/data/get.js
+++ b/packages/server/services/music/routes/music/releases/[release_id]/data/get.js
@@ -1,5 +1,4 @@
-import { MusicRelease, Track } from "@db_models"
-import TrackClass from "@classes/track"
+import ReleaseClass from "@classes/release"
export default {
middlewares: ["withOptionalAuthentication"],
@@ -7,29 +6,10 @@ export default {
const { release_id } = req.params
const { limit = 50, offset = 0 } = req.query
- let release = await MusicRelease.findOne({
- _id: release_id,
- })
-
- if (!release) {
- throw new OperationError(404, "Release not found")
- }
-
- release = release.toObject()
-
- const totalTracks = await Track.countDocuments({
- _id: release.list,
- })
-
- const tracks = await TrackClass.get(release.list, {
+ return await ReleaseClass.data(release_id, {
user_id: req.auth?.session?.user_id,
- onlyList: true,
+ limit,
+ offset,
})
-
- release.listLength = totalTracks
- release.items = tracks
- release.list = tracks
-
- return release
},
}
diff --git a/packages/server/services/music/routes/music/releases/[release_id]/delete.js b/packages/server/services/music/routes/music/releases/[release_id]/delete.js
index 4b7adb5b..30a40b86 100644
--- a/packages/server/services/music/routes/music/releases/[release_id]/delete.js
+++ b/packages/server/services/music/routes/music/releases/[release_id]/delete.js
@@ -1,26 +1,10 @@
-import { MusicRelease, Track } from "@db_models"
+import ReleaseClass from "@classes/release"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req) => {
- const { release_id } = req.params
-
- let release = await MusicRelease.findOne({
- _id: release_id
- })
-
- if (!release) {
- throw new OperationError(404, "Release not found")
- }
-
- if (release.user_id !== req.auth.session.user_id) {
- throw new OperationError(403, "Unauthorized")
- }
-
- await MusicRelease.deleteOne({
- _id: release_id
- })
-
- return release
- }
-}
\ No newline at end of file
+ middlewares: ["withAuthentication"],
+ fn: async (req) => {
+ return await ReleaseClass.delete(req.params.release_id, {
+ user_id: req.auth.session.user_id,
+ })
+ },
+}
diff --git a/packages/server/services/music/routes/music/releases/self/get.js b/packages/server/services/music/routes/music/releases/self/get.js
index 55eacba2..4c696b69 100644
--- a/packages/server/services/music/routes/music/releases/self/get.js
+++ b/packages/server/services/music/routes/music/releases/self/get.js
@@ -29,7 +29,7 @@ export default {
if (req.query.resolveItemsData === "true") {
releases = await Promise.all(
playlists.map(async (playlist) => {
- playlist.items = await Track.find({
+ playlist.list = await Track.find({
_id: [...playlist.list],
})
@@ -39,7 +39,7 @@ export default {
}
return {
- total_length: await MusicRelease.countDocuments(searchQuery),
+ total_items: await MusicRelease.countDocuments(searchQuery),
items: releases,
}
},
diff --git a/packages/server/services/music/routes/music/tracks/put.js b/packages/server/services/music/routes/music/tracks/put.js
index d7720e47..415bb16a 100644
--- a/packages/server/services/music/routes/music/tracks/put.js
+++ b/packages/server/services/music/routes/music/tracks/put.js
@@ -2,36 +2,36 @@ import requiredFields from "@shared-utils/requiredFields"
import TrackClass from "@classes/track"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req) => {
- if (Array.isArray(req.body.list)) {
- let results = []
+ middlewares: ["withAuthentication"],
+ fn: async (req) => {
+ if (Array.isArray(req.body.items)) {
+ let results = []
- for await (const item of req.body.list) {
- if (!item.source || !item.title) {
- continue
- }
+ for await (const item of req.body.items) {
+ if (!item.source || !item.title) {
+ continue
+ }
- const track = await TrackClass.create({
- ...item,
- user_id: req.auth.session.user_id,
- })
+ const track = await TrackClass.create({
+ ...item,
+ user_id: req.auth.session.user_id,
+ })
- results.push(track)
- }
+ results.push(track)
+ }
- return {
- list: results
- }
- }
+ return {
+ items: results,
+ }
+ }
- requiredFields(["title", "source"], req.body)
+ requiredFields(["title", "source"], req.body)
- const track = await TrackClass.create({
- ...req.body,
- user_id: req.auth.session.user_id,
- })
+ const track = await TrackClass.create({
+ ...req.body,
+ user_id: req.auth.session.user_id,
+ })
- return track
- }
-}
\ No newline at end of file
+ return track
+ },
+}
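For reference, a hedged sketch of the request body the rewritten handler above now expects for bulk uploads: items replaces list, while single uploads keep sending title and source at the top level. The endpoint URL and token are placeholders.

const response = await fetch("https://api.example.com/music/tracks", {
	method: "PUT",
	headers: {
		"Content-Type": "application/json",
		Authorization: "Bearer someToken",
	},
	body: JSON.stringify({
		items: [
			{ title: "Track A", source: "https://cdn.example.com/a.mp3" },
			{ title: "Track B", source: "https://cdn.example.com/b.mp3" },
		],
	}),
})

const { items } = await response.json() // the created track documents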
diff --git a/packages/server/services/notifications/package.json b/packages/server/services/notifications/package.json
index b4692cff..71499320 100644
--- a/packages/server/services/notifications/package.json
+++ b/packages/server/services/notifications/package.json
@@ -1,4 +1,3 @@
{
- "name": "notifications",
- "version": "1.0.0"
+ "name": "notifications"
}
diff --git a/packages/server/services/posts/classes/posts/methods/create.js b/packages/server/services/posts/classes/posts/methods/create.js
index 0b91c36f..a469a177 100644
--- a/packages/server/services/posts/classes/posts/methods/create.js
+++ b/packages/server/services/posts/classes/posts/methods/create.js
@@ -89,11 +89,15 @@ export default async (payload = {}, req) => {
// broadcast post to all users
if (visibility === "public") {
- global.websocket.senders.toTopic("realtime:feed", "post:new", result[0])
+ global.websockets.senders.toTopic(
+ "realtime:feed",
+ "post:new",
+ result[0],
+ )
}
if (visibility === "private") {
- const userSockets = await global.websocket.find.clientsByUserId(
+ const userSockets = await global.websockets.find.clientsByUserId(
post.user_id,
)
diff --git a/packages/server/services/posts/classes/posts/methods/data.js b/packages/server/services/posts/classes/posts/methods/data.js
index dbbc158b..97c6e590 100644
--- a/packages/server/services/posts/classes/posts/methods/data.js
+++ b/packages/server/services/posts/classes/posts/methods/data.js
@@ -8,8 +8,8 @@ export default async (payload = {}) => {
for_user_id,
post_id,
query = {},
- trim = 0,
limit = 20,
+ page = 0,
sort = { created_at: -1 },
} = payload
@@ -31,8 +31,8 @@ export default async (payload = {}) => {
} else {
posts = await Post.find({ ...query })
.sort(sort)
- .skip(trim)
.limit(limit)
+ .skip(limit * page)
}
// fullfill data
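The trim-to-page switch above boils down to deriving the skip offset from a page index. A minimal sketch of the resulting query shape, using the same Post model imported from @db_models elsewhere in this diff:

import { Post } from "@db_models"

async function fetchFeedPage({ limit = 20, page = 0, query = {} } = {}) {
	// page 0 skips nothing, page 1 skips `limit` documents, and so on
	return await Post.find({ ...query })
		.sort({ created_at: -1 })
		.limit(limit)
		.skip(limit * page)
}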
diff --git a/packages/server/services/posts/classes/posts/methods/delete.js b/packages/server/services/posts/classes/posts/methods/delete.js
index 4895be6f..246b166c 100644
--- a/packages/server/services/posts/classes/posts/methods/delete.js
+++ b/packages/server/services/posts/classes/posts/methods/delete.js
@@ -42,7 +42,7 @@ export default async (payload = {}) => {
// broadcast post to all users
if (post.visibility === "public") {
- global.websocket.senders.toTopic(
+ global.websockets.senders.toTopic(
"realtime:feed",
"post:delete",
post_id,
@@ -50,7 +50,7 @@ export default async (payload = {}) => {
}
if (post.visibility === "private") {
- const userSockets = await global.websocket.find.clientsByUserId(
+ const userSockets = await global.websockets.find.clientsByUserId(
post.user_id,
)
diff --git a/packages/server/services/posts/classes/posts/methods/deletePollVote.js b/packages/server/services/posts/classes/posts/methods/deletePollVote.js
index 05125a2c..182a80f4 100644
--- a/packages/server/services/posts/classes/posts/methods/deletePollVote.js
+++ b/packages/server/services/posts/classes/posts/methods/deletePollVote.js
@@ -1,33 +1,33 @@
import { VotePoll } from "@db_models"
export default async (payload = {}) => {
- if (!payload.user_id) {
- throw new OperationError(400, "Missing user_id")
- }
+ if (!payload.user_id) {
+ throw new OperationError(400, "Missing user_id")
+ }
- if (!payload.post_id) {
- throw new OperationError(400, "Missing post_id")
- }
+ if (!payload.post_id) {
+ throw new OperationError(400, "Missing post_id")
+ }
- if (!payload.option_id) {
- throw new OperationError(400, "Missing option_id")
- }
+ if (!payload.option_id) {
+ throw new OperationError(400, "Missing option_id")
+ }
- let vote = await VotePoll.find({
- user_id: payload.user_id,
- post_id: payload.post_id,
- option_id: payload.option_id,
- })
+ let vote = await VotePoll.find({
+ user_id: payload.user_id,
+ post_id: payload.post_id,
+ option_id: payload.option_id,
+ })
- if (!vote) {
- throw new OperationError(404, "Poll vote not found")
- }
+ if (!vote) {
+ throw new OperationError(404, "Poll vote not found")
+ }
- await VotePoll.deleteOne({
- _id: vote._id
- })
+ await VotePoll.deleteOne({
+ _id: vote._id,
+ })
- global.websocket.io.of("/").emit(`post.poll.vote.deleted`, vote)
+ global.websockets.io.of("/").emit(`post.poll.vote.deleted`, vote)
- return vote
-}
\ No newline at end of file
+ return vote
+}
diff --git a/packages/server/services/posts/classes/posts/methods/fromUserId.js b/packages/server/services/posts/classes/posts/methods/fromUserId.js
index 38f5c731..1b00b60e 100644
--- a/packages/server/services/posts/classes/posts/methods/fromUserId.js
+++ b/packages/server/services/posts/classes/posts/methods/fromUserId.js
@@ -1,25 +1,20 @@
import GetData from "./data"
export default async (payload = {}) => {
- const {
- for_user_id,
- user_id,
- trim,
- limit,
- } = payload
+ const { for_user_id, user_id, page, limit } = payload
- if (!user_id) {
- throw new OperationError(400, "Missing user_id")
- }
+ if (!user_id) {
+ throw new OperationError(400, "Missing user_id")
+ }
- return await GetData({
- for_user_id: for_user_id,
- trim: trim,
- limit: limit,
- query: {
- user_id: {
- $in: user_id
- }
- }
- })
-}
\ No newline at end of file
+ return await GetData({
+ for_user_id: for_user_id,
+ page: page,
+ limit: limit,
+ query: {
+ user_id: {
+ $in: user_id,
+ },
+ },
+ })
+}
diff --git a/packages/server/services/posts/classes/posts/methods/getLiked.js b/packages/server/services/posts/classes/posts/methods/getLiked.js
index 84ed4a6a..e443348b 100644
--- a/packages/server/services/posts/classes/posts/methods/getLiked.js
+++ b/packages/server/services/posts/classes/posts/methods/getLiked.js
@@ -2,25 +2,24 @@ import { PostLike } from "@db_models"
import GetData from "./data"
export default async (payload = {}) => {
- let { user_id, trim, limit } = payload
+ let { user_id, page, limit } = payload
- if (!user_id) {
- throw new OperationError(400, "Missing user_id")
- }
+ if (!user_id) {
+ throw new OperationError(400, "Missing user_id")
+ }
- let ids = await PostLike.find({ user_id })
+ let ids = await PostLike.find({ user_id })
- ids = ids.map((item) => item.post_id)
+ ids = ids.map((item) => item.post_id)
- return await GetData({
- trim: trim,
- limit: limit,
- for_user_id: user_id,
- query: {
- _id: {
- $in: ids
- }
- }
- })
+ return await GetData({
+ page: page,
+ limit: limit,
+ for_user_id: user_id,
+ query: {
+ _id: {
+ $in: ids,
+ },
+ },
+ })
}
-
diff --git a/packages/server/services/posts/classes/posts/methods/getSaved.js b/packages/server/services/posts/classes/posts/methods/getSaved.js
index e3697df1..56f79bc2 100644
--- a/packages/server/services/posts/classes/posts/methods/getSaved.js
+++ b/packages/server/services/posts/classes/posts/methods/getSaved.js
@@ -2,28 +2,28 @@ import { PostSave } from "@db_models"
import GetData from "./data"
export default async (payload = {}) => {
- let { user_id, trim, limit } = payload
+ let { user_id, page, limit } = payload
- if (!user_id) {
- throw new OperationError(400, "Missing user_id")
- }
+ if (!user_id) {
+ throw new OperationError(400, "Missing user_id")
+ }
- let ids = await PostSave.find({ user_id })
+ let ids = await PostSave.find({ user_id })
- if (ids.length === 0) {
- return []
- }
+ if (ids.length === 0) {
+ return []
+ }
- ids = ids.map((item) => item.post_id)
+ ids = ids.map((item) => item.post_id)
- return await GetData({
- trim: trim,
- limit: limit,
- for_user_id: user_id,
- query: {
- _id: {
- $in: ids
- }
- }
- })
-}
\ No newline at end of file
+ return await GetData({
+ page: page,
+ limit: limit,
+ for_user_id: user_id,
+ query: {
+ _id: {
+ $in: ids,
+ },
+ },
+ })
+}
diff --git a/packages/server/services/posts/classes/posts/methods/globalTimeline.js b/packages/server/services/posts/classes/posts/methods/globalTimeline.js
index e08fd6af..31af49cc 100644
--- a/packages/server/services/posts/classes/posts/methods/globalTimeline.js
+++ b/packages/server/services/posts/classes/posts/methods/globalTimeline.js
@@ -1,17 +1,7 @@
import GetPostData from "./data"
export default async (payload = {}) => {
- let {
- user_id,
- trim,
- limit,
- } = payload
+ const posts = await GetPostData(payload)
- const posts = await GetPostData({
- for_user_id: user_id,
- trim,
- limit,
- })
-
- return posts
-}
\ No newline at end of file
+ return posts
+}
diff --git a/packages/server/services/posts/classes/posts/methods/replies.js b/packages/server/services/posts/classes/posts/methods/replies.js
index d245d2a7..b7a9fe95 100644
--- a/packages/server/services/posts/classes/posts/methods/replies.js
+++ b/packages/server/services/posts/classes/posts/methods/replies.js
@@ -2,7 +2,7 @@ import { Post } from "@db_models"
import stage from "./stage"
export default async (payload = {}) => {
- const { post_id, for_user_id, trim = 0, limit = 50 } = payload
+ const { post_id, for_user_id, page = 0, limit = 50 } = payload
if (!post_id) {
throw new OperationError(400, "Post ID is required")
@@ -12,7 +12,7 @@ export default async (payload = {}) => {
reply_to: post_id,
})
.limit(limit)
- .skip(trim)
+ .skip(limit * page)
.sort({ created_at: -1 })
posts = await stage({
diff --git a/packages/server/services/posts/classes/posts/methods/timeline.js b/packages/server/services/posts/classes/posts/methods/timeline.js
index 30702f61..9be97801 100644
--- a/packages/server/services/posts/classes/posts/methods/timeline.js
+++ b/packages/server/services/posts/classes/posts/methods/timeline.js
@@ -3,41 +3,32 @@ import { UserFollow } from "@db_models"
import GetPostData from "./data"
export default async (payload = {}) => {
- let {
- user_id,
- trim,
- limit,
- } = payload
+ payload.query = {}
- let query = {}
+ //TODO: include posts from groups
+ //TODO: include promotional posts
+ if (payload.for_user_id) {
+ const from_users = []
- //TODO: include posts from groups
- //TODO: include promotional posts
- if (user_id) {
- const from_users = []
+ from_users.push(payload.for_user_id)
- from_users.push(user_id)
+		// get posts from users that the user follows
+ const followingUsers = await UserFollow.find({
+ user_id: payload.for_user_id,
+ })
- // get post from users that the user follows
- const followingUsers = await UserFollow.find({
- user_id: user_id
- })
+ const followingUserIds = followingUsers.map(
+ (followingUser) => followingUser.to,
+ )
- const followingUserIds = followingUsers.map((followingUser) => followingUser.to)
+ from_users.push(...followingUserIds)
- from_users.push(...followingUserIds)
+ payload.query.user_id = {
+ $in: from_users,
+ }
+ }
- query.user_id = {
- $in: from_users
- }
- }
+ const posts = await GetPostData(payload)
- const posts = await GetPostData({
- for_user_id: user_id,
- trim,
- limit,
- query: query,
- })
-
- return posts
-}
\ No newline at end of file
+ return posts
+}
diff --git a/packages/server/services/posts/classes/posts/methods/update.js b/packages/server/services/posts/classes/posts/methods/update.js
index 26e3b891..ccb94b31 100644
--- a/packages/server/services/posts/classes/posts/methods/update.js
+++ b/packages/server/services/posts/classes/posts/methods/update.js
@@ -37,7 +37,7 @@ export default async (post_id, update) => {
})
if (post.visibility === "public") {
- global.websocket.senders.toTopic(
+ global.websockets.senders.toTopic(
"realtime:feed",
`post:update`,
result[0],
@@ -45,7 +45,7 @@ export default async (post_id, update) => {
}
if (post.visibility === "private") {
- const userSockets = await global.websocket.find.clientsByUserId(
+ const userSockets = await global.websockets.find.clientsByUserId(
post.user_id,
)
diff --git a/packages/server/services/posts/classes/posts/methods/votePoll.js b/packages/server/services/posts/classes/posts/methods/votePoll.js
index 61290fcc..6660e721 100644
--- a/packages/server/services/posts/classes/posts/methods/votePoll.js
+++ b/packages/server/services/posts/classes/posts/methods/votePoll.js
@@ -51,7 +51,7 @@ export default async (payload = {}) => {
post = (await stage({ posts: post, for_user_id: payload.user_id }))[0]
if (post.visibility === "public") {
- global.websocket.senders.toTopic("realtime:feed", `post:update`, post)
+ global.websockets.senders.toTopic("realtime:feed", `post:update`, post)
}
return {
diff --git a/packages/server/services/posts/package.json b/packages/server/services/posts/package.json
index 56952f06..925e670e 100644
--- a/packages/server/services/posts/package.json
+++ b/packages/server/services/posts/package.json
@@ -1,7 +1,7 @@
{
- "name": "posts",
- "version": "1.0.0",
- "dependencies": {
- "moment-timezone": "^0.5.45"
- }
+ "name": "posts",
+ "dependencies": {
+ "linebridge": "^1.0.0-a3",
+ "moment-timezone": "^0.5.45"
+ }
}
diff --git a/packages/server/services/posts/posts.service.js b/packages/server/services/posts/posts.service.js
index 90e9ae25..1a226219 100644
--- a/packages/server/services/posts/posts.service.js
+++ b/packages/server/services/posts/posts.service.js
@@ -10,10 +10,10 @@ import SharedMiddlewares from "@shared-middlewares"
export default class API extends Server {
static refName = "posts"
- static useEngine = "hyper-express-ng"
- static enableWebsockets = true
-
- static listen_port = process.env.HTTP_LISTEN_PORT ?? 3001
+ static websockets = true
+ static listenPort = process.env.HTTP_LISTEN_PORT ?? 3001
+ static useMiddlewares = ["logs"]
+ static bypassCors = true
middlewares = {
...SharedMiddlewares,
diff --git a/packages/server/services/posts/routes/posts/[post_id]/data/get.js b/packages/server/services/posts/routes/posts/[post_id]/data/get.js
index 760b6b82..43e6e4bd 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/data/get.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/data/get.js
@@ -1,13 +1,13 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withOptionalAuthentication"],
- fn: async (req, res) => {
- const result = await Posts.data({
- post_id: req.params.post_id,
- for_user_id: req.auth?.session?.user_id,
- })
+ useMiddlewares: ["withOptionalAuthentication"],
+ fn: async (req, res) => {
+ const result = await Posts.data({
+ post_id: req.params.post_id,
+ for_user_id: req.auth?.session?.user_id,
+ })
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/delete.js b/packages/server/services/posts/routes/posts/[post_id]/delete.js
index fe1315c5..968933e8 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/delete.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/delete.js
@@ -1,27 +1,28 @@
import PostClass from "@classes/posts"
import { Post } from "@db_models"
+
export default {
- middlewares: ["withAuthentication"],
- fn: async (req, res) => {
- // check if post is owned or if is admin
- const post = await Post.findById(req.params.post_id).catch(() => {
- return false
- })
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req, res) => {
+		// check if the post is owned by the requester or if they are an admin
+ const post = await Post.findById(req.params.post_id).catch(() => {
+ return false
+ })
- if (!post) {
- throw new OperationError(404, "Post not found")
- }
+ if (!post) {
+ throw new OperationError(404, "Post not found")
+ }
- const user = await req.auth.user()
+ const user = await req.auth.user()
- if (post.user_id.toString() !== user._id.toString()) {
- if (!user.roles.includes("admin")) {
- throw new OperationError(403, "You cannot delete this post")
- }
- }
+ if (post.user_id.toString() !== user._id.toString()) {
+ if (!user.roles.includes("admin")) {
+ throw new OperationError(403, "You cannot delete this post")
+ }
+ }
- return await PostClass.delete({
- post_id: req.params.post_id
- })
- }
-}
\ No newline at end of file
+ return await PostClass.delete({
+ post_id: req.params.post_id,
+ })
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/replies/get.js b/packages/server/services/posts/routes/posts/[post_id]/replies/get.js
index 059837e5..1464df63 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/replies/get.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/replies/get.js
@@ -1,13 +1,13 @@
import PostClass from "@classes/posts"
export default {
- middlewares: ["withOptionalAuthentication"],
- fn: async (req) => {
- return await PostClass.replies({
- post_id: req.params.post_id,
- for_user_id: req.auth?.session.user_id,
- trim: req.query.trim,
- limit: req.query.limit
- })
- }
-}
\ No newline at end of file
+ useMiddlewares: ["withOptionalAuthentication"],
+ fn: async (req) => {
+ return await PostClass.replies({
+ post_id: req.params.post_id,
+ for_user_id: req.auth?.session.user_id,
+			page: req.query.page,
+ limit: req.query.limit,
+ })
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/toggle_like/post.js b/packages/server/services/posts/routes/posts/[post_id]/toggle_like/post.js
index 5b80505f..ec766a59 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/toggle_like/post.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/toggle_like/post.js
@@ -1,14 +1,14 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req, res) => {
- const result = await Posts.toggleLike({
- post_id: req.params.post_id,
- user_id: req.auth.session.user_id,
- to: req.body?.to,
- })
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req, res) => {
+ const result = await Posts.toggleLike({
+ post_id: req.params.post_id,
+ user_id: req.auth.session.user_id,
+ to: req.body?.to,
+ })
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/toggle_save/post.js b/packages/server/services/posts/routes/posts/[post_id]/toggle_save/post.js
index 2bf106f7..84692f10 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/toggle_save/post.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/toggle_save/post.js
@@ -1,14 +1,14 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req, res) => {
- const result = await Posts.toggleSave({
- post_id: req.params.post_id,
- user_id: req.auth.session.user_id,
- to: req.body?.to,
- })
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req, res) => {
+ const result = await Posts.toggleSave({
+ post_id: req.params.post_id,
+ user_id: req.auth.session.user_id,
+ to: req.body?.to,
+ })
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/update/put.js b/packages/server/services/posts/routes/posts/[post_id]/update/put.js
index 1cdd9439..c8449589 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/update/put.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/update/put.js
@@ -15,7 +15,7 @@ const MaxStringsLengths = {
}
export default {
- middlewares: ["withAuthentication"],
+ useMiddlewares: ["withAuthentication"],
fn: async (req) => {
let update = {}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/delete.js b/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/delete.js
index 0f70d3a6..73d02493 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/delete.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/delete.js
@@ -1,14 +1,14 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req) => {
- const result = await Posts.deleteVotePoll({
- user_id: req.auth.session.user_id,
- post_id: req.params.post_id,
- option_id: req.params.option_id,
- })
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req) => {
+ const result = await Posts.deleteVotePoll({
+ user_id: req.auth.session.user_id,
+ post_id: req.params.post_id,
+ option_id: req.params.option_id,
+ })
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/post.js b/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/post.js
index f4fe61ac..c53495dc 100644
--- a/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/post.js
+++ b/packages/server/services/posts/routes/posts/[post_id]/vote_poll/[option_id]/post.js
@@ -1,14 +1,14 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req) => {
- const result = await Posts.votePoll({
- user_id: req.auth.session.user_id,
- post_id: req.params.post_id,
- option_id: req.params.option_id,
- })
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req) => {
+ const result = await Posts.votePoll({
+ user_id: req.auth.session.user_id,
+ post_id: req.params.post_id,
+ option_id: req.params.option_id,
+ })
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/feed/global/get.js b/packages/server/services/posts/routes/posts/feed/global/get.js
index 102eec4a..6a1afaa3 100644
--- a/packages/server/services/posts/routes/posts/feed/global/get.js
+++ b/packages/server/services/posts/routes/posts/feed/global/get.js
@@ -1,19 +1,19 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withOptionalAuthentication"],
- fn: async (req, res) => {
- const payload = {
- limit: req.query?.limit,
- trim: req.query?.trim,
- }
+ useMiddlewares: ["withOptionalAuthentication"],
+ fn: async (req, res) => {
+ const payload = {
+ limit: req.query?.limit,
+ page: req.query?.page,
+ }
- if (req.auth) {
- payload.user_id = req.auth.session.user_id
- }
+ if (req.auth) {
+ payload.for_user_id = req.auth.session.user_id
+ }
- const result = await Posts.globalTimeline(payload)
+ const result = await Posts.globalTimeline(payload)
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/feed/timeline/get.js b/packages/server/services/posts/routes/posts/feed/timeline/get.js
index a5bfaa19..a6ea2cf0 100644
--- a/packages/server/services/posts/routes/posts/feed/timeline/get.js
+++ b/packages/server/services/posts/routes/posts/feed/timeline/get.js
@@ -1,19 +1,19 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withOptionalAuthentication"],
- fn: async (req, res) => {
- const payload = {
- limit: req.query?.limit,
- trim: req.query?.trim,
- }
+ useMiddlewares: ["withOptionalAuthentication"],
+ fn: async (req, res) => {
+ const payload = {
+ limit: req.query?.limit,
+ page: req.query?.page,
+ }
- if (req.auth) {
- payload.user_id = req.auth.session.user_id
- }
+ if (req.auth) {
+ payload.for_user_id = req.auth.session.user_id
+ }
- const result = await Posts.timeline(payload)
+ const result = await Posts.timeline(payload)
- return result
- }
-}
\ No newline at end of file
+ return result
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/liked/get.js b/packages/server/services/posts/routes/posts/liked/get.js
index e70c49c3..e4904ee7 100644
--- a/packages/server/services/posts/routes/posts/liked/get.js
+++ b/packages/server/services/posts/routes/posts/liked/get.js
@@ -1,12 +1,12 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req) => {
- return await Posts.getLiked({
- trim: req.query.trim,
- limit: req.query.limit,
- user_id: req.auth.session.user_id
- })
- }
-}
\ No newline at end of file
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req) => {
+ return await Posts.getLiked({
+ page: req.query.page,
+ limit: req.query.limit,
+ user_id: req.auth.session.user_id,
+ })
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/new/post.js b/packages/server/services/posts/routes/posts/new/post.js
index 24739265..bb6357c2 100644
--- a/packages/server/services/posts/routes/posts/new/post.js
+++ b/packages/server/services/posts/routes/posts/new/post.js
@@ -1,7 +1,7 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
+ useMiddlewares: ["withAuthentication"],
fn: async (req, res) => {
const result = await Posts.create(
{
diff --git a/packages/server/services/posts/routes/posts/saved/get.js b/packages/server/services/posts/routes/posts/saved/get.js
index 02391afe..2aa85390 100644
--- a/packages/server/services/posts/routes/posts/saved/get.js
+++ b/packages/server/services/posts/routes/posts/saved/get.js
@@ -1,12 +1,12 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withAuthentication"],
- fn: async (req) => {
- return await Posts.getSaved({
- trim: req.query.trim,
- limit: req.query.limit,
- user_id: req.auth.session.user_id
- })
- }
-}
\ No newline at end of file
+ useMiddlewares: ["withAuthentication"],
+ fn: async (req) => {
+ return await Posts.getSaved({
+ page: req.query.page,
+ limit: req.query.limit,
+ user_id: req.auth.session.user_id,
+ })
+ },
+}
diff --git a/packages/server/services/posts/routes/posts/trending/[trending]/get.js b/packages/server/services/posts/routes/posts/trending/[trending]/get.js
index 753b9302..a6836da6 100644
--- a/packages/server/services/posts/routes/posts/trending/[trending]/get.js
+++ b/packages/server/services/posts/routes/posts/trending/[trending]/get.js
@@ -2,7 +2,7 @@ import { Post } from "@db_models"
import stage from "@classes/posts/methods/stage"
export default {
- middlewares: ["withOptionalAuthentication"],
+ useMiddlewares: ["withOptionalAuthentication"],
fn: async (req) => {
const { limit, trim } = req.query
diff --git a/packages/server/services/posts/routes/posts/trendings/get.js b/packages/server/services/posts/routes/posts/trendings/get.js
index 1301f38b..b8b76ee7 100644
--- a/packages/server/services/posts/routes/posts/trendings/get.js
+++ b/packages/server/services/posts/routes/posts/trendings/get.js
@@ -5,41 +5,41 @@ const maxDaysOld = 30
const maxHashtags = 5
export default async (req) => {
- // fetch all posts that contain in message an #, with a maximun of 5 diferent hashtags
- const startDate = DateTime.local().minus({ days: maxDaysOld }).toISO()
+	// fetch all posts whose message contains a #, with a maximum of 5 different hashtags
+ const startDate = DateTime.local().minus({ days: maxDaysOld }).toISO()
- const trendings = await Post.aggregate([
- {
- $match: {
- message: { $regex: /#/gi },
- created_at: { $gte: startDate }
- }
- },
- {
- $project: {
- hashtags: {
- $regexFindAll: {
- input: "$message",
- regex: /#[a-zA-Z0-9_]+/g
- }
- }
- }
- },
- { $unwind: "$hashtags" },
- {
- $project: {
- hashtag: { $substr: ["$hashtags.match", 1, -1] }
- }
- },
- {
- $group: {
- _id: "$hashtag",
- count: { $sum: 1 }
- }
- },
- { $sort: { count: -1 } },
- { $limit: maxHashtags }
- ])
+ const trendings = await Post.aggregate([
+ {
+ $match: {
+ message: { $regex: /#/gi },
+ created_at: { $gte: startDate },
+ },
+ },
+ {
+ $project: {
+ hashtags: {
+ $regexFindAll: {
+ input: "$message",
+ regex: /#[a-zA-Z0-9_]+/g,
+ },
+ },
+ },
+ },
+ { $unwind: "$hashtags" },
+ {
+ $project: {
+ hashtag: { $substr: ["$hashtags.match", 1, -1] },
+ },
+ },
+ {
+ $group: {
+ _id: "$hashtag",
+ count: { $sum: 1 },
+ },
+ },
+ { $sort: { count: -1 } },
+ { $limit: maxHashtags },
+ ])
- return trendings.map(({ _id, count }) => ({ hashtag: _id, count }));
-}
\ No newline at end of file
+ return trendings.map(({ _id, count }) => ({ hashtag: _id, count }))
+}
diff --git a/packages/server/services/posts/routes/posts/user/[user_id]/get.js b/packages/server/services/posts/routes/posts/user/[user_id]/get.js
index 61c102ec..ed282b46 100644
--- a/packages/server/services/posts/routes/posts/user/[user_id]/get.js
+++ b/packages/server/services/posts/routes/posts/user/[user_id]/get.js
@@ -1,13 +1,13 @@
import Posts from "@classes/posts"
export default {
- middlewares: ["withOptionalAuthentication"],
- fn: async (req, res) => {
- return await Posts.fromUserId({
- skip: req.query.skip,
- trim: req.query.trim,
- user_id: req.params.user_id,
- for_user_id: req.auth?.session?.user_id,
- })
- }
-}
\ No newline at end of file
+ useMiddlewares: ["withOptionalAuthentication"],
+ fn: async (req, res) => {
+ return await Posts.fromUserId({
+ limit: req.query.limit,
+ page: req.query.page,
+ user_id: req.params.user_id,
+ for_user_id: req.auth?.session?.user_id,
+ })
+ },
+}
diff --git a/packages/server/services/search/package.json b/packages/server/services/search/package.json
index 4373c84f..68b82853 100644
--- a/packages/server/services/search/package.json
+++ b/packages/server/services/search/package.json
@@ -1,6 +1,3 @@
{
- "name": "search",
- "version": "1.0.0",
- "main": "index.js",
- "license": "MIT"
+ "name": "search"
}
diff --git a/packages/server/services/search/search.service.js b/packages/server/services/search/search.service.js
index de54af63..109161b8 100644
--- a/packages/server/services/search/search.service.js
+++ b/packages/server/services/search/search.service.js
@@ -6,24 +6,23 @@ import RedisClient from "@shared-classes/RedisClient"
import SharedMiddlewares from "@shared-middlewares"
class API extends Server {
- static refName = "search"
- static wsRoutesPath = `${__dirname}/ws_routes`
- static routesPath = `${__dirname}/routes`
- static listen_port = process.env.HTTP_LISTEN_PORT ?? 3010
+ static refName = "search"
+ static routesPath = `${__dirname}/routes`
+ static listen_port = process.env.HTTP_LISTEN_PORT ?? 3010
- middlewares = {
- ...SharedMiddlewares
- }
+ middlewares = {
+ ...SharedMiddlewares,
+ }
- contexts = {
- db: new DbManager(),
- redis: RedisClient(),
- }
+ contexts = {
+ db: new DbManager(),
+ redis: RedisClient(),
+ }
- async onInitialize() {
- await this.contexts.db.initialize()
- await this.contexts.redis.initialize()
- }
+ async onInitialize() {
+ await this.contexts.db.initialize()
+ await this.contexts.redis.initialize()
+ }
}
-Boot(API)
\ No newline at end of file
+Boot(API)
diff --git a/packages/server/services/users/users.service.js b/packages/server/services/users/users.service.js
index 38acde4f..3c58eee8 100644
--- a/packages/server/services/users/users.service.js
+++ b/packages/server/services/users/users.service.js
@@ -6,26 +6,24 @@ import RedisClient from "@shared-classes/RedisClient"
import SharedMiddlewares from "@shared-middlewares"
export default class API extends Server {
- static refName = "users"
- static useEngine = "hyper-express"
- static routesPath = `${__dirname}/routes`
- static listen_port = process.env.HTTP_LISTEN_PORT ?? 3008
+ static refName = "users"
+ static useEngine = "hyper-express"
+ static routesPath = `${__dirname}/routes`
+ static listen_port = process.env.HTTP_LISTEN_PORT ?? 3008
- middlewares = {
- ...SharedMiddlewares
- }
+ middlewares = {
+ ...SharedMiddlewares,
+ }
- handleWsAuth = require("@shared-lib/handleWsAuth").default
+ contexts = {
+ db: new DbManager(),
+ redis: RedisClient(),
+ }
- contexts = {
- db: new DbManager(),
- redis: RedisClient()
- }
-
- async onInitialize() {
- await this.contexts.db.initialize()
- await this.contexts.redis.initialize()
- }
+ async onInitialize() {
+ await this.contexts.db.initialize()
+ await this.contexts.redis.initialize()
+ }
}
-Boot(API)
\ No newline at end of file
+Boot(API)
diff --git a/packages/server/utils/bufferToStream.js b/packages/server/utils/bufferToStream.js
new file mode 100644
index 00000000..e735c77a
--- /dev/null
+++ b/packages/server/utils/bufferToStream.js
@@ -0,0 +1,10 @@
+import { Duplex } from "node:stream"
+
+export default (bf) => {
+ let tmp = new Duplex()
+
+ tmp.push(bf)
+ tmp.push(null)
+
+ return tmp
+}
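Usage sketch for the new bufferToStream util: it returns a Duplex whose readable side already holds the buffer and has ended, so it can be piped or consumed like any readable stream. The relative import path is an assumption.

import bufferToStream from "./bufferToStream.js"

const stream = bufferToStream(Buffer.from("hello"))

stream.on("data", (chunk) => console.log(chunk.toString())) // "hello"
stream.on("end", () => console.log("done"))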
diff --git a/packages/server/utils/readChunk.d.ts b/packages/server/utils/readChunk.d.ts
new file mode 100644
index 00000000..3b4efa36
--- /dev/null
+++ b/packages/server/utils/readChunk.d.ts
@@ -0,0 +1,7 @@
+export default function readChunk(
+ filePath: string,
+ options: {
+ length: number
+ startPosition: number
+ },
+): Promise<Uint8Array>
diff --git a/packages/server/utils/readChunk.js b/packages/server/utils/readChunk.js
new file mode 100644
index 00000000..3fe3a3c0
--- /dev/null
+++ b/packages/server/utils/readChunk.js
@@ -0,0 +1,22 @@
+// Originally forked from https://github.com/sindresorhus/read-chunk
+import { open } from "node:fs/promises"
+
+export default async (filePath, { length, startPosition }) => {
+ const fileDescriptor = await open(filePath, "r")
+
+ try {
+ let { bytesRead, buffer } = await fileDescriptor.read({
+ buffer: new Uint8Array(length),
+ length,
+ position: startPosition,
+ })
+
+ if (bytesRead < length) {
+ buffer = buffer.subarray(0, bytesRead)
+ }
+
+ return buffer
+ } finally {
+ await fileDescriptor?.close()
+ }
+}
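And a usage sketch for the forked readChunk helper: read a fixed-size slice of a file without loading the whole thing, for example to sniff a magic number. The file path is illustrative.

import readChunk from "./readChunk.js"

const header = await readChunk("./song.flac", { length: 4, startPosition: 0 })

console.log(Buffer.from(header).toString("ascii")) // prints "fLaC" for a FLAC file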