Mirror of https://github.com/ragestudio/comty.git (synced 2025-06-09 02:24:16 +00:00)

Use comty.js file uploads

This commit is contained in:
parent 270ac23a46
commit 464bb901e4
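In short, this commit drops the in-app `remoteStorage` core and the local `ChunkedUpload` class (both deleted below) and routes uploads through comty.js instead: call sites now go through the new `queuedUploadFile` helper (or `FilesModel.upload` directly). A rough before/after sketch of one call site, assembled from the hunks below; `req` is the antd Upload customRequest payload as used in those hunks, and `handleProgress` is a placeholder callback, not a name from the diff:

import queuedUploadFile from "@utils/queuedUploadFile"

const uploadToStorage = async (req) => {
	// Before (removed in this commit): upload through the app core
	// const response = await app.cores.remoteStorage.uploadFile(req.file, {
	//     onProgress: handleProgress,
	//     headers: { transformations: "a-dash" },
	// })

	// After: queue the upload; queuedUploadFile wraps FilesModel.upload from comty.js
	await queuedUploadFile(req.file, {
		onFinish: (file, response) => req.onSuccess(response),
		onError: (file, error) => req.onError(error),
		onProgress: handleProgress, // placeholder progress callback
		headers: { transformations: "a-dash" },
	})
}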
@@ -1,237 +0,0 @@
import { EventBus } from "@ragestudio/vessel"

export default class ChunkedUpload {
	constructor(params) {
		const {
			endpoint,
			file,
			headers = {},
			splitChunkSize = 1024 * 1024 * 10,
			maxRetries = 3,
			delayBeforeRetry = 5,
		} = params

		if (!endpoint) {
			throw new Error("Missing endpoint")
		}

		if (!(file instanceof File)) {
			throw new Error("Invalid or missing file")
		}

		if (typeof headers !== "object") {
			throw new Error("Invalid headers")
		}

		if (splitChunkSize <= 0) {
			throw new Error("Invalid splitChunkSize")
		}

		this.chunkCount = 0
		this.retriesCount = 0

		this.splitChunkSize = splitChunkSize
		this.totalChunks = Math.ceil(file.size / splitChunkSize)

		this.maxRetries = maxRetries
		this.delayBeforeRetry = delayBeforeRetry
		this.offline = this.paused = false

		this.endpoint = endpoint
		this.file = file
		this.headers = {
			...headers,
			"uploader-original-name": encodeURIComponent(file.name),
			"uploader-file-id": this.getFileUID(file),
			"uploader-chunks-total": this.totalChunks,
			"chunk-size": splitChunkSize,
			"cache-control": "no-cache",
			connection: "keep-alive",
		}

		this.setupListeners()
		this.nextSend()

		console.debug("[Uploader] Created", {
			splitChunkSize: splitChunkSize,
			totalChunks: this.totalChunks,
			totalSize: file.size,
			fileName: file.name,
			fileType: file.type,
		})
	}

	_reader = new FileReader()
	events = new EventBus()

	setupListeners() {
		window.addEventListener(
			"online",
			() =>
				!this.offline &&
				((this.offline = false),
				this.events.emit("online"),
				this.nextSend()),
		)
		window.addEventListener(
			"offline",
			() => ((this.offline = true), this.events.emit("offline")),
		)
	}

	getFileUID(file) {
		return (
			Math.floor(Math.random() * 100000000) +
			Date.now() +
			file.size +
			"_tmp"
		)
	}

	loadChunk() {
		return new Promise((resolve) => {
			const start = this.chunkCount * this.splitChunkSize
			const end = Math.min(start + this.splitChunkSize, this.file.size)

			this._reader.onload = () => {
				resolve(
					new Blob([this._reader.result], {
						type: "application/octet-stream",
					}),
				)
			}
			this._reader.readAsArrayBuffer(this.file.slice(start, end))
		})
	}

	async sendChunk() {
		console.log(`[UPLOADER] Sending chunk ${this.chunkCount}`, {
			currentChunk: this.chunkCount,
			totalChunks: this.totalChunks,
			chunk: this.chunk,
		})

		try {
			const res = await fetch(this.endpoint, {
				method: "POST",
				headers: {
					...this.headers,
					"uploader-chunk-number": this.chunkCount,
				},
				body: this.chunk,
			})

			return res
		} catch (error) {
			this.manageRetries()
		}
	}

	manageRetries() {
		if (++this.retriesCount < this.maxRetries) {
			setTimeout(() => this.nextSend(), this.delayBeforeRetry * 1000)

			this.events.emit("fileRetry", {
				message: `Retrying chunk ${this.chunkCount}`,
				chunk: this.chunkCount,
				retriesLeft: this.maxRetries - this.retriesCount,
			})
		} else {
			this.events.emit("error", {
				message: `No more retries for chunk ${this.chunkCount}`,
			})
		}
	}

	async nextSend() {
		if (this.paused || this.offline) {
			return null
		}

		this.chunk = await this.loadChunk()

		try {
			const res = await this.sendChunk()

			if (![200, 201, 204].includes(res.status)) {
				// failed!!
				return this.manageRetries()
			}

			const data = await res.json()

			console.log(`[UPLOADER] Chunk ${this.chunkCount} sent`)

			this.chunkCount = this.chunkCount + 1

			if (this.chunkCount < this.totalChunks) {
				this.nextSend()
			}

			// check if this is the last chunk; if so, handle SSE events
			if (this.chunkCount === this.totalChunks) {
				if (data.sseChannelId || data.sseUrl) {
					this.waitOnSSE(data)
				} else {
					this.events.emit("finish", data)
				}
			}

			this.events.emit("progress", {
				percent: Math.round((100 / this.totalChunks) * this.chunkCount),
				state: "Uploading",
			})
		} catch (error) {
			this.events.emit("error", error)
		}
	}

	togglePause() {
		this.paused = !this.paused

		if (!this.paused) {
			return this.nextSend()
		}
	}

	waitOnSSE(data) {
		// temporary solution until comty.js manages this
		const url = `${app.cores.api.client().mainOrigin}/upload/sse_events/${data.sseChannelId}`

		console.log(`[UPLOADER] Connecting to SSE channel >`, url)
		const eventSource = new EventSource(url)

		eventSource.onerror = (error) => {
			this.events.emit("error", error)
			eventSource.close()
		}

		eventSource.onopen = () => {
			console.log(`[UPLOADER] SSE channel opened`)
		}

		eventSource.onmessage = (event) => {
			// parse json
			const messageData = JSON.parse(event.data)

			console.log(`[UPLOADER] SSE Event >`, messageData)

			if (messageData.event === "done") {
				this.events.emit("finish", messageData.result)
				eventSource.close()
			}

			if (messageData.event === "error") {
				this.events.emit("error", messageData.result)
				eventSource.close()
			}

			if (messageData.state) {
				this.events.emit("progress", {
					percent: messageData.percent,
					state: messageData.state,
				})
			}
		}
	}
}
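For reference, the deleted class reports through its `events` EventBus rather than callbacks, and starts sending as soon as it is constructed (the constructor calls `nextSend()`). A minimal sketch of how it was driven, modelled on the `RemoteStorage` core removed further down in this commit; the endpoint and Authorization header come from that core, not from `ChunkedUpload` itself, and `someFile` is a placeholder:

import SessionModel from "@models/session"
import ChunkedUpload from "@classes/ChunkedUpload"

const uploader = new ChunkedUpload({
	endpoint: `${app.cores.api.client().mainOrigin}/upload/chunk`,
	file: someFile, // a File instance (placeholder)
	splitChunkSize: 5 * 1024 * 1024,
	headers: {
		Authorization: `Bearer ${SessionModel.token}`,
	},
})

// emitted from nextSend() while chunks are posted, and from waitOnSSE() afterwards
uploader.events.on("progress", ({ percent, state }) => console.log(percent, state))
uploader.events.on("finish", (data) => console.log("upload finished", data))
uploader.events.on("error", (error) => console.error("upload failed", error))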
@@ -1,9 +1,10 @@
import React from "react"
import * as antd from "antd"
import classnames from "classnames"
import { DragDropContext, Droppable } from "react-beautiful-dnd"
import { createSwapy } from "swapy"

import queuedUploadFile from "@utils/queuedUploadFile"
import FilesModel from "@models/files"

import TrackManifest from "@cores/player/classes/TrackManifest"

import { Icons } from "@components/Icons"
@@ -209,14 +210,14 @@ class TracksManager extends React.Component {
				console.log(
					`[${trackManifest.uid}] Found cover, uploading...`,
				)

				const coverFile = new File(
					[trackManifest._coverBlob],
					"cover.jpg",
					{ type: trackManifest._coverBlob.type },
				)

				const coverUpload =
					await app.cores.remoteStorage.uploadFile(coverFile)
				const coverUpload = await FilesModel.upload(coverFile)

				trackManifest.cover = coverUpload.url
			}
@@ -243,25 +244,16 @@
	}

	uploadToStorage = async (req) => {
		const response = await app.cores.remoteStorage
			.uploadFile(req.file, {
				onProgress: this.handleTrackFileUploadProgress,
				headers: {
					transformations: "a-dash",
				},
			})
			.catch((error) => {
				console.error(error)
				antd.message.error(error)

				req.onError(error)

				return false
			})

		if (response) {
			req.onSuccess(response)
		}
		await queuedUploadFile(req.file, {
			onFinish: (file, response) => {
				req.onSuccess(response)
			},
			onError: req.onError,
			onProgress: this.handleTrackFileUploadProgress,
			headers: {
				transformations: "a-dash",
			},
		})
	}

	handleTrackFileUploadProgress = async (file, progress) => {
@@ -10,6 +10,7 @@ import { Icons } from "@components/Icons"
import Poll from "@components/Poll"

import clipboardEventFileToFile from "@utils/clipboardEventFileToFile"
import queuedUploadFile from "@utils/queuedUploadFile"

import PostModel from "@models/post"
import SearchModel from "@models/search"
@@ -195,22 +196,14 @@ export default class PostCreator extends React.Component {
	uploadFile = async (req) => {
		this.toggleUploaderVisibility(false)

		const request = await app.cores.remoteStorage
			.uploadFile(req.file)
			.catch((error) => {
				console.error(error)
				antd.message.error(error)

				req.onError(error)

				return false
			})

		if (request) {
			console.log(`Upload done >`, request)

			return req.onSuccess(request)
		}
		await queuedUploadFile(req.file, {
			onFinish: (file, response) => {
				req.onSuccess(response)
			},
			onError: (file, response) => {
				req.onError(response)
			},
		})
	}

	removeAttachment = (file_uid) => {
@@ -1,12 +1,13 @@
import React from "react"
import { Upload, Progress } from "antd"
import classnames from "classnames"
import queuedUploadFile from "@utils/queuedUploadFile"

import { Icons } from "@components/Icons"

import "./index.less"

export default (props) => {
const UploadButton = (props) => {
	const [uploading, setUploading] = React.useState(false)
	const [progress, setProgress] = React.useState(null)

@@ -40,17 +41,7 @@ export default (props) => {

		handleOnStart(req.file.uid, req.file)

		await app.cores.remoteStorage.uploadFile(req.file, {
			headers: props.headers,
			onProgress: (file, progress) => {
				setProgress(progress)
				handleOnProgress(file.uid, progress)
			},
			onError: (file, error) => {
				setProgress(null)
				handleOnError(file.uid, error)
				setUploading(false)
			},
		await queuedUploadFile(req.file, {
			onFinish: (file, response) => {
				if (typeof props.ctx?.onUpdateItem === "function") {
					props.ctx.onUpdateItem(response.url)
@@ -67,6 +58,16 @@ export default (props) => {
					setProgress(null)
				}, 1000)
			},
			onError: (file, error) => {
				setProgress(null)
				handleOnError(file.uid, error)
				setUploading(false)
			},
			onProgress: (file, progress) => {
				setProgress(progress)
				handleOnProgress(file.uid, progress)
			},
			headers: props.headers,
		})
	}

@@ -106,3 +107,5 @@ export default (props) => {
		</Upload>
	)
}

export default UploadButton
@@ -1,117 +0,0 @@
import { Core } from "@ragestudio/vessel"

import ChunkedUpload from "@classes/ChunkedUpload"
import SessionModel from "@models/session"

export default class RemoteStorage extends Core {
	static namespace = "remoteStorage"
	static depends = ["api", "tasksQueue"]

	public = {
		uploadFile: this.uploadFile,
		getFileHash: this.getFileHash,
		binaryArrayToFile: this.binaryArrayToFile,
	}

	binaryArrayToFile(bin, filename) {
		const { format, data } = bin

		const filenameExt = format.split("/")[1]
		filename = `${filename}.${filenameExt}`

		const byteArray = new Uint8Array(data)
		const blob = new Blob([byteArray], { type: data.type })

		return new File([blob], filename, {
			type: format,
		})
	}

	async getFileHash(file) {
		const buffer = await file.arrayBuffer()
		const hash = await crypto.subtle.digest("SHA-256", buffer)
		const hashArray = Array.from(new Uint8Array(hash))
		const hashHex = hashArray
			.map((b) => b.toString(16).padStart(2, "0"))
			.join("")

		return hashHex
	}

	async uploadFile(
		file,
		{
			onProgress = () => {},
			onFinish = () => {},
			onError = () => {},
			service = "standard",
			headers = {},
		} = {},
	) {
		return await new Promise((_resolve, _reject) => {
			const fn = async () =>
				new Promise((resolve, reject) => {
					const uploader = new ChunkedUpload({
						endpoint: `${app.cores.api.client().mainOrigin}/upload/chunk`,
						splitChunkSize: 5 * 1024 * 1024,
						file: file,
						service: service,
						headers: {
							...headers,
							"provider-type": service,
							Authorization: `Bearer ${SessionModel.token}`,
						},
					})

					uploader.events.on("error", ({ message }) => {
						this.console.error("[Uploader] Error", message)

						app.cores.notifications.new(
							{
								title: "Could not upload file",
								description: message,
							},
							{
								type: "error",
							},
						)

						if (typeof onError === "function") {
							onError(file, message)
						}

						reject(message)
						_reject(message)
					})

					uploader.events.on("progress", (data) => {
						if (typeof onProgress === "function") {
							onProgress(file, data)
						}
					})

					uploader.events.on("finish", (data) => {
						this.console.debug("[Uploader] Finish", data)

						app.cores.notifications.new(
							{
								title: "File uploaded",
							},
							{
								type: "success",
							},
						)

						if (typeof onFinish === "function") {
							onFinish(file, data)
						}

						resolve(data)
						_resolve(data)
					})
				})

			app.cores.tasksQueue.appendToQueue(`upload_${file.name}`, fn)
		})
	}
}
packages/app/src/utils/queuedUploadFile/index.js (new file, 46 lines)
@@ -0,0 +1,46 @@
import FilesModel from "@models/files"

export default (file, options) => {
	if (!app.cores.tasksQueue) {
		throw new Error("Missing tasksQueue")
	}

	return app.cores.tasksQueue.appendToQueue(
		`upload_${file.name}`,
		async () => {
			await FilesModel.upload(file, {
				...options,
				onError: (file, error) => {
					app.cores.notifications.new(
						{
							title: "Could not upload file",
							description: error.message,
						},
						{
							type: "error",
						},
					)

					if (typeof options.onError === "function") {
						options.onError(file, error)
					}
				},
				onFinish: (file, data) => {
					app.cores.notifications.new(
						{
							title: "File uploaded",
							description: `[${file.name}] uploaded successfully!`,
						},
						{
							type: "success",
						},
					)

					if (typeof options.onFinish === "function") {
						options.onFinish(file, data)
					}
				},
			})
		},
	)
}
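As a usage note, this helper only layers queueing (a single tasksQueue entry keyed by `upload_${file.name}`) and success/error notifications on top of `FilesModel.upload`; callers in this commit hand it the antd Upload customRequest payload. A hedged sketch of such a call site, with the handler body assumed rather than taken verbatim from the diff:

import queuedUploadFile from "@utils/queuedUploadFile"

// e.g. an antd <Upload> customRequest handler, as in the UploadButton changes above
const customRequest = async (req) => {
	await queuedUploadFile(req.file, {
		onFinish: (file, response) => req.onSuccess(response),
		onError: (file, error) => req.onError(error),
		onProgress: (file, progress) => console.log(file.uid, progress),
		headers: {}, // optional extra upload headers
	})
}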