mirror of
https://github.com/ragestudio/comty.js.git
synced 2025-06-09 02:24:18 +00:00
support for file uploads
This commit is contained in:
parent
3a091da7fe
commit
073457a34c
229
src/classes/FileUploadBrowser.js
Normal file
229
src/classes/FileUploadBrowser.js
Normal file
@ -0,0 +1,229 @@
|
||||
import EventEmitter from "@foxify/events"
|
||||
|
||||
/**
 * Chunked file uploader for browser environments.
 *
 * Splits a `File` into fixed-size chunks, POSTs them sequentially to
 * `endpoint`, retries failed chunks with a delay, and optionally follows a
 * server-sent-events channel after the last chunk to track remote processing.
 *
 * Emitted events: "progress", "fileRetry", "error", "finish",
 * "online", "offline".
 */
export default class FileUploadBrowser {
    /**
     * @param {object} params
     * @param {string} params.endpoint - URL chunks are POSTed to.
     * @param {File} params.file - Browser File to upload.
     * @param {object} [params.headers={}] - Extra request headers.
     * @param {number} [params.splitChunkSize=10485760] - Chunk size in bytes (10 MiB default).
     * @param {number} [params.maxRetries=3] - Max retry attempts per chunk.
     * @param {number} [params.delayBeforeRetry=5] - Seconds to wait before retrying.
     * @throws {Error} When a required param is missing or invalid.
     */
    constructor(params) {
        const {
            endpoint,
            file,
            headers = {},
            splitChunkSize = 1024 * 1024 * 10,
            maxRetries = 3,
            delayBeforeRetry = 5,
        } = params

        if (!endpoint) {
            throw new Error("Missing endpoint")
        }

        // FIX: original tested `(!file) instanceof File`, which is always
        // false (a boolean is never a File), so invalid files slipped through.
        if (!(file instanceof File)) {
            throw new Error("Invalid or missing file")
        }

        if (typeof headers !== "object") {
            throw new Error("Invalid headers")
        }

        if (splitChunkSize <= 0) {
            throw new Error("Invalid splitChunkSize")
        }

        this.chunkCount = 0
        this.retriesCount = 0

        this.splitChunkSize = splitChunkSize
        this.totalChunks = Math.ceil(file.size / splitChunkSize)

        this.maxRetries = maxRetries
        this.delayBeforeRetry = delayBeforeRetry
        this.offline = this.paused = false

        this.endpoint = endpoint
        this.file = file
        this.headers = {
            ...headers,
            "uploader-original-name": encodeURIComponent(file.name),
            "uploader-file-id": this.getFileUID(file),
            "uploader-chunks-total": this.totalChunks,
            "chunk-size": splitChunkSize,
            "cache-control": "no-cache",
            connection: "keep-alive",
        }

        // FIX: original guard was `!this.offline && ...`, which only acted
        // when we were already online (a no-op) and ignored a real reconnect.
        // Resume the upload only when we had actually gone offline.
        window.addEventListener(
            "online",
            () =>
                this.offline &&
                ((this.offline = false),
                this.events.emit("online"),
                this.nextSend()),
        )
        window.addEventListener(
            "offline",
            () => ((this.offline = true), this.events.emit("offline")),
        )
    }

    _reader = new FileReader()
    events = new EventEmitter()

    /** Begins the upload by sending the first chunk. */
    start = () => {
        this.nextSend()
    }

    /**
     * Builds a pseudo-unique upload id for the file.
     * Not cryptographically secure; collisions are improbable but possible.
     *
     * @param {File} file
     * @returns {string} "<random + timestamp + size>_tmp"
     */
    getFileUID(file) {
        return (
            Math.floor(Math.random() * 100000000) +
            Date.now() +
            file.size +
            "_tmp"
        )
    }

    /**
     * Reads the current chunk (by `this.chunkCount`) from the file.
     *
     * @returns {Promise<Blob>} chunk wrapped as an octet-stream Blob.
     */
    loadChunk() {
        return new Promise((resolve) => {
            const start = this.chunkCount * this.splitChunkSize
            const end = Math.min(start + this.splitChunkSize, this.file.size)

            // load chunk as buffer
            this._reader.onload = () => {
                resolve(
                    new Blob([this._reader.result], {
                        type: "application/octet-stream",
                    }),
                )
            }
            this._reader.readAsArrayBuffer(this.file.slice(start, end))
        })
    }

    /**
     * POSTs the currently loaded chunk (`this.chunk`) to the endpoint.
     *
     * @returns {Promise<Response|null>} the fetch Response, or null when the
     * request failed at network level (a retry has already been scheduled).
     */
    async sendChunk() {
        console.log(`[UPLOADER] Sending chunk ${this.chunkCount}`, {
            currentChunk: this.chunkCount,
            totalChunks: this.totalChunks,
            chunk: this.chunk,
        })

        try {
            const res = await fetch(this.endpoint, {
                method: "POST",
                headers: {
                    ...this.headers,
                    "uploader-chunk-number": this.chunkCount,
                },
                body: this.chunk,
            })

            return res
        } catch (error) {
            // FIX: original fell through returning undefined, which made
            // nextSend() crash on `res.status` and double-handle the failure.
            // Schedule the retry and report the miss with an explicit null.
            this.manageRetries()
            return null
        }
    }

    /**
     * Schedules a retry for the current chunk, or emits a fatal "error"
     * event once maxRetries is exhausted.
     */
    manageRetries() {
        if (++this.retriesCount < this.maxRetries) {
            setTimeout(() => this.nextSend(), this.delayBeforeRetry * 1000)

            this.events.emit("fileRetry", {
                message: `Retrying chunk ${this.chunkCount}`,
                chunk: this.chunkCount,
                // FIX: original read `this.retries`, which is never assigned
                // (always undefined); the configured field is maxRetries.
                retriesLeft: this.maxRetries - this.retriesCount,
            })
        } else {
            this.events.emit("error", {
                message: `No more retries for chunk ${this.chunkCount}`,
            })
        }
    }

    /**
     * Loads and sends the next chunk, then either recurses for the next one,
     * waits on the server's SSE channel (after the last chunk), or emits
     * "finish" directly.
     */
    async nextSend() {
        if (this.paused || this.offline) {
            return null
        }

        this.chunk = await this.loadChunk()

        try {
            const res = await this.sendChunk()

            // Network-level failure: sendChunk() already scheduled a retry.
            if (!res) {
                return null
            }

            if (![200, 201, 204].includes(res.status)) {
                // failed!!
                return this.manageRetries()
            }

            // FIX: a 204 response has no body, so res.json() would throw.
            const data = res.status === 204 ? {} : await res.json()

            console.log(`[UPLOADER] Chunk ${this.chunkCount} sent`)

            this.chunkCount = this.chunkCount + 1

            if (this.chunkCount < this.totalChunks) {
                // deliberately not awaited: each chunk drives the next
                this.nextSend()
            }

            // check if is the last chunk, if so, handle sse events
            if (this.chunkCount === this.totalChunks) {
                if (data.sseChannelId || data.sseUrl) {
                    this.waitOnSSE(data)
                } else {
                    this.events.emit("finish", data)
                }
            }

            this.events.emit("progress", {
                percent: Math.round((100 / this.totalChunks) * this.chunkCount),
                state: "Uploading",
            })
        } catch (error) {
            this.events.emit("error", error)
        }
    }

    /**
     * Pauses or resumes the upload. Resuming immediately sends the next chunk.
     */
    togglePause() {
        this.paused = !this.paused

        if (!this.paused) {
            return this.nextSend()
        }
    }

    /**
     * Listens on the server-sent-events channel announced by the last chunk's
     * response, translating its messages into "progress"/"finish"/"error".
     *
     * NOTE(review): relies on the global `app` object
     * (app.cores.api.client().mainOrigin) provided by the host application —
     * confirm it is always present in this runtime.
     *
     * @param {{ sseChannelId?: string, sseUrl?: string }} data
     */
    waitOnSSE(data) {
        // temporal solution until a better solution
        const url = `${app.cores.api.client().mainOrigin}/upload/sse_events/${data.sseChannelId}`

        console.log(`[UPLOADER] Connecting to SSE channel >`, url)
        const eventSource = new EventSource(url)

        eventSource.onerror = (error) => {
            this.events.emit("error", error)
            eventSource.close()
        }

        eventSource.onopen = () => {
            console.log(`[UPLOADER] SSE channel opened`)
        }

        eventSource.onmessage = (event) => {
            // parse json
            const messageData = JSON.parse(event.data)

            console.log(`[UPLOADER] SSE Event >`, messageData)

            if (messageData.event === "done") {
                this.events.emit("finish", messageData.result)
                eventSource.close()
            }

            if (messageData.event === "error") {
                this.events.emit("error", messageData.result)
                eventSource.close()
            }

            if (messageData.state) {
                this.events.emit("progress", {
                    percent: messageData.percent,
                    state: messageData.state,
                })
            }
        }
    }
}
|
58
src/models/files/index.js
Normal file
58
src/models/files/index.js
Normal file
@ -0,0 +1,58 @@
|
||||
import FileUploadBrowser from "../../classes/FileUploadBrowser"
|
||||
import SessionModel from "../session"
|
||||
|
||||
/**
 * File upload model.
 *
 * Wraps FileUploadBrowser with the API session (auth token, chunk endpoint)
 * and exposes a callback-style interface for error/progress/finish events.
 */
export default class Files {
    /** @returns {string} Absolute URL of the chunked-upload endpoint. */
    static get chunkUploadEndpoint() {
        return globalThis.__comty_shared_state.mainOrigin + "/upload/chunk"
    }

    /**
     * Uploads a file in 5 MiB chunks from the browser.
     *
     * @param {File} file - File to upload.
     * @param {object} [options]
     * @param {string} [options.service="standard"] - Target upload service.
     * @param {object} [options.headers] - Extra request headers.
     * @param {Function} [options.onError] - Called as onError(file, error).
     * Errors are swallowed after the callback; the promise then resolves
     * with undefined instead of rejecting.
     * @param {Function} [options.onProgress] - Called as onProgress(file, data).
     * @param {Function} [options.onFinish] - Called as onFinish(file, data).
     * @returns {Promise<any>} Final upload result, or undefined on error.
     */
    static upload = async (
        file,
        { service, headers, onError, onProgress, onFinish } = {},
    ) => {
        try {
            if (globalThis.isServerMode) {
                throw new Error(
                    "File Upload is only supported in the browser. Yet...",
                )
            }

            // FIX: the resolved value was discarded, so callers always got
            // undefined; return the final upload payload instead.
            return await new Promise((resolve, reject) => {
                const uploadInstance = new FileUploadBrowser({
                    endpoint: Files.chunkUploadEndpoint,
                    splitChunkSize: 5 * 1024 * 1024,
                    file: file,
                    // NOTE(review): FileUploadBrowser's constructor does not
                    // destructure `service`, so this key is currently ignored —
                    // confirm whether it should travel as a header instead.
                    service: service ?? "standard",
                    headers: {
                        ...(headers ?? {}),
                        Authorization: `Bearer ${SessionModel.token}`,
                    },
                })

                uploadInstance.events.on("error", (data) => {
                    reject(data)
                })

                uploadInstance.events.on("finish", (data) => {
                    if (typeof onFinish === "function") {
                        onFinish(file, data)
                    }

                    resolve(data)
                })

                uploadInstance.events.on("progress", (data) => {
                    if (typeof onProgress === "function") {
                        onProgress(file, data)
                    }
                })

                uploadInstance.start()
            })
        } catch (error) {
            if (typeof onError === "function") {
                onError(file, error)
            }
        }
    }
}
|
Loading…
x
Reference in New Issue
Block a user