Replace Backblaze-B2 with StorageClient in marketplace service
This commit is contained in:
parent 0b014761ec
commit 8e466fedf4
@@ -3,7 +3,7 @@ import { Extension } from "@db_models"
 export default async function resolve(payload) {
     let { user_id, pkg } = payload
 
-    const [pkgName, pkgVersion] = pkg.split("@")
+    let [pkgName, pkgVersion] = pkg.split("@")
 
     if (!pkgVersion) {
         pkgVersion = "latest"
@@ -13,7 +13,10 @@ export default async function resolve(payload) {
         return await Extension.findOne({
             user_id,
             name: pkgName,
-        }).sort({ version: -1 }).limit(1).exec()
+        })
+            .sort({ version: -1 })
+            .limit(1)
+            .exec()
     }
 
     return await Extension.findOne({
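Taken together, the two hunks above let the resolve helper accept a package specifier without a version tag: the missing tag falls back to "latest", which is answered by the sorted findOne query. A minimal usage sketch of that behaviour; the call shapes are inferred from this diff and the resolve export name, and the package name is made up:

// Hypothetical calls, inferred from the diff shown above:

// an explicit version tag resolves that exact record
const pinned = await resolve({ user_id, pkg: "my-extension@1.2.0" })

// no tag falls back to "latest", served by the
// findOne().sort({ version: -1 }).limit(1) branch
const latest = await resolve({ user_id, pkg: "my-extension" })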
@@ -1,41 +1,47 @@
 import { Server } from "linebridge"
-import B2 from "backblaze-b2"
 
 import DbManager from "@shared-classes/DbManager"
 import CacheService from "@shared-classes/CacheService"
+import StorageClient from "@shared-classes/StorageClient"
 
 import SharedMiddlewares from "@shared-middlewares"
 
 class API extends Server {
     static refName = "marketplace"
-    static wsRoutesPath = `${__dirname}/ws_routes`
+    static useEngine = "hyper-express-ng"
     static routesPath = `${__dirname}/routes`
     static listen_port = process.env.HTTP_LISTEN_PORT ?? 3005
 
     middlewares = {
-        ...SharedMiddlewares
+        ...SharedMiddlewares,
     }
 
     contexts = {
         db: new DbManager(),
-        b2: new B2({
-            applicationKeyId: process.env.B2_KEY_ID,
-            applicationKey: process.env.B2_APP_KEY,
-        }),
         cache: new CacheService({
-            fsram: false
+            fsram: false,
         }),
+        storage: StorageClient({
+            endPoint: process.env.B2_ENDPOINT,
+            cdnUrl: process.env.B2_CDN_ENDPOINT,
+            defaultBucket: process.env.B2_BUCKET,
+            accessKey: process.env.B2_KEY_ID,
+            secretKey: process.env.B2_APP_KEY,
+            port: 443,
+            useSSL: true,
+            setupBucket: false,
+        }),
     }
 
     async onInitialize() {
         await this.contexts.db.initialize()
-        await this.contexts.b2.authorize()
+        await this.contexts.storage.initialize()
 
         global.cache = this.contexts.cache
-        global.b2 = this.contexts.b2
+        global.storages = {
+            standard: this.contexts.storage,
+        }
     }
-
-    handleWsAuth = require("@shared-lib/handleWsAuth").default
 }
 
 Boot(API)
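In the service entrypoint the raw backblaze-b2 client is dropped in favour of the shared StorageClient factory, wired to the same B2 credentials but through an S3-style endpoint, and published as global.storages.standard instead of global.b2. The option names (endPoint, port, useSSL, accessKey, secretKey) suggest an S3-compatible SDK underneath; the sketch below is only a guess at what such a factory might wrap, using the minio client as a stand-in, since @shared-classes/StorageClient itself is not part of this commit:

// Hypothetical sketch only; the real @shared-classes/StorageClient is not shown here.
import { Client } from "minio"

export default function StorageClient({
    endPoint,
    cdnUrl,
    defaultBucket,
    accessKey,
    secretKey,
    port = 443,
    useSSL = true,
    setupBucket = true,
}) {
    // endPoint/port/useSSL/accessKey/secretKey map directly onto minio's Client options
    const client = new Client({ endPoint, port, useSSL, accessKey, secretKey })

    // extra fields the service can read back later (e.g. when building asset URLs)
    client.cdnUrl = cdnUrl
    client.defaultBucket = defaultBucket

    // initialize() stands in for the old b2.authorize() call in onInitialize();
    // when setupBucket is true it also makes sure the default bucket exists
    client.initialize = async () => {
        if (setupBucket && !(await client.bucketExists(defaultBucket))) {
            await client.makeBucket(defaultBucket)
        }
    }

    return client
}

Under that assumption, initialize() takes over the role of the removed b2.authorize() call, and setupBucket: false would simply skip bucket creation for an already-provisioned bucket.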
@@ -1,9 +1,6 @@
 {
     "name": "marketplace",
     "dependencies": {
-        "7zip-min": "^1.4.4",
-        "backblaze-b2": "^1.7.0",
-        "sucrase": "^3.32.0",
-        "uglify-js": "^3.17.4"
+        "7zip-min": "^1.4.4"
     }
 }
@@ -3,7 +3,7 @@ import ExtensionClass from "@classes/extension"
 export default async (req) => {
     const { user_id, pkg } = req.params
 
-    return await ExtensionClass.resolveManifest({
+    return await ExtensionClass.resolve({
         user_id,
         pkg,
     })
@@ -1,12 +0,0 @@
-import ExtensionClass from "@classes/extension"
-
-export default async (req, res) => {
-    const { user_id, pkg } = req.params
-
-    const manifest = await ExtensionClass.resolve({
-        user_id,
-        pkg,
-    })
-
-    return manifest
-}
@@ -1,7 +1,33 @@
 import { Extension } from "@db_models"
 
 export default async (req) => {
-    const extensions = await Extension.find()
+    const { limit = 10, offset = 0 } = req.query
 
-    return extensions
+    const totalItems = await Extension.countDocuments()
+
+    const extensions = await Extension.aggregate([
+        {
+            $sort: { registryId: 1, version: -1 },
+        },
+        {
+            $group: {
+                _id: "$registryId",
+                doc: { $first: "$$ROOT" },
+            },
+        },
+        {
+            $replaceRoot: { newRoot: "$doc" },
+        },
+        {
+            $skip: parseInt(offset),
+        },
+        {
+            $limit: parseInt(limit),
+        },
+    ])
+
+    return {
+        items: extensions,
+        total_items: totalItems,
+    }
 }
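The listing route now paginates and returns a single entry per registryId, keeping the highest version because the $sort runs before $group/$first. An illustration of the resulting shape, using made-up sample documents rather than anything from the repo:

// Sample stored documents (illustrative only):
//   { registryId: "alice/widgets", version: "1.0.0" }
//   { registryId: "alice/widgets", version: "1.2.0" }
//   { registryId: "bob/theme",     version: "0.3.0" }
//
// A request with ?limit=10&offset=0 would then return:
//   {
//     items: [
//       { registryId: "alice/widgets", version: "1.2.0" },   // $first after the descending sort
//       { registryId: "bob/theme",     version: "0.3.0" },
//     ],
//     total_items: 3,   // countDocuments() counts every stored version, not the grouped set
//   }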
@@ -4,47 +4,7 @@ import fs from "node:fs"
 import path from "node:path"
 import sevenzip from "7zip-min"
 
-async function uploadFolderToB2(bucketId, folderPath, b2Directory) {
-    try {
-        const uploadFiles = async (dir) => {
-            const files = fs.readdirSync(dir)
-
-            for (const file of files) {
-                const fullPath = path.join(dir, file)
-                const stats = fs.statSync(fullPath)
-
-                if (stats.isDirectory()) {
-                    await uploadFiles(fullPath)
-                } else {
-                    const fileData = fs.readFileSync(fullPath)
-                    const b2FileName = path
-                        .join(b2Directory, path.relative(folderPath, fullPath))
-                        .replace(/\\/g, "/")
-
-                    console.log(`Uploading ${b2FileName}...`)
-
-                    const uploadUrl = await b2.getUploadUrl({
-                        bucketId: bucketId,
-                    })
-
-                    await b2.uploadFile({
-                        uploadUrl: uploadUrl.data.uploadUrl,
-                        uploadAuthToken: uploadUrl.data.authorizationToken,
-                        fileName: b2FileName,
-                        data: fileData,
-                    })
-
-                    console.log(`Uploaded ${b2FileName}`)
-                }
-            }
-        }
-
-        await uploadFiles(folderPath)
-        console.log("All files uploaded successfully.")
-    } catch (error) {
-        console.error("Error uploading folder:", error)
-    }
-}
+import putObject from "@shared-classes/Upload/putObject"
 
 export default {
     middlewares: ["withAuthentication"],
@@ -62,7 +22,7 @@ export default {
         pkg = JSON.parse(pkg)
 
         const { user_id } = req.auth.session
-        const registryId = `${user_id}/${pkg.name}@${pkg.version}`
+        const registryId = `${user_id}/${pkg.name}`
         const s3Path = `extensions/${pkg.name}/${pkg.version}`
 
         const workPath = path.resolve(
@@ -84,7 +44,7 @@ export default {
 
         let extensionRegistry = await Extension.findOne({
             user_id: user_id,
-            registryId: registryId,
+            name: pkg.name,
            version: pkg.version,
         })
 
@@ -116,16 +76,20 @@ export default {
             })
         })
 
-        await uploadFolderToB2(process.env.B2_BUCKET_ID, pkgPath, s3Path)
+        await putObject({
+            filePath: pkgPath,
+            uploadPath: s3Path,
+        })
 
         fs.promises.rm(workPath, { recursive: true, force: true })
 
-        const assetsUrl = `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${s3Path}`
+        const assetsUrl = `${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${s3Path}`
 
         extensionRegistry = await Extension.create({
             user_id: user_id,
             name: pkg.name,
             version: pkg.version,
+            description: pkg.description,
             registryId: registryId,
             assetsUrl: assetsUrl,
             srcUrl: `${assetsUrl}/src`,
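The publish flow above swaps the local uploadFolderToB2 walker for the shared putObject helper; only the call site putObject({ filePath, uploadPath }) appears in this commit. The sketch below is an assumed shape for such a helper, recursing over directories and handing single files to the storage client registered as global.storages.standard (the fPutObject call assumes an S3/minio-style client, matching the earlier sketch; none of this is confirmed by the diff):

// Hypothetical sketch of a putObject({ filePath, uploadPath }) helper;
// the real @shared-classes/Upload/putObject is not part of this diff.
import fs from "node:fs"
import path from "node:path"

export default async function putObject({ filePath, uploadPath }) {
    const storage = global.storages.standard
    const stat = await fs.promises.stat(filePath)

    if (stat.isDirectory()) {
        // recurse over directory entries, mirroring the removed uploadFolderToB2 walker
        for (const entry of await fs.promises.readdir(filePath)) {
            await putObject({
                filePath: path.join(filePath, entry),
                uploadPath: `${uploadPath}/${entry}`,
            })
        }
        return { id: uploadPath }
    }

    // single file: push it into the default bucket under the computed key
    await storage.fPutObject(storage.defaultBucket, uploadPath, filePath)

    return { id: uploadPath }
}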
@@ -1,86 +0,0 @@
-import fs from "node:fs"
-import path from "node:path"
-import pMap from "p-map"
-
-export default async function b2Upload({
-    source,
-    remotePath,
-    metadata = {},
-    targetFilename,
-    isDirectory,
-    retryNumber = 0,
-}) {
-    if (isDirectory) {
-        let files = await fs.promises.readdir(source)
-
-        files = files.map((file) => {
-            const filePath = path.join(source, file)
-
-            const isTargetDirectory = fs.lstatSync(filePath).isDirectory()
-
-            return {
-                source: filePath,
-                remotePath: path.join(remotePath, file),
-                isDirectory: isTargetDirectory,
-            }
-        })
-
-        await pMap(files, b2Upload, {
-            concurrency: 5,
-        })
-
-        return {
-            id: remotePath,
-            url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}/${targetFilename}`,
-            metadata: metadata,
-        }
-    }
-
-    try {
-        //await global.b2.authorize()
-
-        if (!fs.existsSync(source)) {
-            throw new OperationError(500, "File not found")
-        }
-
-        const uploadUrl = await global.b2.getUploadUrl({
-            bucketId: process.env.B2_BUCKET_ID,
-        })
-
-        console.debug(`Uploading object to B2 Storage >`, {
-            source: source,
-            remote: remotePath,
-        })
-
-        const data = await fs.promises.readFile(source)
-
-        await global.b2.uploadFile({
-            uploadUrl: uploadUrl.data.uploadUrl,
-            uploadAuthToken: uploadUrl.data.authorizationToken,
-            fileName: remotePath,
-            data: data,
-            info: metadata,
-        })
-    } catch (error) {
-        console.error(error)
-
-        if (retryNumber < 5) {
-            return await b2Upload({
-                source,
-                remotePath,
-                metadata,
-                targetFilename,
-                isDirectory,
-                retryNumber: retryNumber + 1,
-            })
-        }
-
-        throw new OperationError(500, "B2 upload failed")
-    }
-
-    return {
-        id: remotePath,
-        url: `https://${process.env.B2_CDN_ENDPOINT}/${process.env.B2_BUCKET}/${remotePath}`,
-        metadata: metadata,
-    }
-}
@@ -1,23 +0,0 @@
-import fs from "fs"
-import path from "path"
-
-async function syncFolder(dir, destPath) {
-    const files = await fs.promises.readdir(dir)
-
-    for await (const file of files) {
-        const filePath = path.resolve(dir, file)
-        const desitinationFilePath = `${destPath}/${file}`
-
-        const stat = fs.statSync(filePath)
-
-        if (stat.isDirectory()) {
-            await syncFolder(filePath, desitinationFilePath)
-        } else {
-            const fileContent = await fs.promises.readFile(filePath)
-
-            await global.storage.putObject(process.env.S3_BUCKET, desitinationFilePath, fileContent)
-        }
-    }
-}
-
-export default syncFolder