merge from local

This commit is contained in:
SrGooglo 2024-04-01 17:14:39 +02:00
parent 4342339aae
commit bcc889c6fa
110 changed files with 478 additions and 7316 deletions

View File

@ -15,6 +15,7 @@ const commands = [
],
fn: async (package_manifest, options) => {
await core.initialize()
await core.setup()
return await core.package.install(package_manifest, options)
}
@ -30,6 +31,7 @@ const commands = [
],
fn: async (pkg_id, options) => {
await core.initialize()
await core.setup()
return await core.package.execute(pkg_id, options)
}
@ -45,6 +47,7 @@ const commands = [
],
fn: async (pkg_id, options) => {
await core.initialize()
await core.setup()
return await core.package.update(pkg_id, options)
}

View File

@ -19,6 +19,7 @@
"checksum": "^1.0.0",
"cli-color": "^2.0.4",
"cli-progress": "^3.12.0",
"deep-object-diff": "^1.1.9",
"extends-classes": "^1.0.5",
"googleapis": "^134.0.0",
"human-format": "^1.2.0",

View File

@ -1,3 +1,6 @@
import Logger from "../logger"
import DB from "../db"
import fs from "node:fs"
import GenericSteps from "../generic_steps"
@ -11,27 +14,44 @@ export default class PatchManager {
this.log = Logger.child({ service: `PATCH-MANAGER|${pkg.id}` })
}
async get(patch) {
async get(select) {
if (!this.manifest.patches) {
return []
}
let list = []
if (typeof patch === "undefined") {
if (typeof select === "undefined") {
list = this.manifest.patches
} else {
list = this.manifest.patches.find((p) => p.id === patch.id)
}
if (Array.isArray(select)) {
for await (let id of select) {
const patch = this.manifest.patches.find((patch) => patch.id === id)
if (patch) {
list.push(patch)
}
}
}
return list
}
async patch(patch) {
const list = await this.get(patch)
async reapply() {
if (Array.isArray(this.pkg.applied_patches)) {
return await this.patch(this.pkg.applied_patches)
}
return true
}
async patch(select) {
const list = await this.get(select)
for await (let patch of list) {
global._relic_eventBus.emit(`pkg:update:state:${this.pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: this.pkg.id,
status_text: `Applying patch [${patch.id}]...`,
})
@ -41,12 +61,6 @@ export default class PatchManager {
this.log.info(`Applying ${patch.additions.length} Additions...`)
for await (let addition of patch.additions) {
this.log.info(`Applying addition [${addition.id}]...`)
global._relic_eventBus.emit(`pkg:update:state:${this.pkg.id}`, {
status_text: `Applying addition [${additions.id}]...`,
})
// resolve patch file
addition.file = await parseStringVars(addition.file, this.pkg)
@ -55,14 +69,26 @@ export default class PatchManager {
continue
}
this.log.info(`Applying addition [${addition.file}]`)
global._relic_eventBus.emit(`pkg:update:state`, {
id: this.pkg.id,
status_text: `Applying addition [${addition.file}]`,
})
await GenericSteps(this.pkg, addition.steps, this.log)
}
}
pkg.applied_patches.push(patch.id)
if (!this.pkg.applied_patches.includes(patch.id)) {
this.pkg.applied_patches.push(patch.id)
}
}
global._relic_eventBus.emit(`pkg:update:state:${this.pkg.id}`, {
await DB.updatePackageById(this.pkg.id, { applied_patches: this.pkg.applied_patches })
global._relic_eventBus.emit(`pkg:update:state`, {
id: this.pkg.id,
status_text: `${list.length} Patches applied`,
})
@ -71,42 +97,48 @@ export default class PatchManager {
return this.pkg
}
async remove(patch) {
const list = await this.get(patch)
async remove(select) {
const list = await this.get(select)
for await (let patch of list) {
global._relic_eventBus.emit(`pkg:update:state:${this.pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: this.pkg.id,
status_text: `Removing patch [${patch.id}]...`,
})
Log.info(`Removing patch [${patch.id}]...`)
this.log.info(`Removing patch [${patch.id}]...`)
if (Array.isArray(patch.additions)) {
this.log.info(`Removing ${patch.additions.length} Additions...`)
for await (let addition of patch.additions) {
this.log.info(`Removing addition [${addition.id}]...`)
global._relic_eventBus.emit(`pkg:update:state:${this.pkg.id}`, {
status_text: `Removing addition [${additions.id}]...`,
})
addition.file = await parseStringVars(addition.file, this.pkg)
if (!fs.existsSync(addition.file)) {
this.log.info(`Addition [${addition.file}] does not exist. Skipping...`)
continue
}
this.log.info(`Removing addition [${addition.file}]`)
global._relic_eventBus.emit(`pkg:update:state`, {
id: this.pkg.id,
status_text: `Removing addition [${addition.file}]`,
})
await fs.promises.unlink(addition.file)
}
}
pkg.applied_patches = pkg.applied_patches.filter((p) => {
this.pkg.applied_patches = this.pkg.applied_patches.filter((p) => {
return p !== patch.id
})
}
global._relic_eventBus.emit(`pkg:update:state:${this.pkg.id}`, {
await DB.updatePackageById(this.pkg.id, { applied_patches: this.pkg.applied_patches })
global._relic_eventBus.emit(`pkg:update:state`, {
id: this.pkg.id,
status_text: `${list.length} Patches removed`,
})

View File

@ -15,7 +15,9 @@ export default class DB {
static defaultPackageState({
id,
name,
icon,
version,
author,
install_path,
description,
license,
@ -23,13 +25,16 @@ export default class DB {
remote_manifest,
local_manifest,
config,
executable,
}) {
return {
id: id,
name: name,
version: version,
icon: icon,
install_path: install_path,
description: description,
author: author,
license: license ?? "unlicensed",
local_manifest: local_manifest ?? null,
remote_manifest: remote_manifest ?? null,
@ -38,6 +43,7 @@ export default class DB {
last_status: last_status ?? "installing",
last_update: null,
installed_at: null,
executable: executable ?? false,
}
}
@ -72,31 +78,27 @@ export default class DB {
static async writePackage(pkg) {
const db = await this.withDB()
await db.update((data) => {
const prevIndex = data["packages"].findIndex((i) => i.id === pkg.id)
const prevIndex = db.data["packages"].findIndex((i) => i.id === pkg.id)
if (prevIndex !== -1) {
data["packages"][prevIndex] = pkg
db.data["packages"][prevIndex] = pkg
} else {
data["packages"].push(pkg)
db.data["packages"].push(pkg)
}
return data
})
await db.write()
return pkg
return db.data
}
static async updatePackageById(pkg_id, obj) {
const pkg = await this.getPackages(pkg_id)
let pkg = await this.getPackages(pkg_id)
if (!pkg) {
throw new Error("Package not found")
}
pkg = lodash.merge(pkg, obj)
return await this.writePackage(pkg)
return await this.writePackage(lodash.merge({ ...pkg }, obj))
}
static async deletePackage(pkg_id) {

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import path from "node:path"
import fs from "node:fs"
import upath from "upath"
@ -21,8 +23,9 @@ export default async (pkg, step) => {
Log.info(`Cloning from [${step.url}]`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
status_text: `Cloning from [${step.url}]...`,
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Cloning from [${step.url}]`,
})
const args = [

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import path from "node:path"
import fs from "node:fs"
import { execa } from "../libraries/execa"
@ -14,7 +16,8 @@ export default async (pkg, step) => {
const gitCMD = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
const _path = path.resolve(pkg.install_path, step.path)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Pulling...`,
})

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import path from "node:path"
import fs from "node:fs"
import { execa } from "../libraries/execa"
@ -23,7 +25,8 @@ export default async (pkg, step) => {
Log.info(`Fetching from origin`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Fetching from origin...`,
})
@ -36,7 +39,8 @@ export default async (pkg, step) => {
Log.info(`Cleaning untracked files...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Cleaning untracked files...`,
})
@ -48,7 +52,8 @@ export default async (pkg, step) => {
Log.info(`Resetting to ${from}`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Resetting to ${from}`,
})
@ -63,7 +68,8 @@ export default async (pkg, step) => {
Log.info(`Checkout to HEAD`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Checkout to HEAD`,
})

View File

@ -15,9 +15,9 @@ export default async (pkg, step, logger) => {
let _path = path.resolve(pkg.install_path, step.path)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
status: "loading",
statusText: `Downloading [${step.url}]`,
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Downloading [${step.url}]`,
})
logger.info(`Downloading [${step.url} to ${_path}]`)
@ -29,8 +29,10 @@ export default async (pkg, step, logger) => {
fs.mkdirSync(path.resolve(_path, ".."), { recursive: true })
await downloadHttpFile(step.url, _path, (progress) => {
global._relic_eventBus(`pkg:update:state:${pkg.id}`, {
statusText: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
use_id_only: true,
status_text: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
})
})
@ -43,8 +45,9 @@ export default async (pkg, step, logger) => {
step.extract = path.resolve(pkg.install_path, ".")
}
global._relic_eventBus(`pkg:update:state:${pkg.id}`, {
statusText: `Extracting bundle...`,
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Extracting bundle...`,
})
await extractFile(_path, step.extract)
@ -52,8 +55,9 @@ export default async (pkg, step, logger) => {
if (step.deleteAfterExtract !== false) {
logger.info(`Deleting temporal file [${_path}]...`)
global._relic_eventBus(`pkg:update:state:${pkg.id}`, {
statusText: `Deleting temporal files...`,
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Deleting temporal files...`,
})
await fs.promises.rm(_path, { recursive: true })

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import ISM_GIT_CLONE from "./git_clone"
import ISM_GIT_PULL from "./git_pull"
import ISM_GIT_RESET from "./git_reset"
@ -20,6 +22,10 @@ const StepsOrders = [
export default async function processGenericSteps(pkg, steps, logger = Logger) {
logger.info(`Processing generic steps...`)
if (!Array.isArray(steps)) {
throw new Error(`Steps must be an array`)
}
if (steps.length === 0) {
return pkg
}

View File

@ -1,28 +1,30 @@
import Logger from "../logger"
import PatchManager from "../classes/PatchManager"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import DB from "../db"
const BaseLog = Logger.child({ service: "APPLIER" })
function findPatch(manifest, changes, mustBeInstalled) {
return manifest.patches
.filter((patch) => {
function findPatch(patches, applied_patches, changes, mustBeInstalled) {
return patches.filter((patch) => {
const patchID = patch.id
if (typeof changes.patches[patchID] === "undefined") {
return false
}
if (mustBeInstalled === true && !manifest.applied_patches.includes(patch.id) && changes.patches[patchID] === true) {
if (mustBeInstalled === true && !applied_patches.includes(patch.id) && changes.patches[patchID] === true) {
return true
}
if (mustBeInstalled === false && manifest.applied_patches.includes(patch.id) && changes.patches[patchID] === false) {
if (mustBeInstalled === false && applied_patches.includes(patch.id) && changes.patches[patchID] === false) {
return true
}
return false
})
}).map((patch) => patch.id)
}
export default async function apply(pkg_id, changes = {}) {
@ -35,11 +37,18 @@ export default async function apply(pkg_id, changes = {}) {
}
let manifest = await ManifestReader(pkg.local_manifest)
manifest = await ManifestVM(ManifestRead.code)
manifest = await ManifestVM(manifest.code)
const Log = Logger.child({ service: `APPLIER|${pkg.id}` })
Log.info(`Applying changes to package...`)
Log.info(`Changes: ${JSON.stringify(changes)}`)
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Applying changes to package...`,
last_status: "loading",
})
if (changes.patches) {
if (!Array.isArray(pkg.applied_patches)) {
@ -48,8 +57,8 @@ export default async function apply(pkg_id, changes = {}) {
const patches = new PatchManager(pkg, manifest)
await patches.remove(findPatch(manifest, changes, false))
await patches.patch(findPatch(manifest, changes, true))
await patches.remove(findPatch(manifest.patches, pkg.applied_patches, changes, false))
await patches.patch(findPatch(manifest.patches, pkg.applied_patches, changes, true))
}
if (changes.config) {
@ -64,15 +73,19 @@ export default async function apply(pkg_id, changes = {}) {
await DB.writePackage(pkg)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
state: "All changes applied",
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: "All changes applied",
})
Log.info(`All changes applied to package.`)
return pkg
} catch (error) {
global._relic_eventBus.emit(`pkg:${pkg_id}:error`, error)
global._relic_eventBus.emit(`pkg:error`, {
id: pkg_id,
error
})
BaseLog.error(`Failed to apply changes to package [${pkg_id}]`, error)
BaseLog.error(error.stack)

View File

@ -1,7 +1,8 @@
import Logger from "../logger"
import fs from "node:fs"
import DB from "../db"
import SetupHelper from "../helpers/setup"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import parseStringVars from "../utils/parseStringVars"
@ -18,8 +19,6 @@ export default async function execute(pkg_id, { useRemote = false, force = false
return false
}
await SetupHelper()
const manifestPath = useRemote ? pkg.remote_manifest : pkg.local_manifest
if (!fs.existsSync(manifestPath)) {
@ -30,6 +29,12 @@ export default async function execute(pkg_id, { useRemote = false, force = false
return false
}
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
last_status: "loading",
status_text: null,
})
const ManifestRead = await ManifestReader(manifestPath)
const manifest = await ManifestVM(ManifestRead.code)
@ -52,9 +57,18 @@ export default async function execute(pkg_id, { useRemote = false, force = false
})
}
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
last_status: "installed",
status_text: null,
})
return pkg
} catch (error) {
global._relic_eventBus.emit(`pkg:${pkg_id}:error`, error)
global._relic_eventBus.emit(`pkg:error`, {
id: pkg_id,
error
})
BaseLog.error(`Failed to execute package [${pkg_id}]`, error)
BaseLog.error(error.stack)

View File

@ -1,7 +1,8 @@
import Logger from "../logger"
import fs from "node:fs"
import DB from "../db"
import SetupHelper from "../helpers/setup"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import GenericSteps from "../generic_steps"
@ -13,8 +14,6 @@ export default async function install(manifest) {
let id = null
try {
await SetupHelper()
BaseLog.info(`Invoking new installation...`)
BaseLog.info(`Fetching manifest [${manifest}]`)
@ -44,6 +43,7 @@ export default async function install(manifest) {
Log.info(`Appending to db...`)
const pkg = DB.defaultPackageState({
...manifest.constructor,
id: id,
name: manifest.constructor.pkg_name,
version: manifest.constructor.version,
@ -53,6 +53,7 @@ export default async function install(manifest) {
last_status: "installing",
remote_manifest: ManifestRead.remote_manifest,
local_manifest: ManifestRead.local_manifest,
executable: !!manifest.execute
})
await DB.writePackage(pkg)
@ -72,7 +73,8 @@ export default async function install(manifest) {
if (typeof manifest.beforeInstall === "function") {
Log.info(`Executing beforeInstall hook...`)
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing beforeInstall hook...`,
})
@ -82,7 +84,8 @@ export default async function install(manifest) {
if (Array.isArray(manifest.installSteps)) {
Log.info(`Executing generic install steps...`)
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing generic install steps...`,
})
@ -92,14 +95,16 @@ export default async function install(manifest) {
if (typeof manifest.afterInstall === "function") {
Log.info(`Executing afterInstall hook...`)
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing afterInstall hook...`,
})
await manifest.afterInstall(pkg)
}
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Finishing up...`,
})
@ -119,7 +124,7 @@ export default async function install(manifest) {
}
pkg.local_manifest = finalPath
pkg.last_status = "installed"
pkg.last_status = "loading"
pkg.installed_at = Date.now()
await DB.writePackage(pkg)
@ -130,7 +135,8 @@ export default async function install(manifest) {
if (defaultPatches.length > 0) {
Log.info(`Applying default patches...`)
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Applying default patches...`,
})
@ -140,17 +146,30 @@ export default async function install(manifest) {
}
}
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
pkg.last_status = "installed"
await DB.writePackage(pkg)
global._relic_eventBus.emit(`pkg:update:state`, {
...pkg,
id: pkg.id,
last_status: "installed",
status_text: `Installation completed successfully`,
})
global._relic_eventBus.emit(`pkg:new:done`, pkg)
Log.info(`Package installed successfully!`)
return pkg
} catch (error) {
global._relic_eventBus.emit(`pkg:${id}:error`, error)
global._relic_eventBus.emit(`pkg:error`, {
id: pkg.id,
error
})
global._relic_eventBus.emit(`pkg:update:state:${id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
last_status: "failed",
status_text: `Installation failed`,
})

View File

@ -0,0 +1,9 @@
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
/**
 * Reads a manifest source and evaluates it in the manifest VM without
 * performing a full setup, returning the evaluated manifest object.
 *
 * @param {string} manifest - Path or URL of the manifest to read.
 * @param {object} [options={}] - Options forwarded to the manifest VM.
 * @returns {Promise<object>} The evaluated manifest.
 */
export default async function softRead(manifest, options = {}) {
    const reader = await ManifestReader(manifest)

    return await ManifestVM(reader.code, options)
}

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
@ -18,7 +20,8 @@ export default async function uninstall(pkg_id) {
const Log = Logger.child({ service: `UNINSTALLER|${pkg.id}` })
Log.info(`Uninstalling package...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Uninstalling package...`,
})
@ -27,33 +30,41 @@ export default async function uninstall(pkg_id) {
if (typeof manifest.uninstall === "function") {
Log.info(`Performing uninstall hook...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing uninstall hook...`,
})
await manifest.uninstall(pkg)
}
Log.info(`Deleting package directory...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Deleting package directory...`,
})
await rimraf(pkg.install_path)
Log.info(`Removing package from database...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Removing package from database...`,
})
await DB.deletePackage(pkg.id)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
status: "deleted",
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
last_status: "deleted",
status_text: `Uninstalling package...`,
})
global._relic_eventBus.emit(`pkg:remove`, pkg)
Log.info(`Package uninstalled successfully!`)
return pkg
} catch (error) {
global._relic_eventBus.emit(`pkg:${pkg_id}:error`, error)
global._relic_eventBus.emit(`pkg:error`, {
id: pkg_id,
error
})
BaseLog.error(`Failed to uninstall package [${pkg_id}]`, error)
BaseLog.error(error.stack)

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import DB from "../db"
import ManifestReader from "../manifest/reader"
@ -39,15 +41,21 @@ export default async function update(pkg_id) {
let ManifestRead = await ManifestReader(pkg.local_manifest)
let manifest = await ManifestVM(ManifestRead.code)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
status: "updating",
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
last_status: "updating",
status_text: `Updating package...`,
})
pkg.last_status = "updating"
await DB.writePackage(pkg)
if (typeof manifest.update === "function") {
Log.info(`Performing update hook...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing update hook...`,
})
@ -57,7 +65,8 @@ export default async function update(pkg_id) {
if (manifest.updateSteps) {
Log.info(`Performing update steps...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing update steps...`,
})
@ -67,13 +76,14 @@ export default async function update(pkg_id) {
if (Array.isArray(pkg.applied_patches)) {
const patchManager = new PatchManager(pkg, manifest)
await patchManager.patch(pkg.applied_patches)
await patchManager.reapply()
}
if (typeof manifest.afterUpdate === "function") {
Log.info(`Performing after update hook...`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
global._relic_eventBus.emit(`pkg:update:state`, {
id: pkg.id,
status_text: `Performing after update hook...`,
})
@ -91,20 +101,24 @@ export default async function update(pkg_id) {
}
}
pkg.status = "installed"
pkg.last_status = "installed"
pkg.last_update = Date.now()
await DB.writePackage(pkg)
Log.info(`Package updated successfully`)
global._relic_eventBus.emit(`pkg:update:state:${pkg.id}`, {
status: "installed",
global._relic_eventBus.emit(`pkg:update:state`, {
...pkg,
id: pkg.id,
})
return pkg
} catch (error) {
global._relic_eventBus.emit(`pkg:${pkg_id}:error`, error)
global._relic_eventBus.emit(`pkg:error`, {
id: pkg_id,
error
})
BaseLog.error(`Failed to update package [${pkg_id}]`, error)
BaseLog.error(error.stack)

View File

@ -9,13 +9,24 @@ import Logger from "./logger"
import Vars from "./vars"
import DB from "./db"
import PackageInstall from "./handlers/install"
import PackageExecute from "./handlers/execute"
import PackageUninstall from "./handlers/uninstall"
import PackageUpdate from "./handlers/update"
import PackageApply from "./handlers/apply"
import PackageList from "./handlers/list"
import PackageRead from "./handlers/read"
export default class RelicCore {
constructor(params) {
this.params = params
}
eventBus = global._relic_eventBus = new EventEmitter()
logger = global.Logger = Logger
logger = Logger
db = DB
async initialize() {
await DB.initialize()
@ -34,12 +45,13 @@ export default class RelicCore {
}
package = {
install: require("./handlers/install").default,
execute: require("./handlers/execute").default,
uninstall: require("./handlers/uninstall").default,
update: require("./handlers/update").default,
apply: require("./handlers/apply").default,
list: require("./handlers/list").default,
install: PackageInstall,
execute: PackageExecute,
uninstall: PackageUninstall,
update: PackageUpdate,
apply: PackageApply,
list: PackageList,
read: PackageRead,
}
openPath(pkg_id) {

View File

@ -1,3 +1,5 @@
import Logger from "../../../logger"
import Client from "./launcher"
import Authenticator from "./authenticator"

View File

@ -1,3 +1,5 @@
import Logger from "../../../logger"
import open, { apps } from "open"
const Log = Logger.child({ service: "OPEN-LIB" })

View File

@ -1,6 +1,7 @@
import fs from "node:fs"
import path from "node:path"
import downloadHttpFile from "../helpers/downloadHttpFile"
import axios from "axios"
import checksum from "checksum"
import Vars from "../vars"
@ -15,13 +16,19 @@ export async function readManifest(manifest) {
fs.mkdirSync(Vars.cache_path, { recursive: true })
}
const cachedManifest = await downloadHttpFile(manifest, path.resolve(Vars.cache_path, `${Date.now()}.rmanifest`))
const { data: code } = await axios.get(target)
const manifestChecksum = checksum(code, { algorithm: "md5" })
const cachedManifest = path.join(Vars.cache_path, `${manifestChecksum}.rmanifest`)
await fs.promises.writeFile(cachedManifest, code)
return {
remote_manifest: manifest,
local_manifest: cachedManifest,
is_catched: true,
code: fs.readFileSync(cachedManifest, "utf8"),
code: code,
}
} else {
if (!fs.existsSync(target)) {

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import os from "node:os"
import vm from "node:vm"
import path from "node:path"
@ -8,7 +10,15 @@ import FetchLibraries from "./libraries"
import Vars from "../vars"
async function BuildManifest(baseClass, context, soft = false) {
async function BuildManifest(baseClass, context, { soft = false } = {}) {
// inject install_path
context.install_path = path.resolve(Vars.packages_path, baseClass.id)
baseClass.install_path = context.install_path
if (soft === true) {
return baseClass
}
const configManager = new ManifestConfigManager(baseClass.id)
await configManager.initialize()
@ -22,10 +32,6 @@ async function BuildManifest(baseClass, context, soft = false) {
]
}
// inject install_path
context.install_path = path.resolve(Vars.packages_path, baseClass.id)
baseClass.install_path = context.install_path
// modify context
context.Log = Logger.child({ service: `VM|${baseClass.id}` })
context.Lib = await FetchLibraries(dependencies, {
@ -46,7 +52,7 @@ function injectUseManifest(code) {
return code + "\n\nuse(Manifest);"
}
export default async (code) => {
export default async (code, { soft = false } = {}) => {
return await new Promise(async (resolve, reject) => {
try {
code = injectUseManifest(code)
@ -55,7 +61,13 @@ export default async (code) => {
Vars: Vars,
Log: Logger.child({ service: "MANIFEST_VM" }),
use: (baseClass) => {
BuildManifest(baseClass, context).then(resolve)
return BuildManifest(
baseClass,
context,
{
soft: soft,
}
).then(resolve)
},
os_string: resolveOs(),
arch: os.arch(),

View File

@ -1,3 +1,5 @@
import Logger from "../logger"
import fs from "node:fs"
import path from "node:path"
import { pipeline as streamPipeline } from "node:stream/promises"

View File

@ -1,3 +0,0 @@
provider: generic
url: https://example.com/auto-updates
updaterCacheDirName: rs-bundler-updater

View File

@ -1,5 +1,5 @@
appId: com.ragestudio.bundler
productName: rs-bundler
appId: com.ragestudio.relic
productName: Relic
directories:
buildResources: build
files:
@ -11,7 +11,7 @@ files:
asarUnpack:
- resources/**
win:
executableName: rs-bundler
executableName: relic
icon: resources/icon.ico
nsis:
artifactName: ${name}-${version}-setup.${ext}
@ -40,4 +40,4 @@ appImage:
npmRebuild: false
publish:
provider: generic
url: https://storage.ragestudio.net/rs-bundler/release
url: https://storage.ragestudio.net/relic/release

View File

@ -5,23 +5,9 @@ import react from "@vitejs/plugin-react"
export default defineConfig({
main: {
plugins: [externalizeDepsPlugin()],
// build: {
// rollupOptions: {
// output: {
// format: "es"
// }
// }
// },
},
preload: {
plugins: [externalizeDepsPlugin()],
// build: {
// rollupOptions: {
// output: {
// format: "es"
// }
// }
// },
},
renderer: {
server: {

View File

@ -6,8 +6,6 @@
"author": "RageStudio",
"license": "MIT",
"scripts": {
"format": "prettier --write .",
"lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
"start": "electron-vite preview",
"dev": "electron-vite dev",
"build": "electron-vite build",
@ -26,48 +24,33 @@
"@imjs/electron-differential-updater": "^5.1.7",
"@loadable/component": "^5.16.3",
"@ragestudio/hermes": "^0.1.1",
"adm-zip": "^0.5.10",
"antd": "^5.13.2",
"checksum": "^1.0.0",
"classnames": "^2.3.2",
"electron-build": "^0.0.3",
"electron-differential-updater": "^4.3.2",
"electron-is-dev": "^2.0.0",
"electron-store": "^8.1.0",
"electron-updater": "^6.1.1",
"googleapis": "^105.0.0",
"got": "11.8.3",
"human-format": "^1.2.0",
"less": "^4.2.0",
"lodash": "^4.17.21",
"merge-stream": "^2.0.0",
"node-7z": "^3.0.0",
"open": "8.4.2",
"progress-stream": "^2.0.0",
"protocol-registry": "^1.4.1",
"react-icons": "^4.11.0",
"react-motion": "0.5.2",
"react-router-dom": "6.6.2",
"react-spinners": "^0.13.8",
"react-spring": "^9.7.3",
"react-motion": "0.5.2",
"request": "^2.88.2",
"rimraf": "^5.0.5",
"signal-exit": "^4.1.0",
"unzipper": "^0.10.14",
"upath": "^2.0.1",
"uuid": "^9.0.1",
"which": "^4.0.0",
"winreg": "^1.2.5"
},
"devDependencies": {
"@electron-toolkit/eslint-config": "^1.0.1",
"@electron-toolkit/eslint-config-prettier": "^1.0.1",
"@vitejs/plugin-react": "^4.0.4",
"electron": "^25.6.0",
"electron-builder": "^24.6.3",
"electron-vite": "^1.0.27",
"eslint": "^8.47.0",
"eslint-plugin-react": "^7.33.2",
"prettier": "^3.0.2",
"electron": "29.1.6",
"electron-builder": "24.6.3",
"electron-vite": "^2.1.0",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"vite": "^4.4.9"

View File

@ -1,34 +0,0 @@
import { safeStorage } from "electron"
import sendToRender from "./utils/sendToRender"
/**
 * Stores, retrieves and removes per-package auth tokens, encrypted at
 * rest with Electron's safeStorage (OS keychain-backed encryption).
 */
export default class AuthService {
    /**
     * Encrypts and persists an auth token for a package, then notifies
     * the renderer so the UI can reflect the authorized state.
     *
     * @param {string} pkg_id - Package identifier the token belongs to.
     * @param {string} token - Plaintext token to store.
     * @returns {boolean} Always true.
     */
    authorize(pkg_id, token) {
        // SECURITY: never log the plaintext token — it is a secret.
        global.SettingsStore.set(`auth:${pkg_id}`, safeStorage.encryptString(token))

        sendToRender(`new:notification`, {
            message: "Authorized",
            description: "Now you can start this package",
        })

        return true
    }

    /**
     * Removes the stored token for a package.
     *
     * @param {string} pkg_id - Package identifier.
     * @returns {boolean} Always true, even when no token was stored.
     */
    unauthorize(pkg_id) {
        global.SettingsStore.delete(`auth:${pkg_id}`)

        return true
    }

    /**
     * Decrypts and returns the stored token for a package.
     *
     * @param {string} pkg_id - Package identifier.
     * @returns {string|null} The plaintext token, or null when none is stored.
     */
    getAuth(pkg_id) {
        const value = global.SettingsStore.get(`auth:${pkg_id}`)

        if (!value) {
            return null
        }

        // SECURITY: do not log the stored credential blob.
        return safeStorage.decryptString(Buffer.from(value.data))
    }
}

View File

@ -0,0 +1,45 @@
import sendToRender from "../utils/sendToRender"
/**
 * Bridges RelicCore event-bus events to the Electron renderer process.
 * Handlers are attached on construction; call deinitialize() to detach
 * them (e.g. on window teardown).
 */
export default class CoreAdapter {
    constructor(electronApp, RelicCore) {
        this.app = electronApp
        this.core = RelicCore

        this.initialize()
    }

    // Event-bus event name -> forwarding handler for the renderer.
    events = {
        "pkg:new": (pkg) => {
            sendToRender("pkg:new", pkg)
        },
        "pkg:remove": (pkg) => {
            sendToRender("pkg:remove", pkg)
        },
        "pkg:update:state": (data = {}) => {
            // state updates without a package id cannot be routed
            if (!data.id) {
                return false
            }

            // some emitters request a per-package channel only
            if (data.use_id_only === true) {
                return sendToRender(`pkg:update:state:${data.id}`, data)
            }

            return sendToRender("pkg:update:state", data)
        },
        "pkg:new:done": (pkg) => {
            sendToRender("pkg:new:done", pkg)
        }
    }

    initialize = () => {
        Object.entries(this.events).forEach(([eventName, handler]) => {
            global._relic_eventBus.on(eventName, handler)
        })
    }

    deinitialize = () => {
        Object.entries(this.events).forEach(([eventName, handler]) => {
            global._relic_eventBus.off(eventName, handler)
        })
    }
}

View File

@ -1,169 +0,0 @@
import fs from "node:fs"
import sendToRender from "../utils/sendToRender"
import initManifest from "../utils/initManifest"
import parseStringVars from "../utils/parseStringVars"
import processGenericSteps from "../generic_steps"
import {
updateInstalledPackage,
getInstalledPackages,
} from "../local_db"
/**
 * Selects the patches whose requested state differs from the package's
 * current applied state.
 *
 * @param {object} pkg - Installed package; reads `patches` and `applied_patches`.
 * @param {object} changes - Requested changes; `changes.patches` maps patch id -> desired boolean.
 * @param {boolean} mustBeInstalled - true selects patches to apply, false selects patches to remove.
 * @returns {Array<object>} Matching patch entries from `pkg.patches`.
 */
function findPatch(pkg, changes, mustBeInstalled) {
    const selected = []

    for (const patch of pkg.patches) {
        const desired = changes.patches[patch.id]

        // ignore patches the change-set does not mention
        if (typeof desired === "undefined") {
            continue
        }

        const isApplied = pkg.applied_patches.includes(patch.id)

        if (mustBeInstalled === true && !isApplied && desired === true) {
            selected.push(patch)
        } else if (mustBeInstalled === false && isApplied && desired === false) {
            selected.push(patch)
        }
    }

    return selected
}
/**
 * Applies a user-selected change-set (patch toggles and config values) to an
 * installed package and persists the result to the local database.
 *
 * @param {string} pkg_id - id of the installed package
 * @param {object} changes - `{ patches?: {[patchID]: boolean}, configs?: {[key]: any} }`
 * @returns {Promise<boolean|undefined>} true on success, false when the package
 *   is not found; undefined when an error was caught and reported to the UI
 */
export default async function apply(pkg_id, changes) {
    try {
        let pkg = await getInstalledPackages(pkg_id)

        if (!pkg) {
            sendToRender("runtime:error", "Package not found")
            return false
        }

        pkg = await initManifest(pkg)

        console.log(`[${pkg_id}] apply() | Applying changes... >`, changes)

        if (typeof changes.patches !== "undefined") {
            if (!Array.isArray(pkg.applied_patches)) {
                pkg.applied_patches = []
            }

            // patches toggled off (currently applied, requested false)
            const disablePatches = findPatch(pkg, changes, false)
            // patches toggled on (not yet applied, requested true)
            const installPatches = findPatch(pkg, changes, true)

            for await (let patch of disablePatches) {
                sendToRender(`pkg:update:status`, {
                    id: pkg_id,
                    status: "loading",
                    statusText: `Removing patch [${patch.id}]...`,
                })

                console.log(`[${pkg_id}] apply() | Removing patch [${patch.id}]...`)

                // remove patch additions
                for await (let addition of patch.additions) {
                    // resolve patch file
                    addition.file = await parseStringVars(addition.file, pkg)

                    console.log(`[${pkg_id}] apply() | Removing addition [${addition.file}]...`)

                    if (!fs.existsSync(addition.file)) {
                        continue
                    }

                    // FIX: fs.promises.unlink takes no options (they were
                    // silently ignored) and fails on directories; rm honors
                    // { force, recursive } as intended here.
                    await fs.promises.rm(addition.file, { force: true, recursive: true })
                }

                // TODO: remove file patch overrides with original file

                // remove from applied patches
                pkg.applied_patches = pkg.applied_patches.filter((p) => {
                    return p !== patch.id
                })

                sendToRender(`pkg:update:status`, {
                    id: pkg_id,
                    status: "done",
                    statusText: `Patch [${patch.id}] removed!`,
                })
            }

            for await (let patch of installPatches) {
                if (pkg.applied_patches.includes(patch.id)) {
                    console.log(`[${pkg_id}] apply() | Patch [${patch.id}] already applied. Skipping...`)
                    continue
                }

                sendToRender(`pkg:update:status`, {
                    id: pkg_id,
                    status: "loading",
                    statusText: `Applying patch [${patch.id}]...`,
                })

                console.log(`[${pkg_id}] apply() | Applying patch [${patch.id}]...`)

                for await (let addition of patch.additions) {
                    console.log(`Processing addition [${addition.file}]`, addition)

                    // resolve patch file
                    addition.file = await parseStringVars(addition.file, pkg)

                    // additions already present on disk are left untouched
                    if (fs.existsSync(addition.file)) {
                        continue
                    }

                    await processGenericSteps(pkg, addition.steps)
                }

                // add to applied patches
                pkg.applied_patches.push(patch.id)

                sendToRender(`pkg:update:status`, {
                    id: pkg_id,
                    status: "done",
                    statusText: `Patch [${patch.id}] applied!`,
                })
            }
        }

        if (changes.configs) {
            // lazily seed stored configs from the manifest defaults
            if (!pkg.storaged_configs) {
                pkg.storaged_configs = Object.entries(pkg.configs).reduce((acc, [key, value]) => {
                    acc[key] = value.default
                    return acc
                }, {})
            }

            if (Object.keys(changes.configs).length !== 0) {
                Object.entries(changes.configs).forEach(([key, value]) => {
                    pkg.storaged_configs[key] = value
                })
            }
        }

        await updateInstalledPackage(pkg)

        sendToRender(`new:message`, {
            type: "info",
            message: "Changes applied",
        })

        sendToRender(`pkg:update:status`, {
            ...pkg,
        })

        console.log(`[${pkg_id}] apply() | Changes applied`)

        return true
    } catch (error) {
        console.log(error)

        sendToRender(`new:notification`, {
            type: "error",
            message: "Failed to apply changes",
        })
    }
}

View File

@ -1,116 +0,0 @@
import {
getInstalledPackages,
} from "../local_db"
import readManifest from "../utils/readManifest"
import initManifest from "../utils/initManifest"
import parseStringVars from "../utils/parseStringVars"
import sendToRender from "../utils/sendToRender"
import UpdateCMD from "./update"
/**
 * Executes an installed package, optionally checking for updates first.
 *
 * @param {string} pkg_id - id of the installed package
 * @param {object} [options]
 * @param {boolean} [options.force=false] - run even when a newer version is available
 * @returns {Promise<boolean|*>} true on success; false when the package is
 *   missing, an update aborts the launch, or no execute entry exists
 */
export default async function execute(pkg_id, { force = false } = {}) {
    let pkg = await getInstalledPackages(pkg_id)

    if (!pkg) {
        sendToRender("runtime:error", "Package not found")
        return false
    }

    sendToRender("pkg:update:status", {
        id: pkg_id,
        status: "loading",
        statusText: `Executing...`,
    })

    console.log(`[${pkg_id}] execute() | Executing...`)

    // refresh local record with the remote manifest when available
    if (pkg.remote_url) {
        pkg = {
            ...pkg,
            ...await readManifest(pkg, { just_read: true }),
        }
    }

    pkg = await initManifest(pkg)

    // Abort (or auto-update) when the remote manifest advertises a newer version.
    if (pkg.check_updates_after_execute === true) {
        if (pkg._original_manifest) {
            if ((pkg._original_manifest.version !== pkg.version) && !force) {
                console.log(`[${pkg_id}] execute() | Update available (${pkg._original_manifest.version} -> ${pkg.version}). Aborting...`,)

                if (global.SettingsStore.get("pkg_auto_update_on_execute") === true) {
                    await UpdateCMD(pkg_id)
                } else {
                    sendToRender("pkg:update_available", {
                        manifest: pkg._original_manifest,
                        current_version: pkg._original_manifest.version,
                        new_version: pkg.version,
                    })

                    sendToRender("pkg:update:status", {
                        id: pkg_id,
                        status: "installed",
                    })

                    return false
                }
            }
        }
    }

    // NOTE(review): despite its name, this hook runs BEFORE the actual
    // execution below — confirm whether it should run after `pkg.execute`.
    if (typeof pkg.after_execute === "function") {
        await pkg.after_execute(pkg)
    }

    if (typeof pkg.execute === "string") {
        // FIX: parseStringVars is awaited at every other call site; without
        // the await, execFile would receive a pending Promise as its path.
        pkg.execute = await parseStringVars(pkg.execute, pkg)

        console.log(`[${pkg_id}] execute() | Executing binary from path >`, pkg.execute)

        // FIX: child_process was never imported in this module, which made
        // this branch throw a ReferenceError at runtime.
        const { execFile } = await import("node:child_process")

        await new Promise((resolve, reject) => {
            const child = execFile(pkg.execute, [], {
                shell: true,
                cwd: pkg.install_path,
            })

            child.on("exit", resolve)
            child.on("error", reject)
        })
    } else {
        try {
            if (typeof pkg.execute !== "function") {
                sendToRender("installation:status", {
                    id: pkg_id,
                    status: "error",
                    statusText: "No execute function found",
                })

                return false
            }

            await pkg.execute(pkg)
        } catch (error) {
            sendToRender("new:notification", {
                type: "error",
                message: "Failed to launch",
                description: error.toString(),
            })

            return sendToRender("pkg:update:status", {
                id: pkg_id,
                status: "installed",
                statusText: `Failed to launch`,
            })
        }
    }

    sendToRender("pkg:update:status", {
        id: pkg_id,
        status: "installed",
    })

    console.log(`[${pkg_id}] execute() | Successfully executed`)

    return true
}

View File

@ -1,128 +0,0 @@
import fs from "node:fs"
import readManifest from "../utils/readManifest"
import initManifest from "../utils/initManifest"
import sendToRender from "../utils/sendToRender"
import defaultManifest from "../defaults/pkg_manifest"
import processGenericSteps from "../generic_steps"
import applyChanges from "./apply"
import {
updateInstalledPackage,
} from "../local_db"
/**
 * Installs a package from a manifest (path or URL): prepares a clean install
 * directory, runs before/after hooks and generic install steps, applies the
 * manifest's default patches, and records the package in the local database.
 *
 * @param {string|object} manifest - manifest location or already-loaded manifest
 * @returns {Promise<boolean|undefined>} false when the manifest cannot be
 *   fetched; otherwise undefined (progress is reported through the renderer)
 */
export default async function install(manifest) {
    manifest = await readManifest(manifest).catch((error) => {
        sendToRender("runtime:error", "Cannot fetch this manifest")

        return false
    })

    if (!manifest) {
        return false
    }

    let pkg = {
        ...defaultManifest,
        ...manifest,
        status: "installing",
    }

    const pkg_id = pkg.id

    sendToRender("pkg:new", pkg)

    console.log(`[${pkg_id}] install() | Starting to install...`)

    try {
        pkg = await initManifest(pkg)

        // FIX: use the async fs.promises API instead of awaiting sync calls
        // (fs.rmSync/fs.mkdirSync returned non-promises that were awaited).
        if (fs.existsSync(pkg.install_path)) {
            await fs.promises.rm(pkg.install_path, { recursive: true, force: true })
        }

        console.log(`[${pkg_id}] install() | creating install path [${pkg.install_path}]...`)

        await fs.promises.mkdir(pkg.install_path, { recursive: true })

        // append to db
        await updateInstalledPackage(pkg)

        if (typeof pkg.before_install === "function") {
            sendToRender(`pkg:update:status`, {
                id: pkg_id,
                status: "installing",
                statusText: `Performing before_install hook...`,
            })

            console.log(`[${pkg_id}] install() | Performing before_install hook...`)

            // execute before_install
            await pkg.before_install(pkg)
        }

        sendToRender(`pkg:update:status`, {
            id: pkg_id,
            status: "installing",
            statusText: `Performing install steps...`,
        })

        // Execute generic install steps
        await processGenericSteps(pkg, pkg.install_steps)

        if (typeof pkg.after_install === "function") {
            sendToRender(`pkg:update:status`, {
                id: pkg_id,
                status: "installing",
                statusText: `Performing after_install hook...`,
            })

            console.log(`[${pkg_id}] install() | Performing after_install hook...`)

            // execute after_install
            await pkg.after_install(pkg)
        }

        pkg.status = "installed"
        pkg.installed_at = new Date()

        // update to db
        await updateInstalledPackage(pkg)

        if (pkg.patches) {
            // process default patches
            const defaultPatches = pkg.patches.filter((patch) => patch.default)

            await applyChanges(pkg.id, {
                patches: Object.fromEntries(defaultPatches.map((patch) => [patch.id, true])),
            })
        }

        sendToRender(`pkg:update:status`, {
            id: pkg_id,
            status: "installed",
        })

        sendToRender(`new:message`, {
            message: `Successfully installed ${pkg.name}!`,
        })

        if (Array.isArray(pkg.install_ask_configs)) {
            sendToRender("pkg:install:ask", pkg)
        }

        console.log(`[${pkg_id}] install() | Successfully installed ${pkg.name}!`)
    } catch (error) {
        sendToRender(`pkg:update:status`, {
            id: pkg_id,
            status: "error",
            statusText: error.toString(),
        })

        console.error(error)

        // FIX: fs.rmdirSync with { recursive } is deprecated; also guard
        // against install_path being unset when initManifest itself failed.
        if (pkg.install_path) {
            fs.rmSync(pkg.install_path, { recursive: true, force: true })
        }
    }
}

View File

@ -1,53 +0,0 @@
import {
getInstalledPackages,
deleteInstalledPackage,
} from "../local_db"
import sendToRender from "../utils/sendToRender"
import readManifest from "../utils/readManifest"
import initManifest from "../utils/initManifest"
import { rimraf } from "rimraf"
/**
 * Uninstalls a package: optionally runs the remote manifest's uninstall hook,
 * deletes the database record, and removes the install directory.
 *
 * @param {string} pkg_id - id of the installed package
 * @returns {Promise<false|undefined>} false when the package is not found
 */
export default async function uninstall(pkg_id) {
    let pkg = await getInstalledPackages(pkg_id)

    if (!pkg) {
        sendToRender("runtime:error", "Package not found")
        return false
    }

    // surface progress in the renderer
    sendToRender("pkg:update:status", {
        id: pkg_id,
        status: "uninstalling",
        statusText: `Uninstalling...`,
    })

    console.log(`[${pkg_id}] uninstall() | Uninstalling...`)

    if (pkg.remote_url) {
        // NOTE(review): this REPLACES the installed record with the freshly
        // read remote manifest, discarding locally stored fields — confirm
        // the remote manifest carries everything initManifest needs below.
        pkg = await readManifest(pkg.remote_url, { just_read: true })

        if (typeof pkg.uninstall === "function") {
            console.log(`Performing uninstall hook...`)

            // manifest-defined uninstall hook
            await pkg.uninstall(pkg)
        }
    }

    // resolve derived fields (e.g. install_path) before deletion
    pkg = await initManifest(pkg)

    await deleteInstalledPackage(pkg_id)

    // remove the install directory from disk
    await rimraf(pkg.install_path)

    // clear the transient status text, then notify removal
    sendToRender("pkg:update:status", {
        id: pkg_id,
        status: "uninstalling",
        statusText: null,
    })

    sendToRender("pkg:remove", {
        id: pkg_id
    })
}

View File

@ -1,137 +0,0 @@
import fs from "node:fs"
import {
updateInstalledPackage,
getInstalledPackages,
} from "../local_db"
import readManifest from "../utils/readManifest"
import initManifest from "../utils/initManifest"
import sendToRender from "../utils/sendToRender"
import parseStringVars from "../utils/parseStringVars"
import processGenericSteps from "../generic_steps"
/**
 * Updates an installed package to its latest version: refreshes the manifest
 * from the remote, runs the update hooks and generic update steps, re-creates
 * missing patch additions, and persists the updated record to the database.
 *
 * @param {string} pkg_id - id of the installed package
 * @returns {Promise<false|undefined>} false when the package is not found;
 *   errors are caught and reported to the renderer
 */
export default async function update(pkg_id) {
    // find package manifest
    let pkg = await getInstalledPackages(pkg_id)

    if (!pkg) {
        sendToRender("runtime:error", "Package not found")
        return false
    }

    try {
        // output to logs
        console.log(`[${pkg_id}] update() | Updating to latest version...`)

        // update render
        sendToRender("pkg:update:status", {
            id: pkg_id,
            status: "loading",
            statusText: `Updating to latest version...`,
        })

        // fulfill if remote available
        if (pkg.remote_url) {
            pkg = {
                ...pkg,
                ...await readManifest(pkg.remote_url, { just_read: true }),
            }
        }

        // initialize package manifest
        pkg = await initManifest(pkg)

        // check if package manifest has a update function
        if (typeof pkg.update === "function") {
            // update render
            sendToRender(`pkg:update:status`, {
                id: pkg_id,
                status: "loading",
                statusText: `Performing update hook...`,
            })

            // output to logs
            console.log(`[${pkg_id}] update() | Performing update hook`)

            // execute update function
            await pkg.update(pkg)
        }

        // Process generic steps
        await processGenericSteps(pkg, pkg.update_steps)

        // reapply patches
        if (Array.isArray(pkg.applied_patches)) {
            for await (const patchKey of pkg.applied_patches) {
                const patch = pkg.patches.find((patch) => patch.id === patchKey)

                // skip patches that vanished from the manifest or declare no additions
                if (!patch || !Array.isArray(patch.additions)) {
                    continue
                }

                console.log(`Processing patch [${patch.id}]`, patch)

                for await (let addition of patch.additions) {
                    console.log(`Processing addition [${addition.file}]`, addition)

                    // resolve patch file
                    addition.file = await parseStringVars(addition.file, pkg)

                    // additions still present on disk are left untouched
                    if (fs.existsSync(addition.file)) {
                        continue
                    }

                    await processGenericSteps(pkg, addition.steps)
                }
            }
        }

        // check if package manifest has an after_update function
        if (typeof pkg.after_update === "function") {
            // update render
            sendToRender(`pkg:update:status`, {
                id: pkg_id,
                status: "loading",
                statusText: `Performing after_update hook...`,
            })

            // output to logs
            console.log(`[${pkg_id}] update() | Performing after_update hook`)

            // execute after_update function
            await pkg.after_update(pkg)
        }

        // update package vars
        pkg.status = "installed"
        pkg.last_update = new Date()

        // update package manifest on db
        await updateInstalledPackage(pkg)

        // update render
        sendToRender(`pkg:update:status`, {
            ...pkg,
            status: "installed",
        })

        sendToRender(`new:notification`, {
            message: `(${pkg.name}) successfully updated!`,
        })

        // output to logs
        console.log(`[${pkg_id}] update() | Successfully updated!`)
    } catch (error) {
        // update render
        sendToRender(`pkg:update:status`, {
            ...pkg,
            status: "error",
            statusText: error.toString(),
        })

        // output to logs
        console.error(error)
    }
}

View File

@ -1,6 +0,0 @@
import pkg from "../../../package.json"
// Default (empty) shape of the local package database file.
export default {
    // version of the app that created the database (from package.json)
    created_at_version: pkg.version,
    // list of installed package records
    packages: [],
}

View File

@ -1,19 +0,0 @@
// Default field set for a package manifest; spread first when building a
// package record so manifest-provided values override these.
export default {
    id: null,                      // unique package identifier
    name: null,                    // human-readable name
    description: null,
    icon: null,
    version: null,
    install_path: null,            // resolved at init time
    remote_url: null,              // source manifest URL, when remote
    last_update: null,
    status: "pending",             // lifecycle state shown in the UI
    statusText: "Pending...",
    patches: [],                   // patches declared by the manifest
    applied_patches: [],           // ids of patches currently applied
    configs: {},                   // config schema declared by the manifest
    storaged_configs: {}           // user-selected config values
}

View File

@ -1,79 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import humanFormat from "human-format"
import sendToRender from "../utils/sendToRender"
import extractFile from "../utils/extractFile"
import GoogleDriveAPI from "../lib/google_drive"
// Formats a raw byte count into a human readable string (e.g. "1.50 MB").
function convertSize(size) {
    const formatted = humanFormat(size, { decimals: 2 })

    return `${formatted}B`
}
/**
 * Generic step: downloads a file from Google Drive into the package install
 * directory (or a temp location), optionally extracting it afterwards.
 *
 * @param {object} manifest - package manifest (provides id and install_path)
 * @param {object} step - { id, path?, tmp?, extract?, delete_after_extract? }
 */
export default async (manifest, step) => {
    // destination defaults to the install root when step.path is omitted
    let _path = path.resolve(manifest.install_path, step.path ?? ".")

    console.log(`[${manifest.id}] steps.drive() | Downloading ${step.id} to ${_path}...`)

    sendToRender(`pkg:update:status:${manifest.id}`, {
        status: "loading",
        statusText: `Downloading file id ${step.id}`,
    })

    if (step.tmp) {
        // NOTE(review): TMP_PATH is assumed to be a global defined elsewhere
        // in this process — confirm it is set before tmp downloads run.
        _path = path.resolve(TMP_PATH, String(new Date().getTime()))
    }

    // ensure the parent directory exists
    fs.mkdirSync(path.resolve(_path, ".."), { recursive: true })

    sendToRender(`pkg:update:status:${manifest.id}`, {
        statusText: `Starting download...`,
    })

    // Download file from drive
    await new Promise((resolve, reject) => {
        GoogleDriveAPI.operations.downloadFile(
            step.id,
            _path,
            (err) => {
                if (err) {
                    return reject(err)
                }

                return resolve()
            },
            (progress) => {
                // stream progress updates to the renderer
                sendToRender(`pkg:update:status:${manifest.id}`, {
                    progress: progress,
                    statusText: `Downloaded ${convertSize(progress.transferred ?? 0)} / ${convertSize(progress.length)} | ${convertSize(progress.speed)}/s`,
                })
            }
        )
    })

    if (step.extract) {
        // a string value is an explicit extraction target; otherwise extract
        // into the install root
        if (typeof step.extract === "string") {
            step.extract = path.resolve(manifest.install_path, step.extract)
        } else {
            step.extract = path.resolve(manifest.install_path, ".")
        }

        sendToRender(`pkg:update:status:${manifest.id}`, {
            statusText: `Extracting bundle...`,
        })

        await extractFile(_path, step.extract)

        if (step.delete_after_extract) {
            sendToRender(`pkg:update:status:${manifest.id}`, {
                statusText: `Deleting temporal files...`,
            })

            await fs.promises.rm(_path, { recursive: true })
        }
    }
}

View File

@ -1,44 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import upath from "upath"
import { execa } from "../lib/execa"
import sendToRender from "../utils/sendToRender"
import Vars from "../vars"
/**
 * Generic step: clones a git repository (including submodules) into the
 * package install directory.
 *
 * @param {object} manifest - package manifest (provides id and install_path)
 * @param {object} step - { url, path }
 * @returns {Promise<object>} the manifest, unchanged
 */
export default async (manifest, step) => {
    // prefer the bundled git binary when present, otherwise rely on PATH
    const gitBin = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
    const clonePath = upath.normalizeSafe(path.resolve(manifest.install_path, step.path))

    if (!fs.existsSync(clonePath)) {
        fs.mkdirSync(clonePath, { recursive: true })
    }

    sendToRender(`pkg:update:status`, {
        id: manifest.id,
        statusText: `Cloning ${step.url}`,
    })

    console.log(`USING GIT BIN >`, gitBin)
    console.log(`[${manifest.id}] steps.git_clone() | Cloning ${step.url}...`)

    await execa(
        gitBin,
        [
            "clone",
            //`--depth ${step.depth ?? 1}`,
            //"--filter=blob:none",
            //"--filter=tree:0",
            "--recurse-submodules",
            "--remote-submodules",
            step.url,
            clonePath,
        ],
        {
            cwd: clonePath,
            stdout: "inherit",
            stderr: "inherit",
        },
    )

    return manifest
}

View File

@ -1,29 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import { execa } from "../lib/execa"
import sendToRender from "../utils/sendToRender"
import Vars from "../vars"
/**
 * Generic step: runs `git pull --rebase` inside the step's target directory.
 *
 * @param {object} manifest - package manifest (provides id and install_path)
 * @param {object} step - { path }
 * @returns {Promise<object>} the manifest, unchanged
 */
export default async (manifest, step) => {
    // prefer the bundled git binary when present, otherwise rely on PATH
    const gitBin = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
    const workDir = path.resolve(manifest.install_path, step.path)

    sendToRender(`pkg:update:status`, {
        id: manifest.id,
        statusText: `Pulling...`,
    })

    console.log(`[${manifest.id}] steps.git_pull() | Pulling...`)

    fs.mkdirSync(workDir, { recursive: true })

    await execa(gitBin, ["pull", "--rebase"], {
        cwd: workDir,
        stdout: "inherit",
        stderr: "inherit",
    })

    return manifest
}

View File

@ -1,77 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import { execa } from "../lib/execa"
import sendToRender from "../utils/sendToRender"
import git_pull from "./git_pull"
import Vars from "../vars"
/**
 * Generic step: fetches from origin, cleans untracked files, hard-resets the
 * working tree to `step.from` (default HEAD), pulls the latest changes, and
 * checks out HEAD.
 *
 * @param {object} manifest - package manifest (provides id and install_path)
 * @param {object} step - { path, from? }
 * @returns {Promise<object>} the manifest, unchanged
 */
export default async (manifest, step) => {
    // prefer the bundled git binary when present, otherwise rely on PATH
    const gitBin = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
    const workDir = path.resolve(manifest.install_path, step.path)
    const from = step.from ?? "HEAD"

    // run a git command inside the working directory, streaming output
    const runGit = (gitArgs) => execa(gitBin, gitArgs, {
        cwd: workDir,
        stdout: "inherit",
        stderr: "inherit",
    })

    // push a short status line to the renderer
    const setStatus = (statusText) => sendToRender(`pkg:update:status`, {
        id: manifest.id,
        statusText,
    })

    if (!fs.existsSync(workDir)) {
        fs.mkdirSync(workDir, { recursive: true })
    }

    setStatus(`Fetching from origin...`)
    console.log(`[${manifest.id}] steps.git_reset() | Fetching from origin`)

    // fetch from origin
    await runGit(["fetch", "origin"])

    setStatus(`Cleaning untracked files...`)
    console.log(`[${manifest.id}] steps.git_reset() | Cleaning`)

    await runGit(["clean", "-df"])

    setStatus(`Reset from ${from}`)
    console.log(`[${manifest.id}] steps.git_reset() | Resetting to ${from}`)

    await runGit(["reset", "--hard", from])

    // pull the latest
    await git_pull(manifest, step)

    setStatus(`Checkout to HEAD`)
    console.log(`[${manifest.id}] steps.git_reset() | Checkout to head`)

    await runGit(["checkout", "HEAD"])

    return manifest
}

View File

@ -1,108 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import os from "node:os"
import { pipeline as streamPipeline } from "node:stream/promises"
import humanFormat from "human-format"
import got from "got"
import parseStringVars from "../utils/parseStringVars"
import sendToRender from "../utils/sendToRender"
import extractFile from "../utils/extractFile"
// Renders a raw byte count as a human readable size string (e.g. "12.34 kB").
function convertSize(size) {
    const human = humanFormat(size, { decimals: 2 })

    return `${human}B`
}
/**
 * Generic step: downloads a file over HTTP into the package install directory
 * (or the OS temp dir), reporting progress, and optionally extracting it.
 *
 * @param {object} manifest - package manifest (provides id and install_path)
 * @param {object} step - { url, path?, tmp?, simple?, extract?, delete_after_extract? }
 */
export default async (manifest, step) => {
    // resolve template variables (e.g. install dir placeholders) in the path
    step.path = await parseStringVars(step.path, manifest)

    let _path = path.resolve(manifest.install_path, step.path ?? ".")

    sendToRender(`pkg:update:status:${manifest.id}`, {
        status: "loading",
        statusText: `Downloading ${step.url}`,
    })

    console.log(`[${manifest.id}] steps.http() | Downloading ${step.url} to ${_path}`)

    if (step.tmp) {
        // time-stamped temp directory keeps concurrent downloads apart
        _path = path.resolve(os.tmpdir(), String(new Date().getTime()), path.basename(step.url))
    }

    // ensure the parent directory exists
    fs.mkdirSync(path.resolve(_path, ".."), { recursive: true })

    if (step.simple) {
        // simple mode: stream straight to disk without progress reporting
        await streamPipeline(
            got.stream(step.url),
            fs.createWriteStream(_path)
        )
    } else {
        const remoteStream = got.stream(step.url)
        const localStream = fs.createWriteStream(_path)

        let progress = {
            transferred: 0,
            total: 0,
            speed: 0,
        }

        let lastTransferred = 0

        sendToRender(`pkg:update:status:${manifest.id}`, {
            statusText: `Starting download...`,
        })

        remoteStream.pipe(localStream)

        remoteStream.on("downloadProgress", (_progress) => {
            progress = _progress
        })

        // once a second, derive the transfer speed from the delta since the
        // last tick and push a progress update to the renderer
        const progressInterval = setInterval(() => {
            progress.speed = ((progress.transferred ?? 0) - lastTransferred) / 1

            lastTransferred = progress.transferred ?? 0

            sendToRender(`pkg:update:status:${manifest.id}`, {
                progress: progress,
                statusText: `Downloaded ${convertSize(progress.transferred ?? 0)} / ${convertSize(progress.total)} | ${convertSize(progress.speed)}/s`,
            })
        }, 1000)

        // resolve when the file has been fully written to disk
        await new Promise((resolve, reject) => {
            localStream.on("finish", resolve)
            localStream.on("error", reject)
        })

        clearInterval(progressInterval)
    }

    if (step.extract) {
        // a string value is an explicit extraction target; otherwise extract
        // into the install root
        if (typeof step.extract === "string") {
            step.extract = path.resolve(manifest.install_path, step.extract)
        } else {
            step.extract = path.resolve(manifest.install_path, ".")
        }

        sendToRender(`pkg:update:status:${manifest.id}`, {
            statusText: `Extracting bundle...`,
        })

        await extractFile(_path, step.extract)

        if (step.delete_after_extract) {
            console.log(`[${manifest.id}] steps.http() | Deleting temporal file [${_path}]...`)

            sendToRender(`pkg:update:status:${manifest.id}`, {
                statusText: `Deleting temporal files...`,
            })

            await fs.promises.rm(_path, { recursive: true })
        }
    }
}

View File

@ -1,79 +0,0 @@
import ISM_DRIVE_DL from "./drive"
import ISM_HTTP from "./http"
import ISM_GIT_CLONE from "./git_clone"
import ISM_GIT_PULL from "./git_pull"
import ISM_GIT_RESET from "./git_reset"
const InstallationStepsMethods = {
drive_dl: ISM_DRIVE_DL,
http: ISM_HTTP,
git_clone: ISM_GIT_CLONE,
git_pull: ISM_GIT_PULL,
git_reset: ISM_GIT_RESET,
}
const StepsOrders = [
"git_clones",
"git_clones_steps",
"git_pulls",
"git_update",
"git_pulls_steps",
"git_reset",
"drive_downloads",
"http_downloads",
]
/**
 * Runs a package's generic steps (downloads, git operations) in the
 * deterministic order defined by StepsOrders.
 *
 * @param {object} pkg - package manifest
 * @param {object} steps - map of step kind -> array of step definitions
 * @returns {Promise<object>} the same pkg object
 * @throws {Error} when an unknown step kind is encountered
 */
export default async function processGenericSteps(pkg, steps) {
    console.log(`[${pkg.id}] steps() | Processing steps...`, steps)

    // order step groups by their position in StepsOrders
    const orderedSteps = Object.entries(steps)
        .sort(([a], [b]) => StepsOrders.indexOf(a) - StepsOrders.indexOf(b))

    if (orderedSteps.length === 0) {
        return pkg
    }

    for (const [stepKey, stepValue] of orderedSteps) {
        // resolve the handler for this step kind
        let handler = null

        switch (stepKey) {
            case "drive_downloads":
                handler = InstallationStepsMethods.drive_dl
                break
            case "http_downloads":
                handler = InstallationStepsMethods.http
                break
            case "git_clones":
            case "git_clones_steps":
                handler = InstallationStepsMethods.git_clone
                break
            case "git_pulls":
            case "git_update":
            case "git_pulls_steps":
                handler = InstallationStepsMethods.git_pull
                break
            case "git_reset":
                handler = InstallationStepsMethods.git_reset
                break
            default:
                throw new Error(`Unknown step: ${stepKey}`)
        }

        // steps within a group run sequentially
        for (const step of stepValue) {
            await handler(pkg, step)
        }
    }

    return pkg
}

View File

@ -1,11 +1,13 @@
import RelicCore from "../../../core/src/index"
import sendToRender from "./utils/sendToRender"
global.SettingsStore = new Store({
name: "settings",
watch: true,
})
import RelicCore from "@ragestudio/relic-core/src"
import CoreAdapter from "./classes/CoreAdapter"
import sendToRender from "./utils/sendToRender"
import path from "node:path"
import { app, shell, BrowserWindow, ipcMain } from "electron"
@ -15,71 +17,57 @@ import Store from "electron-store"
import pkg from "../../package.json"
import PkgManager from "./manager"
import { readManifest } from "./utils/readManifest"
import AuthService from "./auth"
const { autoUpdater } = require("electron-differential-updater")
const ProtocolRegistry = require("protocol-registry")
const protocolRegistryNamespace = "rsbundle"
const protocolRegistryNamespace = "relic"
class ElectronApp {
constructor() {
this.pkgManager = new PkgManager()
this.win = null
this.core = new RelicCore()
this.adapter = new CoreAdapter(this, this.core)
}
core = new RelicCore()
authService = global.authService = new AuthService()
handlers = {
"pkg:list": async () => {
return await this.pkgManager.getInstalledPackages()
return await this.core.package.list()
},
"pkg:get": async (event, manifest_id) => {
return await this.pkgManager.getInstalledPackages(manifest_id)
"pkg:get": async (event, pkg_id) => {
return await this.core.db.getPackages(pkg_id)
},
"pkg:read": async (event, manifest_url) => {
return JSON.stringify(await readManifest(manifest_url))
"pkg:read": async (event, manifest_path, options = {}) => {
const manifest = await this.core.package.read(manifest_path, options)
return JSON.stringify({
...this.core.db.defaultPackageState({ ...manifest }),
...manifest,
name: manifest.pkg_name,
})
},
"pkg:install": async (event, manifest) => {
this.pkgManager.install(manifest)
"pkg:install": async (event, manifest_path) => {
return await this.core.package.install(manifest_path)
},
"pkg:update": async (event, manifest_id, { execOnFinish = false } = {}) => {
await this.pkgManager.update(manifest_id)
"pkg:update": async (event, pkg_id, { execOnFinish = false } = {}) => {
await this.core.package.update(pkg_id)
if (execOnFinish) {
await this.pkgManager.execute(manifest_id)
}
},
"pkg:apply": async (event, manifest_id, changes) => {
return await this.pkgManager.applyChanges(manifest_id, changes)
},
"pkg:retry_install": async (event, manifest_id) => {
const pkg = await this.pkgManager.getInstalledPackages(manifest_id)
if (!pkg) {
return false
await this.core.package.execute(pkg_id)
}
await this.pkgManager.install(pkg)
return true
},
"pkg:cancel_install": async (event, manifest_id) => {
return await this.pkgManager.uninstall(manifest_id)
"pkg:apply": async (event, pkg_id, changes) => {
return await this.core.package.apply(pkg_id, changes)
},
"pkg:delete_auth": async (event, manifest_id) => {
return this.authService.unauthorize(manifest_id)
"pkg:uninstall": async (event, pkg_id) => {
return await this.core.package.uninstall(pkg_id)
},
"pkg:uninstall": async (event, ...args) => {
return await this.pkgManager.uninstall(...args)
"pkg:execute": async (event, pkg_id) => {
return await this.core.package.execute(pkg_id)
},
"pkg:execute": async (event, ...args) => {
return await this.pkgManager.execute(...args)
},
"pkg:open": async (event, manifest_id) => {
return await this.pkgManager.open(manifest_id)
"pkg:open": async (event, pkg_id) => {
return await this.core.openPath(pkg_id)
},
"updater:check": () => {
autoUpdater.checkForUpdates()
@ -103,6 +91,7 @@ class ElectronApp {
},
"app:init": async (event, data) => {
try {
await this.core.initialize()
await this.core.setup()
} catch (err) {
console.error(err)
@ -122,7 +111,7 @@ class ElectronApp {
events = {
"open-runtime-path": () => {
return this.pkgManager.openRuntimePath()
return this.core.openPath()
},
"open-dev-logs": () => {
return sendToRender("new:message", {
@ -165,8 +154,6 @@ class ElectronApp {
handleURLProtocol(url) {
const urlStarter = `${protocolRegistryNamespace}://`
console.log(url)
if (url.startsWith(urlStarter)) {
const urlValue = url.split(urlStarter)[1]
@ -177,16 +164,14 @@ class ElectronApp {
explicitAction[0] = explicitAction[0].slice(0, -1)
}
console.log(explicitAction)
if (explicitAction.length > 0) {
switch (explicitAction[0]) {
case "authorize": {
if (!explicitAction[2]) {
const [pkgid, token] = explicitAction[1].split("%23")
return this.authService.authorize(pkgid, token)
const [pkg_id, token] = explicitAction[1].split("%23")
return this.core.auth.authorize(pkg_id, token)
} else {
return this.authService.authorize(explicitAction[1], explicitAction[2])
return this.core.auth.authorize(explicitAction[1], explicitAction[2])
}
}
default: {

View File

@ -1,59 +0,0 @@
import open from "open"
import axios from "axios"
import sendToRender from "../../utils/sendToRender"
/**
 * Resolves and requests authorization data for a package manifest.
 */
export default class Auth {
    /**
     * @param {object} manifest - package manifest; may declare an `auth`
     *   block with `getter` (POST endpoint) and `fetcher` (browser URL)
     */
    constructor(manifest) {
        this.manifest = manifest

        console.log(this.manifest)
    }

    /**
     * Returns the auth payload for this package. When the manifest declares
     * an `auth.getter` endpoint, the locally stored auth data is exchanged
     * against it and the endpoint's response data is returned instead.
     *
     * @returns {Promise<*>} resolved auth data, or the raw stored data
     * @throws {Error} when the getter request fails
     */
    async get() {
        const authData = global.authService.getAuth(this.manifest.id)

        console.log(authData)

        if (authData && this.manifest.auth && this.manifest.auth.getter) {
            const result = await axios({
                method: "POST",
                url: this.manifest.auth.getter,
                headers: {
                    "Content-Type": "application/json",
                },
                data: {
                    auth_data: authData,
                }
            }).catch((err) => {
                // FIX: err.response is undefined on network errors; use
                // optional chaining so we surface a notification instead of
                // throwing a TypeError inside this handler.
                sendToRender(`new:notification`, {
                    type: "error",
                    message: "Failed to authorize",
                    description: err.response?.data?.message ?? err.response?.data?.error ?? err.message,
                    duration: 10
                })

                return err
            })

            // the catch branch returns the error so callers can rethrow it
            if (result instanceof Error) {
                throw result
            }

            console.log(result.data)

            return result.data
        }

        return authData
    }

    /**
     * Opens the manifest's auth fetcher URL in the user's browser.
     *
     * @returns {false|undefined} false when the manifest declares no auth
     */
    request() {
        if (!this.manifest.auth) {
            return false
        }

        const authURL = this.manifest.auth.fetcher

        open(authURL)
    }
}

View File

@ -1,955 +0,0 @@
import {type Buffer} from 'node:buffer';
import {type ChildProcess} from 'node:child_process';
import {type Stream, type Readable as ReadableStream, type Writable as WritableStream} from 'node:stream';
// Accepted values for a single child-process stdio slot
// (mirrors Node's child_process `stdio` array entries).
export type StdioOption =
	| 'pipe'
	| 'overlapped'
	| 'ipc'
	| 'ignore'
	| 'inherit'
	| Stream
	| number
	| undefined;

// Character encodings accepted for decoding stdout/stderr;
// 'buffer' or null yields raw Buffers instead of strings.
type EncodingOption =
	| 'utf8'
	// eslint-disable-next-line unicorn/text-encoding-identifier-case
	| 'utf-8'
	| 'utf16le'
	| 'utf-16le'
	| 'ucs2'
	| 'ucs-2'
	| 'latin1'
	| 'binary'
	| 'ascii'
	| 'hex'
	| 'base64'
	| 'base64url'
	| 'buffer'
	| null
	| undefined;

// Encoding used when none is specified.
type DefaultEncodingOption = 'utf8';
// Encodings that make the output a Buffer rather than a string.
type BufferEncodingOption = 'buffer' | null;
export type CommonOptions<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
Kill the spawned process when the parent process exits unless either:
- the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached)
- the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit
@default true
*/
readonly cleanup?: boolean;
/**
Prefer locally installed binaries when looking for a binary to execute.
If you `$ npm install foo`, you can then `execa('foo')`.
@default `true` with `$`, `false` otherwise
*/
readonly preferLocal?: boolean;
/**
Preferred path to find locally installed binaries in (use with `preferLocal`).
@default process.cwd()
*/
readonly localDir?: string | URL;
/**
Path to the Node.js executable to use in child processes.
This can be either an absolute path or a path relative to the `cwd` option.
Requires `preferLocal` to be `true`.
For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process.
@default process.execPath
*/
readonly execPath?: string;
/**
Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected.
If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data.
@default true
*/
readonly buffer?: boolean;
/**
Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
@default `inherit` with `$`, `pipe` otherwise
*/
readonly stdin?: StdioOption;
/**
Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
@default 'pipe'
*/
readonly stdout?: StdioOption;
/**
Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
@default 'pipe'
*/
readonly stderr?: StdioOption;
/**
Setting this to `false` resolves the promise with the error instead of rejecting it.
@default true
*/
readonly reject?: boolean;
/**
Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved.
@default false
*/
readonly all?: boolean;
/**
Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output.
@default true
*/
readonly stripFinalNewline?: boolean;
/**
Set to `false` if you don't want to extend the environment variables when providing the `env` property.
@default true
*/
readonly extendEnv?: boolean;
/**
Current working directory of the child process.
@default process.cwd()
*/
readonly cwd?: string | URL;
/**
Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this.
@default process.env
*/
readonly env?: NodeJS.ProcessEnv;
/**
Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified.
*/
readonly argv0?: string;
/**
Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration.
@default 'pipe'
*/
readonly stdio?: 'pipe' | 'overlapped' | 'ignore' | 'inherit' | readonly StdioOption[];
/**
Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execaNode()`:
- `json`: Uses `JSON.stringify()` and `JSON.parse()`.
- `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value)
[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization)
@default 'json'
*/
readonly serialization?: 'json' | 'advanced';
/**
Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached).
@default false
*/
readonly detached?: boolean;
/**
Sets the user identity of the process.
*/
readonly uid?: number;
/**
Sets the group identity of the process.
*/
readonly gid?: number;
/**
If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows.
We recommend against using this option since it is:
- not cross-platform, encouraging shell-specific syntax.
- slower, because of the additional shell interpretation.
- unsafe, potentially allowing command injection.
@default false
*/
readonly shell?: boolean | string;
/**
Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `'buffer'` or `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string.
@default 'utf8'
*/
readonly encoding?: EncodingType;
/**
If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds.
@default 0
*/
readonly timeout?: number;
/**
Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB.
@default 100_000_000
*/
readonly maxBuffer?: number;
/**
Signal value to be used when the spawned process will be killed.
@default 'SIGTERM'
*/
readonly killSignal?: string | number;
/**
You can abort the spawned process using [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController).
When `AbortController.abort()` is called, [`.isCanceled`](https://github.com/sindresorhus/execa#iscanceled) becomes `true`.
@example
```
import {execa} from 'execa';
const abortController = new AbortController();
const subprocess = execa('node', [], {signal: abortController.signal});
setTimeout(() => {
abortController.abort();
}, 1000);
try {
await subprocess;
} catch (error) {
console.log(subprocess.killed); // true
console.log(error.isCanceled); // true
}
```
*/
readonly signal?: AbortSignal;
/**
If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`.
@default false
*/
readonly windowsVerbatimArguments?: boolean;
/**
On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows.
@default true
*/
readonly windowsHide?: boolean;
/**
Print each command on `stderr` before executing it.
This can also be enabled by setting the `NODE_DEBUG=execa` environment variable in the current process.
@default false
*/
readonly verbose?: boolean;
};
export type Options<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
Write some input to the `stdin` of your binary.
If the input is a file, use the `inputFile` option instead.
*/
readonly input?: string | Buffer | ReadableStream;
/**
Use a file as input to the `stdin` of your binary.
If the input is not a file, use the `input` option instead.
*/
readonly inputFile?: string;
} & CommonOptions<EncodingType>;
// Options for `execaSync()`/`execaCommandSync()`. Same as `Options`, except `input` cannot be a stream.
export type SyncOptions<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
Write some input to the `stdin` of your binary.
If the input is a file, use the `inputFile` option instead.
*/
readonly input?: string | Buffer;
/**
Use a file as input to the `stdin` of your binary.
If the input is not a file, use the `input` option instead.
*/
readonly inputFile?: string;
} & CommonOptions<EncodingType>;
// Options accepted by `execaNode()`, extending `Options` with Node.js-specific settings.
export type NodeOptions<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
The Node.js executable to use.
@default process.execPath
*/
readonly nodePath?: string;
/**
List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable.
@default process.execArgv
*/
readonly nodeOptions?: string[];
} & Options<EncodingType>;
// Possible shapes of captured output: string (default), Buffer (`encoding: 'buffer'`/`null`)
// or `undefined` (stream not buffered).
type StdoutStderrAll = string | Buffer | undefined;
// Fields shared by both the sync and async result/error objects.
export type ExecaReturnBase<StdoutStderrType extends StdoutStderrAll> = {
/**
The file and arguments that were run, for logging purposes.
This is not escaped and should not be executed directly as a process, including using `execa()` or `execaCommand()`.
*/
command: string;
/**
Same as `command` but escaped.
This is meant to be copy and pasted into a shell, for debugging purposes.
Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execaCommand()`.
*/
escapedCommand: string;
/**
The numeric exit code of the process that was run.
*/
exitCode: number;
/**
The output of the process on stdout.
*/
stdout: StdoutStderrType;
/**
The output of the process on stderr.
*/
stderr: StdoutStderrType;
/**
Whether the process failed to run.
*/
failed: boolean;
/**
Whether the process timed out.
*/
timedOut: boolean;
/**
Whether the process was killed.
*/
killed: boolean;
/**
The name of the signal that was used to terminate the process. For example, `SIGFPE`.
If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`.
*/
signal?: string;
/**
A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`.
If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen.
*/
signalDescription?: string;
/**
The `cwd` of the command if provided in the command options. Otherwise it is `process.cwd()`.
*/
cwd: string;
};
// Result of `execaSync()`. Currently adds nothing beyond the shared base fields.
export type ExecaSyncReturnValue<StdoutStderrType extends StdoutStderrAll = string> = {
} & ExecaReturnBase<StdoutStderrType>;
/**
Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance.
The child process fails when:
- its exit code is not `0`
- it was killed with a signal
- timing out
- being canceled
- there's not enough memory or there are already too many child processes
*/
export type ExecaReturnValue<StdoutStderrType extends StdoutStderrAll = string> = {
/**
The output of the process with `stdout` and `stderr` interleaved.
This is `undefined` if either:
- the `all` option is `false` (default value)
- `execaSync()` was used
*/
all?: StdoutStderrType;
/**
Whether the process was canceled.
You can cancel the spawned process using the [`signal`](https://github.com/sindresorhus/execa#signal-1) option.
*/
isCanceled: boolean;
} & ExecaSyncReturnValue<StdoutStderrType>;
// Error thrown by `execaSync()` (or returned when `reject: false`).
export type ExecaSyncError<StdoutStderrType extends StdoutStderrAll = string> = {
/**
Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored.
The child process stderr then stdout are appended to the end, separated with newlines and not interleaved.
*/
message: string;
/**
This is the same as the `message` property except it does not include the child process stdout/stderr.
*/
shortMessage: string;
/**
Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa.
This is `undefined` unless the child process exited due to an `error` event or a timeout.
*/
originalMessage?: string;
} & Error & ExecaReturnBase<StdoutStderrType>;
// Error thrown by `execa()` (or returned when `reject: false`).
export type ExecaError<StdoutStderrType extends StdoutStderrAll = string> = {
/**
The output of the process with `stdout` and `stderr` interleaved.
This is `undefined` if either:
- the `all` option is `false` (default value)
- `execaSync()` was used
*/
all?: StdoutStderrType;
/**
Whether the process was canceled.
*/
isCanceled: boolean;
} & ExecaSyncError<StdoutStderrType>;
// Options accepted by the `kill()` method added to the returned child process.
export type KillOptions = {
/**
Milliseconds to wait for the child process to terminate before sending `SIGKILL`.
Can be disabled with `false`.
@default 5000
*/
forceKillAfterTimeout?: number | false;
};
// Promise-like surface added by execa on top of the Node.js `ChildProcess`.
export type ExecaChildPromise<StdoutStderrType extends StdoutStderrAll> = {
/**
Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr).
This is `undefined` if either:
- the `all` option is `false` (the default value)
- both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio)
*/
all?: ReadableStream;
catch<ResultType = never>(
onRejected?: (reason: ExecaError<StdoutStderrType>) => ResultType | PromiseLike<ResultType>
): Promise<ExecaReturnValue<StdoutStderrType> | ResultType>;
/**
Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. Note that this graceful termination does not work on Windows, because Windows [doesn't support signals](https://nodejs.org/api/process.html#process_signal_events) (`SIGKILL` and `SIGTERM` has the same effect of force-killing the process immediately.) If you want to achieve graceful termination on Windows, you have to use other means, such as [`taskkill`](https://github.com/sindresorhus/taskkill).
*/
kill(signal?: string, options?: KillOptions): void;
/**
Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This used to be preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. But now this is deprecated and you should either use `.kill()` or the `signal` option when creating the child process.
*/
cancel(): void;
/**
[Pipe](https://nodejs.org/api/stream.html#readablepipedestination-options) the child process's `stdout` to `target`, which can be:
- Another `execa()` return value
- A writable stream
- A file path string
If the `target` is another `execa()` return value, it is returned. Otherwise, the original `execa()` return value is returned. This allows chaining `pipeStdout()` then `await`ing the final result.
The `stdout` option must be kept as `pipe`, its default value.
*/
pipeStdout?<Target extends ExecaChildPromise<StdoutStderrAll>>(target: Target): Target;
pipeStdout?(target: WritableStream | string): ExecaChildProcess<StdoutStderrType>;
/**
Like `pipeStdout()` but piping the child process's `stderr` instead.
The `stderr` option must be kept as `pipe`, its default value.
*/
pipeStderr?<Target extends ExecaChildPromise<StdoutStderrAll>>(target: Target): Target;
pipeStderr?(target: WritableStream | string): ExecaChildProcess<StdoutStderrType>;
/**
Combines both `pipeStdout()` and `pipeStderr()`.
Either the `stdout` option or the `stderr` option must be kept as `pipe`, their default value. Also, the `all` option must be set to `true`.
*/
pipeAll?<Target extends ExecaChildPromise<StdoutStderrAll>>(target: Target): Target;
pipeAll?(target: WritableStream | string): ExecaChildProcess<StdoutStderrType>;
};
// What `execa()` actually returns: a `ChildProcess` that is also a `Promise` of the result.
export type ExecaChildProcess<StdoutStderrType extends StdoutStderrAll = string> = ChildProcess &
ExecaChildPromise<StdoutStderrType> &
Promise<ExecaReturnValue<StdoutStderrType>>;
/**
Executes a command using `file ...arguments`. `arguments` are specified as an array of strings. Returns a `childProcess`.
Arguments are automatically escaped. They can contain any character, including spaces.
This is the preferred method when executing single commands.
@param file - The program/script to execute.
@param arguments - Arguments to pass to `file` on execution.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example <caption>Promise interface</caption>
```
import {execa} from 'execa';
const {stdout} = await execa('echo', ['unicorns']);
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Redirect output to a file</caption>
```
import {execa} from 'execa';
// Similar to `echo unicorns > stdout.txt` in Bash
await execa('echo', ['unicorns']).pipeStdout('stdout.txt');
// Similar to `echo unicorns 2> stdout.txt` in Bash
await execa('echo', ['unicorns']).pipeStderr('stderr.txt');
// Similar to `echo unicorns &> stdout.txt` in Bash
await execa('echo', ['unicorns'], {all: true}).pipeAll('all.txt');
```
@example <caption>Redirect input from a file</caption>
```
import {execa} from 'execa';
// Similar to `cat < stdin.txt` in Bash
const {stdout} = await execa('cat', {inputFile: 'stdin.txt'});
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Save and pipe output from a child process</caption>
```
import {execa} from 'execa';
const {stdout} = await execa('echo', ['unicorns']).pipeStdout(process.stdout);
// Prints `unicorns`
console.log(stdout);
// Also returns 'unicorns'
```
@example <caption>Pipe multiple processes</caption>
```
import {execa} from 'execa';
// Similar to `echo unicorns | cat` in Bash
const {stdout} = await execa('echo', ['unicorns']).pipeStdout(execa('cat'));
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Handling errors</caption>
```
import {execa} from 'execa';
// Catching an error
try {
await execa('unknown', ['command']);
} catch (error) {
console.log(error);
/*
{
message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
errno: -2,
code: 'ENOENT',
syscall: 'spawn unknown',
path: 'unknown',
spawnargs: ['command'],
originalMessage: 'spawn unknown ENOENT',
shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
command: 'unknown command',
escapedCommand: 'unknown command',
stdout: '',
stderr: '',
failed: true,
timedOut: false,
isCanceled: false,
killed: false,
cwd: '/path/to/cwd'
}
\*\/
}
```
@example <caption>Graceful termination</caption>
```
const subprocess = execa('node');
setTimeout(() => {
subprocess.kill('SIGTERM', {
forceKillAfterTimeout: 2000
});
}, 1000);
```
*/
// Default: `stdout`/`stderr` are strings.
export function execa(
file: string,
arguments?: readonly string[],
options?: Options
): ExecaChildProcess;
// Buffer-output variant (`encoding: 'buffer'`/`null`).
export function execa(
file: string,
arguments?: readonly string[],
options?: Options<BufferEncodingOption>
): ExecaChildProcess<Buffer>;
// Shorthands without the `arguments` array.
export function execa(file: string, options?: Options): ExecaChildProcess;
export function execa(file: string, options?: Options<BufferEncodingOption>): ExecaChildProcess<Buffer>;
/**
Same as `execa()` but synchronous.
@param file - The program/script to execute.
@param arguments - Arguments to pass to `file` on execution.
@returns A `childProcessResult` object
@throws A `childProcessResult` error
@example <caption>Basic usage</caption>
```
import {execaSync} from 'execa';
const {stdout} = execaSync('echo', ['unicorns']);
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Redirect input from a file</caption>
```
import {execaSync} from 'execa';
// Similar to `cat < stdin.txt` in Bash
const {stdout} = execaSync('cat', {inputFile: 'stdin.txt'});
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Handling errors</caption>
```
import {execaSync} from 'execa';
// Catching an error
try {
execaSync('unknown', ['command']);
} catch (error) {
console.log(error);
/*
{
message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT',
errno: -2,
code: 'ENOENT',
syscall: 'spawnSync unknown',
path: 'unknown',
spawnargs: ['command'],
originalMessage: 'spawnSync unknown ENOENT',
shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT',
command: 'unknown command',
escapedCommand: 'unknown command',
stdout: '',
stderr: '',
failed: true,
timedOut: false,
isCanceled: false,
killed: false,
cwd: '/path/to/cwd'
}
\*\/
}
```
*/
// Default: `stdout`/`stderr` are strings.
export function execaSync(
file: string,
arguments?: readonly string[],
options?: SyncOptions
): ExecaSyncReturnValue;
// Buffer-output variant (`encoding: 'buffer'`/`null`).
export function execaSync(
file: string,
arguments?: readonly string[],
options?: SyncOptions<BufferEncodingOption>
): ExecaSyncReturnValue<Buffer>;
// Shorthands without the `arguments` array.
export function execaSync(file: string, options?: SyncOptions): ExecaSyncReturnValue;
export function execaSync(
file: string,
options?: SyncOptions<BufferEncodingOption>
): ExecaSyncReturnValue<Buffer>;
/**
Executes a command. The `command` string includes both the `file` and its `arguments`. Returns a `childProcess`.
Arguments are automatically escaped. They can contain any character, but spaces must be escaped with a backslash like `execaCommand('echo has\\ space')`.
This is the preferred method when executing a user-supplied `command` string, such as in a REPL.
@param command - The program/script to execute and its arguments.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example
```
import {execaCommand} from 'execa';
const {stdout} = await execaCommand('echo unicorns');
console.log(stdout);
//=> 'unicorns'
```
*/
export function execaCommand(command: string, options?: Options): ExecaChildProcess;
// Buffer-output variant (`encoding: 'buffer'`/`null`).
export function execaCommand(command: string, options?: Options<BufferEncodingOption>): ExecaChildProcess<Buffer>;
/**
Same as `execaCommand()` but synchronous.
@param command - The program/script to execute and its arguments.
@returns A `childProcessResult` object
@throws A `childProcessResult` error
@example
```
import {execaCommandSync} from 'execa';
const {stdout} = execaCommandSync('echo unicorns');
console.log(stdout);
//=> 'unicorns'
```
*/
export function execaCommandSync(command: string, options?: SyncOptions): ExecaSyncReturnValue;
// Buffer-output variant (`encoding: 'buffer'`/`null`).
export function execaCommandSync(command: string, options?: SyncOptions<BufferEncodingOption>): ExecaSyncReturnValue<Buffer>;
// Values allowed as `` $`...` `` interpolations: strings, numbers, execa results
// (their `stdout` is substituted) or arrays of those.
type TemplateExpression =
| string
| number
| ExecaReturnValue<string | Buffer>
| ExecaSyncReturnValue<string | Buffer>
| Array<string | number | ExecaReturnValue<string | Buffer> | ExecaSyncReturnValue<string | Buffer>>;
// Shape of the `$` export: callable as an options setter or as a template tag.
type Execa$<StdoutStderrType extends StdoutStderrAll = string> = {
/**
Returns a new instance of `$` but with different default `options`. Consecutive calls are merged to previous ones.
This can be used to either:
- Set options for a specific command: `` $(options)`command` ``
- Share options for multiple commands: `` const $$ = $(options); $$`command`; $$`otherCommand` ``
@param options - Options to set
@returns A new instance of `$` with those `options` set
@example
```
import {$} from 'execa';
const $$ = $({stdio: 'inherit'});
await $$`echo unicorns`;
//=> 'unicorns'
await $$`echo rainbows`;
//=> 'rainbows'
```
*/
(options: Options<undefined>): Execa$<StdoutStderrType>;
(options: Options): Execa$;
(options: Options<BufferEncodingOption>): Execa$<Buffer>;
(
templates: TemplateStringsArray,
...expressions: TemplateExpression[]
): ExecaChildProcess<StdoutStderrType>;
/**
Same as $\`command\` but synchronous.
@returns A `childProcessResult` object
@throws A `childProcessResult` error
@example <caption>Basic</caption>
```
import {$} from 'execa';
const branch = $.sync`git branch --show-current`;
$.sync`dep deploy --branch=${branch}`;
```
@example <caption>Multiple arguments</caption>
```
import {$} from 'execa';
const args = ['unicorns', '&', 'rainbows!'];
const {stdout} = $.sync`echo ${args}`;
console.log(stdout);
//=> 'unicorns & rainbows!'
```
@example <caption>With options</caption>
```
import {$} from 'execa';
$.sync({stdio: 'inherit'})`echo unicorns`;
//=> 'unicorns'
```
@example <caption>Shared options</caption>
```
import {$} from 'execa';
const $$ = $({stdio: 'inherit'});
$$.sync`echo unicorns`;
//=> 'unicorns'
$$.sync`echo rainbows`;
//=> 'rainbows'
```
*/
sync(
templates: TemplateStringsArray,
...expressions: TemplateExpression[]
): ExecaSyncReturnValue<StdoutStderrType>;
};
/**
Executes a command. The `command` string includes both the `file` and its `arguments`. Returns a `childProcess`.
Arguments are automatically escaped. They can contain any character, but spaces must use `${}` like `` $`echo ${'has space'}` ``.
This is the preferred method when executing multiple commands in a script file.
The `command` string can inject any `${value}` with the following types: string, number, `childProcess` or an array of those types. For example: `` $`echo one ${'two'} ${3} ${['four', 'five']}` ``. For `${childProcess}`, the process's `stdout` is used.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example <caption>Basic</caption>
```
import {$} from 'execa';
const branch = await $`git branch --show-current`;
await $`dep deploy --branch=${branch}`;
```
@example <caption>Multiple arguments</caption>
```
import {$} from 'execa';
const args = ['unicorns', '&', 'rainbows!'];
const {stdout} = await $`echo ${args}`;
console.log(stdout);
//=> 'unicorns & rainbows!'
```
@example <caption>With options</caption>
```
import {$} from 'execa';
await $({stdio: 'inherit'})`echo unicorns`;
//=> 'unicorns'
```
@example <caption>Shared options</caption>
```
import {$} from 'execa';
const $$ = $({stdio: 'inherit'});
await $$`echo unicorns`;
//=> 'unicorns'
await $$`echo rainbows`;
//=> 'rainbows'
```
*/
export const $: Execa$;
/**
Execute a Node.js script as a child process.
Arguments are automatically escaped. They can contain any character, including spaces.
This is the preferred method when executing Node.js files.
Like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options):
- the current Node version and options are used. This can be overridden using the `nodePath` and `nodeOptions` options.
- the `shell` option cannot be used
- an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to `stdio`
@param scriptPath - Node.js script to execute.
@param arguments - Arguments to pass to `scriptPath` on execution.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example
```
import {execaNode} from 'execa';
await execaNode('scriptPath', ['argument']);
```
*/
// Default: string output.
export function execaNode(
scriptPath: string,
arguments?: readonly string[],
options?: NodeOptions
): ExecaChildProcess;
// Buffer-output variant (`encoding: 'buffer'`/`null`).
export function execaNode(
scriptPath: string,
arguments?: readonly string[],
options?: NodeOptions<BufferEncodingOption>
): ExecaChildProcess<Buffer>;
// Shorthands without the `arguments` array.
export function execaNode(scriptPath: string, options?: NodeOptions): ExecaChildProcess;
export function execaNode(scriptPath: string, options?: NodeOptions<BufferEncodingOption>): ExecaChildProcess<Buffer>;

View File

@ -1,309 +0,0 @@
import {Buffer} from 'node:buffer';
import path from 'node:path';
import childProcess from 'node:child_process';
import process from 'node:process';
import crossSpawn from 'cross-spawn';
import stripFinalNewline from '../strip-final-newline';
import {npmRunPathEnv} from '../npm-run-path';
import onetime from '../onetime';
import {makeError} from './lib/error.js';
import {normalizeStdio, normalizeStdioNode} from './lib/stdio.js';
import {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} from './lib/kill.js';
import {addPipeMethods} from './lib/pipe.js';
import {handleInput, getSpawnedResult, makeAllStream, handleInputSync} from './lib/stream.js';
import {mergePromise, getSpawnedPromise} from './lib/promise.js';
import {joinCommand, parseCommand, parseTemplates, getEscapedCommand} from './lib/command.js';
import {logCommand, verboseDefault} from './lib/verbose.js';
// Default `maxBuffer`: 100 MB of captured stdout/stderr.
const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100;

// Build the environment for the child process: merge `process.env` with the
// user-provided `env` (unless `extendEnv` is falsy), then optionally prepend
// local `node_modules/.bin` entries via `npmRunPathEnv` when `preferLocal` is set.
const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => {
	const env = extendEnv ? {...process.env, ...envOption} : envOption;
	return preferLocal ? npmRunPathEnv({env, cwd: localDir, execPath}) : env;
};
// Normalize `execa()`/`execaSync()` arguments: run them through `cross-spawn`
// (Windows shebang/PATHEXT handling), layer execa's defaults under the user
// options, resolve the final environment and stdio layout, and work around a
// cmd.exe quirk. Returns `{file, args, options, parsed}`.
const handleArguments = (file, args, options = {}) => {
	const parsed = crossSpawn._parse(file, args, options);
	file = parsed.command;
	args = parsed.args;

	options = {
		maxBuffer: DEFAULT_MAX_BUFFER,
		buffer: true,
		stripFinalNewline: true,
		extendEnv: true,
		preferLocal: false,
		localDir: parsed.options.cwd || process.cwd(),
		execPath: process.execPath,
		encoding: 'utf8',
		reject: true,
		cleanup: true,
		all: false,
		windowsHide: true,
		verbose: verboseDefault,
		...parsed.options,
	};

	options.env = getEnv(options);
	options.stdio = normalizeStdio(options);

	// cmd.exe echoes commands unless `/q` (quiet) is passed (#116).
	if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') {
		args.unshift('/q');
	}

	return {file, args, options, parsed};
};
// Post-process a captured stdout/stderr value. Non-string/non-Buffer values
// (e.g. when the stream was not buffered) become `undefined` on success or ''
// on failure; otherwise the final newline is stripped when requested.
const handleOutput = (options, value, error) => {
	const isCaptured = typeof value === 'string' || Buffer.isBuffer(value);
	if (!isCaptured) {
		// When `execaSync()` errors, we normalize it to '' to mimic `execa()`
		return error === undefined ? undefined : '';
	}

	return options.stripFinalNewline ? stripFinalNewline(value) : value;
};
/**
Spawn `file` with `args` (async flavor).
Returns the `ChildProcess` from `child_process.spawn()`, augmented into a
promise resolving/rejecting with the execa result object, plus `kill()`,
`cancel()`, `.all` and the `pipe*()` helper methods.
*/
export function execa(file, args, options) {
const parsed = handleArguments(file, args, options);
const command = joinCommand(file, args);
const escapedCommand = getEscapedCommand(file, args);
logCommand(escapedCommand, parsed.options);
validateTimeout(parsed.options);
let spawned;
try {
spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options);
} catch (error) {
// Ensure the returned error is always both a promise and a child process
const dummySpawned = new childProcess.ChildProcess();
const errorPromise = Promise.reject(makeError({
error,
stdout: '',
stderr: '',
all: '',
command,
escapedCommand,
parsed,
timedOut: false,
isCanceled: false,
killed: false,
}));
mergePromise(dummySpawned, errorPromise);
return dummySpawned;
}
// Exit promise, with timeout and exit-handler cleanup layered on top of it.
const spawnedPromise = getSpawnedPromise(spawned);
const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise);
const processDone = setExitHandler(spawned, parsed.options, timedPromise);
const context = {isCanceled: false};
// Wrap `kill()` (graceful SIGTERM -> SIGKILL escalation) and add `cancel()`.
spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned));
spawned.cancel = spawnedCancel.bind(null, spawned, context);
// Await exit status and buffered streams, then build the result or the error.
const handlePromise = async () => {
const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone);
const stdout = handleOutput(parsed.options, stdoutResult);
const stderr = handleOutput(parsed.options, stderrResult);
const all = handleOutput(parsed.options, allResult);
if (error || exitCode !== 0 || signal !== null) {
const returnedError = makeError({
error,
exitCode,
signal,
stdout,
stderr,
all,
command,
escapedCommand,
parsed,
timedOut,
isCanceled: context.isCanceled || (parsed.options.signal ? parsed.options.signal.aborted : false),
killed: spawned.killed,
});
// `reject: false` resolves with the error object instead of throwing it.
if (!parsed.options.reject) {
return returnedError;
}
throw returnedError;
}
return {
command,
escapedCommand,
exitCode: 0,
stdout,
stderr,
all,
failed: false,
timedOut: false,
isCanceled: false,
killed: false,
};
};
// Streams may only be consumed once, even if the promise is awaited twice.
const handlePromiseOnce = onetime(handlePromise);
handleInput(spawned, parsed.options);
spawned.all = makeAllStream(spawned, parsed.options);
addPipeMethods(spawned);
mergePromise(spawned, handlePromiseOnce);
return spawned;
}
/**
Synchronous flavor of `execa()`, built on `child_process.spawnSync()`.
Returns the result object directly, or throws an error enriched by
`makeError()` (returned instead of thrown when `reject: false`).
*/
export function execaSync(file, args, options) {
const parsed = handleArguments(file, args, options);
const command = joinCommand(file, args);
const escapedCommand = getEscapedCommand(file, args);
logCommand(escapedCommand, parsed.options);
const input = handleInputSync(parsed.options);
let result;
try {
result = childProcess.spawnSync(parsed.file, parsed.args, {...parsed.options, input});
} catch (error) {
// Spawn failure (e.g. invalid options): normalize into an execa error.
throw makeError({
error,
stdout: '',
stderr: '',
all: '',
command,
escapedCommand,
parsed,
timedOut: false,
isCanceled: false,
killed: false,
});
}
const stdout = handleOutput(parsed.options, result.stdout, result.error);
const stderr = handleOutput(parsed.options, result.stderr, result.error);
if (result.error || result.status !== 0 || result.signal !== null) {
const error = makeError({
stdout,
stderr,
error: result.error,
signal: result.signal,
exitCode: result.status,
command,
escapedCommand,
parsed,
timedOut: result.error && result.error.code === 'ETIMEDOUT',
isCanceled: false,
killed: result.signal !== null,
});
// `reject: false` returns the error object instead of throwing it.
if (!parsed.options.reject) {
return error;
}
throw error;
}
return {
command,
escapedCommand,
exitCode: 0,
stdout,
stderr,
failed: false,
timedOut: false,
isCanceled: false,
killed: false,
};
}
// For `$`-style scripts: default stdin to 'inherit' unless the user already
// configured `input`, `inputFile` or `stdio` themselves.
const normalizeScriptStdin = ({input, inputFile, stdio}) => {
	const hasStdinConfig = input !== undefined || inputFile !== undefined || stdio !== undefined;
	return hasStdinConfig ? {} : {stdin: 'inherit'};
};

// Script-friendly defaults (`preferLocal`, inherited stdin), overridable by `options`.
const normalizeScriptOptions = (options = {}) => ({
	preferLocal: true,
	...normalizeScriptStdin(options),
	...options,
});
// Build a `$` tagged-template executor bound to `options`.
// Called with an options object, `$` returns a new instance with merged
// defaults; called as a template tag, it parses and runs the command
// asynchronously. `$.sync` is the synchronous variant and only accepts the
// template form (options must be set via `$(options).sync`).
function create$(options) {
	const $ = (templatesOrOptions, ...expressions) => {
		if (Array.isArray(templatesOrOptions)) {
			// `` $`cmd ...` `` form: parse the template and execute.
			const [file, ...args] = parseTemplates(templatesOrOptions, expressions);
			return execa(file, args, normalizeScriptOptions(options));
		}

		// `$({...})` form: derive a new instance with merged defaults.
		return create$({...options, ...templatesOrOptions});
	};

	$.sync = (templates, ...expressions) => {
		if (!Array.isArray(templates)) {
			throw new TypeError('Please use $(options).sync`command` instead of $.sync(options)`command`.');
		}

		const [file, ...args] = parseTemplates(templates, expressions);
		return execaSync(file, args, normalizeScriptOptions(options));
	};

	return $;
}

// The default instance, with no pre-set options.
export const $ = create$();
/**
Run a whole command line (`"file arg1 arg2"`) asynchronously.
Tokenization is delegated to `parseCommand()`; backslash-escaped spaces are preserved.
*/
export function execaCommand(command, options) {
	const tokens = parseCommand(command);
	return execa(tokens[0], tokens.slice(1), options);
}

/**
Synchronous counterpart of `execaCommand()`.
*/
export function execaCommandSync(command, options) {
	const tokens = parseCommand(command);
	return execaSync(tokens[0], tokens.slice(1), options);
}
/**
Run a Node.js script as a child process, mirroring `child_process.fork()`:
the current Node binary and exec arguments are reused by default (minus any
`--inspect*` flag, which would clash with the parent's debugger port), and an
`ipc` channel is added through `normalizeStdioNode()`.
Also accepts the `execaNode(scriptPath, options)` shorthand (no `args`).
*/
export function execaNode(scriptPath, args, options = {}) {
	// Support the `(scriptPath, options)` signature.
	if (args && typeof args === 'object' && !Array.isArray(args)) {
		options = args;
		args = [];
	}

	const stdio = normalizeStdioNode(options);
	const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect'));
	const {nodePath = process.execPath, nodeOptions = defaultExecArgv} = options;

	const nodeArguments = [
		...nodeOptions,
		scriptPath,
		...(Array.isArray(args) ? args : []),
	];
	const nodeProcessOptions = {
		...options,
		stdin: undefined,
		stdout: undefined,
		stderr: undefined,
		stdio,
		shell: false,
	};
	return execa(nodePath, nodeArguments, nodeProcessOptions);
}

View File

@ -1,119 +0,0 @@
import {Buffer} from 'node:buffer';
import {ChildProcess} from 'node:child_process';
// Flatten `file` + optional `args` into a single token list.
const normalizeArgs = (file, args = []) => (Array.isArray(args) ? [file, ...args] : [file]);

// Tokens made only of word characters, dots and dashes need no quoting.
const NO_ESCAPE_REGEXP = /^[\w.-]+$/;

// Quote an argument for display purposes (basic escaping — NOT shell-safe):
// wrap in double quotes and backslash-escape embedded double quotes.
const escapeArg = arg => {
	const needsQuoting = typeof arg === 'string' && !NO_ESCAPE_REGEXP.test(arg);
	return needsQuoting ? `"${arg.replaceAll('"', '\\"')}"` : arg;
};

// Human-readable command string, unescaped (for logging).
export const joinCommand = (file, args) => normalizeArgs(file, args).join(' ');

// Human-readable command string with basic quoting (for copy/paste debugging).
export const getEscapedCommand = (file, args) => normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' ');
// One or more spaces act as a single token delimiter.
const SPACES_REGEXP = / +/g;

// Split a raw command line (`execaCommand()`) into tokens.
// A backslash immediately before a space escapes it, gluing the two
// surrounding fragments back into one token.
export const parseCommand = command => command
	.trim()
	.split(SPACES_REGEXP)
	.reduce((tokens, token) => {
		const previousToken = tokens.at(-1);
		if (previousToken !== undefined && previousToken.endsWith('\\')) {
			// Escaped space: merge with the previous token, dropping the backslash.
			return [...tokens.slice(0, -1), `${previousToken.slice(0, -1)} ${token}`];
		}

		return [...tokens, token];
	}, []);
// Convert a template-literal interpolation into a command token.
// Accepts strings, numbers and execa results (any non-ChildProcess object
// exposing a string/Buffer `stdout`); anything else raises a `TypeError`.
const parseExpression = expression => {
	const typeOfExpression = typeof expression;

	switch (typeOfExpression) {
		case 'string': {
			return expression;
		}

		case 'number': {
			return String(expression);
		}

		default: {
			break;
		}
	}

	const isResultLike = typeOfExpression === 'object'
		&& expression !== null
		&& !(expression instanceof ChildProcess)
		&& 'stdout' in expression;
	if (isResultLike) {
		const {stdout} = expression;
		if (typeof stdout === 'string') {
			return stdout;
		}

		if (Buffer.isBuffer(stdout)) {
			return stdout.toString();
		}

		throw new TypeError(`Unexpected "${typeof stdout}" stdout in template expression`);
	}

	throw new TypeError(`Unexpected "${typeOfExpression}" in template expression`);
};
const concatTokens = (tokens, nextTokens, isNew) => isNew || tokens.length === 0 || nextTokens.length === 0
? [...tokens, ...nextTokens]
: [
...tokens.slice(0, -1),
`${tokens.at(-1)}${nextTokens[0]}`,
...nextTokens.slice(1),
];
const parseTemplate = ({templates, expressions, tokens, index, template}) => {
const templateString = template ?? templates.raw[index];
const templateTokens = templateString.split(SPACES_REGEXP).filter(Boolean);
const newTokens = concatTokens(
tokens,
templateTokens,
templateString.startsWith(' '),
);
if (index === expressions.length) {
return newTokens;
}
const expression = expressions[index];
const expressionTokens = Array.isArray(expression)
? expression.map(expression => parseExpression(expression))
: [parseExpression(expression)];
return concatTokens(
newTokens,
expressionTokens,
templateString.endsWith(' '),
);
};
export const parseTemplates = (templates, expressions) => {
let tokens = [];
for (const [index, template] of templates.entries()) {
tokens = parseTemplate({templates, expressions, tokens, index, template});
}
return tokens;
};

View File

@ -1,87 +0,0 @@
import process from 'node:process';
import {signalsByName} from '../../human-signals';
// Human-readable failure reason used as the start of the error message,
// checked from most to least specific cause.
const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => {
	if (timedOut) {
		return `timed out after ${timeout} milliseconds`;
	}

	if (isCanceled) {
		return 'was canceled';
	}

	if (errorCode !== undefined) {
		return `failed with ${errorCode}`;
	}

	if (signal !== undefined) {
		return `was killed with ${signal} (${signalDescription})`;
	}

	return exitCode === undefined ? 'failed' : `failed with exit code ${exitCode}`;
};
// Build the rich `Error` attached to a failed execa result.
// Augments (or creates) `error` with execa's result properties:
// `shortMessage`, `command`, `escapedCommand`, `exitCode`, `signal`,
// `signalDescription`, `stdout`, `stderr`, `cwd`, `all`, `failed`,
// `timedOut`, `isCanceled` and `killed`.
export const makeError = ({
	stdout,
	stderr,
	all,
	error,
	signal,
	exitCode,
	command,
	escapedCommand,
	timedOut,
	isCanceled,
	killed,
	parsed: {options: {timeout, cwd = process.cwd()}},
}) => {
	// `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`.
	// We normalize them to `undefined`
	exitCode = exitCode === null ? undefined : exitCode;
	signal = signal === null ? undefined : signal;
	// Human-readable signal name, e.g. "Termination" for SIGTERM.
	const signalDescription = signal === undefined ? undefined : signalsByName[signal].description;
	const errorCode = error && error.code;
	const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled});
	const execaMessage = `Command ${prefix}: ${command}`;
	// Works across realms, unlike `error instanceof Error`.
	const isError = Object.prototype.toString.call(error) === '[object Error]';
	const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage;
	const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n');
	if (isError) {
		// Keep the original message reachable before overwriting it.
		error.originalMessage = error.message;
		error.message = message;
	} else {
		error = new Error(message);
	}
	error.shortMessage = shortMessage;
	error.command = command;
	error.escapedCommand = escapedCommand;
	error.exitCode = exitCode;
	error.signal = signal;
	error.signalDescription = signalDescription;
	error.stdout = stdout;
	error.stderr = stderr;
	error.cwd = cwd;
	if (all !== undefined) {
		error.all = all;
	}
	// `bufferedData` is an internal stream-buffering detail; hide it.
	if ('bufferedData' in error) {
		delete error.bufferedData;
	}
	error.failed = true;
	error.timedOut = Boolean(timedOut);
	error.isCanceled = isCanceled;
	// A timeout kill is reported as `timedOut`, not `killed`.
	error.killed = killed && !timedOut;
	return error;
};

View File

@ -1,102 +0,0 @@
import os from 'node:os';
import {onExit} from 'signal-exit';
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;

// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior
export const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => {
	const killResult = kill(signal);
	setKillTimeout(kill, signal, options, killResult);
	return killResult;
};

// Schedule a follow-up SIGKILL when a graceful SIGTERM was delivered.
const setKillTimeout = (kill, signal, options, killResult) => {
	if (!shouldForceKill(signal, options, killResult)) {
		return;
	}

	const timeoutId = setTimeout(() => {
		kill('SIGKILL');
	}, getForceKillAfterTimeout(options));

	// Guarded because there's no `.unref()` when `execa` is used in the renderer
	// process in Electron. This cannot be tested since we don't run tests in
	// Electron.
	// istanbul ignore else
	if (timeoutId.unref) {
		timeoutId.unref();
	}
};

// Only force-kill when the first signal was SIGTERM, the option is enabled,
// and the initial `kill()` actually succeeded.
const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) =>
	isSigterm(signal) && forceKillAfterTimeout !== false && killResult;

const isSigterm = signal => signal === os.constants.signals.SIGTERM
	|| (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM');

// Resolve the `forceKillAfterTimeout` option to a delay in milliseconds.
const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
	if (forceKillAfterTimeout === true) {
		return DEFAULT_FORCE_KILL_TIMEOUT;
	}

	if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) {
		throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`);
	}

	return forceKillAfterTimeout;
};
// `childProcess.cancel()`
// Kill the process; on success, record the cancellation so the failure is
// later reported as "was canceled" instead of "was killed".
export const spawnedCancel = (spawned, context) => {
	if (spawned.kill()) {
		context.isCanceled = true;
	}
};
// Kill the process and reject the spawned promise with a timeout error.
const timeoutKill = (spawned, signal, reject) => {
	spawned.kill(signal);
	reject(Object.assign(new Error('Timed out'), {timedOut: true, signal}));
};

// `timeout` option handling
export const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => {
	if (timeout === undefined || timeout === 0) {
		return spawnedPromise;
	}

	let timer;
	const rejectOnTimeout = new Promise((resolve, reject) => {
		timer = setTimeout(() => {
			timeoutKill(spawned, killSignal, reject);
		}, timeout);
	});

	// Whichever way the process settles, the pending timer must be cleared.
	const guardedSpawnedPromise = spawnedPromise.finally(() => {
		clearTimeout(timer);
	});

	return Promise.race([rejectOnTimeout, guardedSpawnedPromise]);
};

export const validateTimeout = ({timeout}) => {
	const isValid = timeout === undefined || (Number.isFinite(timeout) && timeout >= 0);
	if (!isValid) {
		throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
	}
};
// `cleanup` option handling
// Kill the child when the parent process exits, unless `detached` is set.
export const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => {
	if (!cleanup || detached) {
		return timedPromise;
	}

	const removeExitHandler = onExit(() => {
		spawned.kill();
	});

	// Unregister the hook once the child has settled on its own.
	return timedPromise.finally(() => {
		removeExitHandler();
	});
};

View File

@ -1,42 +0,0 @@
import {createWriteStream} from 'node:fs';
import {ChildProcess} from 'node:child_process';
import {isWritableStream} from '../../is-stream';
// An execa child process is a `ChildProcess` that is also thenable.
const isExecaChildProcess = target => target instanceof ChildProcess && typeof target.then === 'function';

// Pipe one of the child's output streams to a file path, a writable
// stream, or another execa child process's stdin.
const pipeToTarget = (spawned, streamName, destination) => {
	if (typeof destination === 'string') {
		spawned[streamName].pipe(createWriteStream(destination));
		return spawned;
	}

	if (isWritableStream(destination)) {
		spawned[streamName].pipe(destination);
		return spawned;
	}

	if (!isExecaChildProcess(destination)) {
		throw new TypeError('The second argument must be a string, a stream or an Execa child process.');
	}

	if (!isWritableStream(destination.stdin)) {
		throw new TypeError('The target child process\'s stdin must be available.');
	}

	spawned[streamName].pipe(destination.stdin);
	return destination;
};

// Attach `pipeStdout()` / `pipeStderr()` / `pipeAll()` helpers for the
// streams this child process actually exposes.
export const addPipeMethods = spawned => {
	const bindings = [
		['pipeStdout', 'stdout', spawned.stdout !== null],
		['pipeStderr', 'stderr', spawned.stderr !== null],
		['pipeAll', 'all', spawned.all !== undefined],
	];

	for (const [methodName, streamName, isAvailable] of bindings) {
		if (isAvailable) {
			spawned[methodName] = pipeToTarget.bind(undefined, spawned, streamName);
		}
	}
};

View File

@ -1,36 +0,0 @@
// eslint-disable-next-line unicorn/prefer-top-level-await
const nativePromisePrototype = (async () => {})().constructor.prototype;

// Descriptors for `then`/`catch`/`finally`, copied from the native
// `Promise` prototype so the mixin behaves like a real promise.
const descriptors = ['then', 'catch', 'finally'].map(property => [
	property,
	Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property),
]);

// The return value is a mixin of `childProcess` and `Promise`
export const mergePromise = (spawned, promise) => {
	for (const [property, descriptor] of descriptors) {
		// Starting the main `promise` is deferred to avoid consuming streams
		const value = typeof promise === 'function'
			? (...args) => Reflect.apply(descriptor.value, promise(), args)
			: descriptor.value.bind(promise);

		Reflect.defineProperty(spawned, property, {...descriptor, value});
	}
};

// Use promises instead of `child_process` events
export const getSpawnedPromise = spawned => new Promise((resolve, reject) => {
	spawned.on('exit', (exitCode, signal) => resolve({exitCode, signal}));
	spawned.on('error', error => reject(error));

	if (spawned.stdin) {
		spawned.stdin.on('error', error => reject(error));
	}
});

View File

@ -1,49 +0,0 @@
const aliases = ['stdin', 'stdout', 'stderr'];
const hasAlias = options => aliases.some(alias => options[alias] !== undefined);
export const normalizeStdio = options => {
if (!options) {
return;
}
const {stdio} = options;
if (stdio === undefined) {
return aliases.map(alias => options[alias]);
}
if (hasAlias(options)) {
throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`);
}
if (typeof stdio === 'string') {
return stdio;
}
if (!Array.isArray(stdio)) {
throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
}
const length = Math.max(stdio.length, aliases.length);
return Array.from({length}, (value, index) => stdio[index]);
};
// `ipc` is pushed unless it is already present
export const normalizeStdioNode = options => {
const stdio = normalizeStdio(options);
if (stdio === 'ipc') {
return 'ipc';
}
if (stdio === undefined || typeof stdio === 'string') {
return [stdio, stdio, stdio, 'ipc'];
}
if (stdio.includes('ipc')) {
return stdio;
}
return [...stdio, 'ipc'];
};

View File

@ -1,133 +0,0 @@
import {createReadStream, readFileSync} from 'node:fs';
import {setTimeout} from 'node:timers/promises';
import {isStream} from '../../is-stream';
import getStream, {getStreamAsBuffer} from '../../get-stream';
import mergeStream from 'merge-stream';
// `input` and `inputFile` are mutually exclusive.
const validateInputOptions = input => {
	if (input !== undefined) {
		throw new TypeError('The `input` and `inputFile` options cannot be both set.');
	}
};

// Resolve the stdin payload synchronously, reading `inputFile` when set.
const getInputSync = ({input, inputFile}) => {
	if (typeof inputFile === 'string') {
		validateInputOptions(input);
		return readFileSync(inputFile);
	}

	return input;
};

// `input` and `inputFile` option in sync mode
export const handleInputSync = options => {
	const input = getInputSync(options);

	if (isStream(input)) {
		throw new TypeError('The `input` option cannot be a stream in sync mode');
	}

	return input;
};
// Resolve the stdin payload, opening a read stream for `inputFile` when set.
const getInput = ({input, inputFile}) => {
	if (typeof inputFile === 'string') {
		validateInputOptions(input);
		return createReadStream(inputFile);
	}

	return input;
};

// `input` and `inputFile` option in async mode
export const handleInput = (spawned, options) => {
	const input = getInput(options);

	if (input === undefined) {
		return;
	}

	if (isStream(input)) {
		input.pipe(spawned.stdin);
		return;
	}

	spawned.stdin.end(input);
};
// `all` interleaves `stdout` and `stderr`
// Returns `undefined` when the option is off or no output stream exists.
export const makeAllStream = (spawned, {all}) => {
	const hasOutput = Boolean(spawned.stdout) || Boolean(spawned.stderr);
	if (!all || !hasOutput) {
		return;
	}

	const combined = mergeStream();

	for (const stream of [spawned.stdout, spawned.stderr]) {
		if (stream) {
			combined.add(stream);
		}
	}

	return combined;
};
// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
const getBufferedData = async (stream, streamPromise) => {
	// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve
	if (!stream || streamPromise === undefined) {
		return;
	}
	// Wait for the `all` stream to receive the last chunk before destroying the stream
	await setTimeout(0);
	stream.destroy();
	try {
		return await streamPromise;
	} catch (error) {
		// Partial contents gathered before the failure (attached by get-stream).
		return error.bufferedData;
	}
};
// Buffer one output stream according to `encoding`; returns `undefined`
// when the stream is absent or buffering is disabled.
const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {
	if (!stream || !buffer) {
		return;
	}
	// eslint-disable-next-line unicorn/text-encoding-identifier-case
	if (encoding === 'utf8' || encoding === 'utf-8') {
		return getStream(stream, {maxBuffer});
	}
	if (encoding === null || encoding === 'buffer') {
		return getStreamAsBuffer(stream, {maxBuffer});
	}
	return applyEncoding(stream, maxBuffer, encoding);
};
// Other encodings (e.g. `base64`): buffer raw bytes first, then decode.
const applyEncoding = async (stream, maxBuffer, encoding) => {
	const buffer = await getStreamAsBuffer(stream, {maxBuffer});
	return buffer.toString(encoding);
};
// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
export const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {
	const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});
	const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});
	// `all` holds both streams interleaved, so allow it to grow twice as large.
	const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});
	try {
		return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);
	} catch (error) {
		// On failure, still return whatever output was buffered so far.
		return Promise.all([
			{error, signal: error.signal, timedOut: error.timedOut},
			getBufferedData(stdout, stdoutPromise),
			getBufferedData(stderr, stderrPromise),
			getBufferedData(all, allPromise),
		]);
	}
};

View File

@ -1,19 +0,0 @@
import {debuglog} from 'node:util';
import process from 'node:process';
// Verbose logging defaults on when `NODE_DEBUG=execa` is set.
export const verboseDefault = debuglog('execa').enabled;

// Left-pad a numeric field with zeros to a fixed width.
const padField = (field, padding) => String(field).padStart(padding, '0');

// Local-time `HH:MM:SS.mmm` stamp for log lines.
const getTimestamp = () => {
	const now = new Date();
	const time = [
		padField(now.getHours(), 2),
		padField(now.getMinutes(), 2),
		padField(now.getSeconds(), 2),
	].join(':');
	return `${time}.${padField(now.getMilliseconds(), 3)}`;
};

// Print the command being run to stderr when the `verbose` option is set.
export const logCommand = (escapedCommand, {verbose}) => {
	if (!verbose) {
		return;
	}

	process.stderr.write(`[${getTimestamp()}] ${escapedCommand}\n`);
};

View File

@ -1,17 +0,0 @@
import { execa } from "."
import path from "node:path"
/**
 * Thin wrapper around `execa` that runs executables relative to a
 * package's install directory.
 */
export default class ExecLib {
    /**
     * @param {object} manifest - Package manifest; must expose `install_path`.
     */
    constructor(manifest) {
        this.manifest = manifest
    }

    /**
     * Execute a file resolved against `manifest.install_path`, using that
     * directory as the working directory.
     *
     * @param {string} file - Executable path, relative to `install_path`.
     * @param {Array} [args=[]] - Arguments for the process. Defaults to `[]`
     *   so callers may omit it (spreading `undefined` would throw a TypeError).
     * @param {object} [options] - Extra `execa` options; `cwd` is always
     *   overridden with `install_path`.
     * @returns {Promise} The `execa` child-process promise.
     */
    async file(file, args = [], options) {
        file = path.resolve(this.manifest.install_path, file)

        return await execa(file, [...args], {
            ...options,
            cwd: this.manifest.install_path,
        })
    }
}

View File

@ -1,84 +0,0 @@
import {getStreamContents} from './contents.js';
import {noop, throwObjectStream, getLengthProp} from './utils.js';
// Collect the stream's binary contents into a single `ArrayBuffer`.
export async function getStreamAsArrayBuffer(stream, options) {
	return getStreamContents(stream, arrayBufferMethods, options);
}
// The accumulator starts as an empty, zero-length buffer.
const initArrayBuffer = () => ({contents: new ArrayBuffer(0)});
// String chunks are encoded to UTF-8 bytes.
const useTextEncoder = chunk => textEncoder.encode(chunk);
const textEncoder = new TextEncoder();
// Buffers/ArrayBuffers become plain `Uint8Array` views over their bytes.
const useUint8Array = chunk => new Uint8Array(chunk);
// DataViews and typed arrays may view a sub-range of their backing buffer,
// so the offset and length must be honored.
const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);
// `contents` is an increasingly growing `Uint8Array`.
const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => {
	const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
	new Uint8Array(newContents).set(convertedChunk, previousLength);
	return newContents;
};
// Without `ArrayBuffer.resize()`, `contents` size is always a power of 2.
// This means its last bytes are zeroes (not stream data), which need to be
// trimmed at the end with `ArrayBuffer.slice()`.
const resizeArrayBufferSlow = (contents, length) => {
	if (length <= contents.byteLength) {
		return contents;
	}
	const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
	return arrayBuffer;
};
// With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
// the stream data. It does not include extraneous zeroes to trim at the end.
// The underlying `ArrayBuffer` does allocate a number of bytes that is a power
// of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
const resizeArrayBuffer = (contents, length) => {
	if (length <= contents.maxByteLength) {
		contents.resize(length);
		return contents;
	}
	const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
	return arrayBuffer;
};
// Retrieve the closest `length` that is both >= and a power of 2
const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR));
const SCALE_FACTOR = 2;
// The slow path over-allocates, so trailing zero bytes must be sliced off.
const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length);
// `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
// (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
// eslint-disable-next-line no-warning-comments
// TODO: remove after dropping support for Node 20.
// eslint-disable-next-line no-warning-comments
// TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available
const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype;
// Method table consumed by `getStreamContents()`.
const arrayBufferMethods = {
	init: initArrayBuffer,
	convertChunk: {
		string: useTextEncoder,
		buffer: useUint8Array,
		arrayBuffer: useUint8Array,
		dataView: useUint8ArrayWithOffset,
		typedArray: useUint8ArrayWithOffset,
		others: throwObjectStream,
	},
	getSize: getLengthProp,
	truncateChunk: truncateArrayBufferChunk,
	addChunk: addArrayBufferChunk,
	getFinalChunk: noop,
	finalize: finalizeArrayBuffer,
};

View File

@ -1,32 +0,0 @@
import {getStreamContents} from './contents.js';
import {identity, noop, getContentsProp} from './utils.js';
// Collect the stream's chunks into an array; the only variant that also
// supports object-mode streams.
export async function getStreamAsArray(stream, options) {
	return getStreamContents(stream, arrayMethods, options);
}

// The accumulator starts as an empty array.
const initArray = () => ({contents: []});

// Every chunk counts as size 1, so `maxBuffer` limits the chunk count.
const increment = () => 1;

// Append in place; the same array instance is reused across chunks.
const addArrayChunk = (convertedChunk, {contents}) => {
	contents.push(convertedChunk);
	return contents;
};

const chunkTypes = ['string', 'buffer', 'arrayBuffer', 'dataView', 'typedArray', 'others'];

const arrayMethods = {
	init: initArray,
	// Arrays accept every chunk type verbatim, including object-mode values.
	convertChunk: Object.fromEntries(chunkTypes.map(type => [type, identity])),
	getSize: increment,
	truncateChunk: noop,
	addChunk: addArrayChunk,
	getFinalChunk: noop,
	finalize: getContentsProp,
};

View File

@ -1,20 +0,0 @@
import {getStreamAsArrayBuffer} from './array-buffer.js';
// Node-only: collect the stream into a `Buffer` by wrapping the
// `ArrayBuffer` implementation.
export async function getStreamAsBuffer(stream, options) {
	if (!('Buffer' in globalThis)) {
		throw new Error('getStreamAsBuffer() is only supported in Node.js');
	}

	try {
		const arrayBuffer = await getStreamAsArrayBuffer(stream, options);
		return arrayBufferToNodeBuffer(arrayBuffer);
	} catch (error) {
		// Keep `bufferedData` consistent with the return type.
		if (error.bufferedData !== undefined) {
			error.bufferedData = arrayBufferToNodeBuffer(error.bufferedData);
		}

		throw error;
	}
}

// eslint-disable-next-line n/prefer-global/buffer
const arrayBufferToNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer);

View File

@ -1,101 +0,0 @@
// Drive a stream (or any async iterable) through a table of accumulator
// methods, enforcing `maxBuffer` and exposing partial data on failure.
export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
	if (!isAsyncIterable(stream)) {
		throw new Error('The first argument must be a Readable, a ReadableStream, or an async iterable.');
	}

	const state = init();
	state.length = 0;

	try {
		for await (const chunk of stream) {
			const convertedChunk = convertChunk[getChunkType(chunk)](chunk, state);
			appendChunk({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer});
		}

		appendFinalChunk({state, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer});
		return finalize(state);
	} catch (error) {
		// Expose whatever was gathered so far, e.g. after a `MaxBufferError`.
		error.bufferedData = finalize(state);
		throw error;
	}
};

// Flush any trailing data the accumulator holds (e.g. a TextDecoder tail).
const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
	const convertedChunk = getFinalChunk(state);
	if (convertedChunk !== undefined) {
		appendChunk({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer});
	}
};

// Add one converted chunk; once `maxBuffer` would be exceeded, keep the
// truncated remainder and raise `MaxBufferError`.
const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
	const newLength = state.length + getSize(convertedChunk);

	if (newLength <= maxBuffer) {
		addNewChunk(convertedChunk, state, addChunk, newLength);
		return;
	}

	const truncatedChunk = truncateChunk(convertedChunk, maxBuffer - state.length);
	if (truncatedChunk !== undefined) {
		addNewChunk(truncatedChunk, state, addChunk, maxBuffer);
	}

	throw new MaxBufferError();
};

const addNewChunk = (convertedChunk, state, addChunk, newLength) => {
	state.contents = addChunk(convertedChunk, state, newLength);
	state.length = newLength;
};

// Anything exposing `Symbol.asyncIterator` qualifies, which covers Node
// streams and WHATWG `ReadableStream`s alike.
const isAsyncIterable = stream => typeof stream === 'object'
	&& stream !== null
	&& typeof stream[Symbol.asyncIterator] === 'function';

const {toString: objectToString} = Object.prototype;

// Classify a chunk so the matching `convertChunk` method can be picked.
const getChunkType = chunk => {
	const typeOfChunk = typeof chunk;

	if (typeOfChunk === 'string') {
		return 'string';
	}

	if (typeOfChunk !== 'object' || chunk === null) {
		return 'others';
	}

	// eslint-disable-next-line n/prefer-global/buffer
	if (globalThis.Buffer?.isBuffer(chunk)) {
		return 'buffer';
	}

	const prototypeName = objectToString.call(chunk);

	if (prototypeName === '[object ArrayBuffer]') {
		return 'arrayBuffer';
	}

	if (prototypeName === '[object DataView]') {
		return 'dataView';
	}

	const isTypedArrayLike = Number.isInteger(chunk.byteLength)
		&& Number.isInteger(chunk.byteOffset)
		&& objectToString.call(chunk.buffer) === '[object ArrayBuffer]';
	return isTypedArrayLike ? 'typedArray' : 'others';
};

export class MaxBufferError extends Error {
	name = 'MaxBufferError';

	constructor() {
		super('maxBuffer exceeded');
	}
}

View File

@ -1,119 +0,0 @@
import {type Readable} from 'node:stream';
import {type Buffer} from 'node:buffer';
export class MaxBufferError extends Error {
	readonly name: 'MaxBufferError';
	constructor();
}

type TextStreamItem = string | Buffer | ArrayBuffer | ArrayBufferView;

// Note: the type parameter was previously misspelled `SteamItem`; renaming a
// generic parameter is backward-compatible for consumers.
export type AnyStream<StreamItem = TextStreamItem> = Readable | ReadableStream<StreamItem> | AsyncIterable<StreamItem>;

export type Options = {
	/**
	Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`.

	Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length).

	@default Infinity
	*/
	readonly maxBuffer?: number;
};

/**
Get the given `stream` as a string.

@returns The stream's contents as a promise.

@example
```
import fs from 'node:fs';
import getStream from 'get-stream';

const stream = fs.createReadStream('unicorn.txt');

console.log(await getStream(stream));
// (prints the file contents)
```

@example
```
import getStream from 'get-stream';

const {body: readableStream} = await fetch('https://example.com');
console.log(await getStream(readableStream));
```

@example
```
import {opendir} from 'node:fs/promises';
import {getStreamAsArray} from 'get-stream';

const asyncIterable = await opendir(directory);
console.log(await getStreamAsArray(asyncIterable));
```
*/
export default function getStream(stream: AnyStream, options?: Options): Promise<string>;

/**
Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer).

@returns The stream's contents as a promise.

@example
```
import {getStreamAsBuffer} from 'get-stream';

const stream = fs.createReadStream('unicorn.png');
console.log(await getStreamAsBuffer(stream));
```
*/
export function getStreamAsBuffer(stream: AnyStream, options?: Options): Promise<Buffer>;

/**
Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer).

@returns The stream's contents as a promise.

@example
```
import {getStreamAsArrayBuffer} from 'get-stream';

const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArrayBuffer(readableStream));
```
*/
export function getStreamAsArrayBuffer(stream: AnyStream, options?: Options): Promise<ArrayBuffer>;

/**
Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode).

@returns The stream's contents as a promise.

@example
```
import {getStreamAsArray} from 'get-stream';

const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArray(readableStream));
```
*/
export function getStreamAsArray<Item>(stream: AnyStream<Item>, options?: Options): Promise<Item[]>;

View File

@ -1,5 +0,0 @@
export {getStreamAsArray} from './array.js';
export {getStreamAsArrayBuffer} from './array-buffer.js';
export {getStreamAsBuffer} from './buffer.js';
export {getStreamAsString as default} from './string.js';
export {MaxBufferError} from './contents.js';

View File

@ -1,98 +0,0 @@
import {Buffer} from 'node:buffer';
import {open} from 'node:fs/promises';
import {type Readable} from 'node:stream';
import fs from 'node:fs';
import {expectType, expectError, expectAssignable, expectNotAssignable} from 'tsd';
import getStream, {getStreamAsBuffer, getStreamAsArrayBuffer, getStreamAsArray, MaxBufferError, type Options, type AnyStream} from './index.js';
// Fixtures covering every supported stream-ish input type.
const nodeStream = fs.createReadStream('foo') as Readable;
const fileHandle = await open('test');
const readableStream = fileHandle.readableWebStream();
const asyncIterable = <T>(value: T): AsyncGenerator<T> => (async function * () {
	yield value;
})();
const stringAsyncIterable = asyncIterable('');
const bufferAsyncIterable = asyncIterable(Buffer.from(''));
const arrayBufferAsyncIterable = asyncIterable(new ArrayBuffer(0));
const dataViewAsyncIterable = asyncIterable(new DataView(new ArrayBuffer(0)));
const typedArrayAsyncIterable = asyncIterable(new Uint8Array([]));
const objectItem = {test: true};
const objectAsyncIterable = asyncIterable(objectItem);
// getStream(): resolves to `string` for all text-like inputs; rejects
// object-mode iterables and invalid options at the type level.
expectType<string>(await getStream(nodeStream));
expectType<string>(await getStream(nodeStream, {maxBuffer: 10}));
expectType<string>(await getStream(readableStream));
expectType<string>(await getStream(stringAsyncIterable));
expectType<string>(await getStream(bufferAsyncIterable));
expectType<string>(await getStream(arrayBufferAsyncIterable));
expectType<string>(await getStream(dataViewAsyncIterable));
expectType<string>(await getStream(typedArrayAsyncIterable));
expectError(await getStream(objectAsyncIterable));
expectError(await getStream({}));
expectError(await getStream(nodeStream, {maxBuffer: '10'}));
expectError(await getStream(nodeStream, {unknownOption: 10}));
expectError(await getStream(nodeStream, {maxBuffer: 10}, {}));
// getStreamAsBuffer(): resolves to a Node.js `Buffer`.
expectType<Buffer>(await getStreamAsBuffer(nodeStream));
expectType<Buffer>(await getStreamAsBuffer(nodeStream, {maxBuffer: 10}));
expectType<Buffer>(await getStreamAsBuffer(readableStream));
expectType<Buffer>(await getStreamAsBuffer(stringAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(bufferAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(arrayBufferAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(dataViewAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(typedArrayAsyncIterable));
expectError(await getStreamAsBuffer(objectAsyncIterable));
expectError(await getStreamAsBuffer({}));
expectError(await getStreamAsBuffer(nodeStream, {maxBuffer: '10'}));
expectError(await getStreamAsBuffer(nodeStream, {unknownOption: 10}));
expectError(await getStreamAsBuffer(nodeStream, {maxBuffer: 10}, {}));
// getStreamAsArrayBuffer(): resolves to an `ArrayBuffer`.
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(nodeStream));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: 10}));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(readableStream));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(stringAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(bufferAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(arrayBufferAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(dataViewAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(typedArrayAsyncIterable));
expectError(await getStreamAsArrayBuffer(objectAsyncIterable));
expectError(await getStreamAsArrayBuffer({}));
expectError(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: '10'}));
expectError(await getStreamAsArrayBuffer(nodeStream, {unknownOption: 10}));
expectError(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: 10}, {}));
// getStreamAsArray(): the only variant that accepts object-mode streams;
// the element type follows the iterable's item type.
expectType<any[]>(await getStreamAsArray(nodeStream));
expectType<any[]>(await getStreamAsArray(nodeStream, {maxBuffer: 10}));
expectType<any[]>(await getStreamAsArray(readableStream));
expectType<Uint8Array[]>(await getStreamAsArray(readableStream as ReadableStream<Uint8Array>));
expectType<string[]>(await getStreamAsArray(stringAsyncIterable));
expectType<Buffer[]>(await getStreamAsArray(bufferAsyncIterable));
expectType<ArrayBuffer[]>(await getStreamAsArray(arrayBufferAsyncIterable));
expectType<DataView[]>(await getStreamAsArray(dataViewAsyncIterable));
expectType<Uint8Array[]>(await getStreamAsArray(typedArrayAsyncIterable));
expectType<Array<typeof objectItem>>(await getStreamAsArray(objectAsyncIterable));
expectError(await getStreamAsArray({}));
expectError(await getStreamAsArray(nodeStream, {maxBuffer: '10'}));
expectError(await getStreamAsArray(nodeStream, {unknownOption: 10}));
expectError(await getStreamAsArray(nodeStream, {maxBuffer: 10}, {}));
// AnyStream / Options assignability contracts.
expectAssignable<AnyStream>(nodeStream);
expectAssignable<AnyStream>(readableStream);
expectAssignable<AnyStream>(stringAsyncIterable);
expectAssignable<AnyStream>(bufferAsyncIterable);
expectAssignable<AnyStream>(arrayBufferAsyncIterable);
expectAssignable<AnyStream>(dataViewAsyncIterable);
expectAssignable<AnyStream>(typedArrayAsyncIterable);
expectAssignable<AnyStream<unknown>>(objectAsyncIterable);
expectNotAssignable<AnyStream>(objectAsyncIterable);
expectAssignable<AnyStream<string>>(stringAsyncIterable);
expectNotAssignable<AnyStream<string>>(bufferAsyncIterable);
expectNotAssignable<AnyStream>({});
expectAssignable<Options>({maxBuffer: 10});
expectNotAssignable<Options>({maxBuffer: '10'});
expectNotAssignable<Options>({unknownOption: 10});
expectType<MaxBufferError>(new MaxBufferError());

View File

@ -1,36 +0,0 @@
import {getStreamContents} from './contents.js';
import {identity, getContentsProp, throwObjectStream, getLengthProp} from './utils.js';
export async function getStreamAsString(stream, options) {
return getStreamContents(stream, stringMethods, options);
}
const initString = () => ({contents: '', textDecoder: new TextDecoder()});
const useTextDecoder = (chunk, {textDecoder}) => textDecoder.decode(chunk, {stream: true});
const addStringChunk = (convertedChunk, {contents}) => contents + convertedChunk;
const truncateStringChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);
const getFinalStringChunk = ({textDecoder}) => {
const finalChunk = textDecoder.decode();
return finalChunk === '' ? undefined : finalChunk;
};
// Strategy object consumed by getStreamContents: how to initialize state,
// convert each chunk kind to a string, size/truncate chunks (for maxBuffer),
// accumulate them, and finalize the result.
const stringMethods = {
init: initString,
// Strings pass through untouched; every binary chunk kind goes through the
// streaming TextDecoder; object-mode chunks are rejected.
convertChunk: {
string: identity,
buffer: useTextDecoder,
arrayBuffer: useTextDecoder,
dataView: useTextDecoder,
typedArray: useTextDecoder,
others: throwObjectStream,
},
getSize: getLengthProp,
truncateChunk: truncateStringChunk,
addChunk: addStringChunk,
getFinalChunk: getFinalStringChunk,
finalize: getContentsProp,
};

View File

@ -1,11 +0,0 @@
// Return the argument unchanged (used where a chunk needs no conversion).
export const identity = (value) => value;

// Do nothing; placeholder for optional hooks.
export const noop = () => undefined;

// Extract the accumulated `contents` from a state object.
export const getContentsProp = (state) => state.contents;

// Object-mode streams cannot be concatenated into text/bytes; fail loudly.
export const throwObjectStream = (chunk) => {
	throw new Error(`Streams in object mode are not supported: ${String(chunk)}`);
};

// Size of a converted chunk, measured by its `length` property.
export const getLengthProp = (convertedChunk) => convertedChunk.length;

View File

@ -1,194 +0,0 @@
import fs from "node:fs"
import path from "node:path"
const ElectronGoogleOAuth2 = require("@getstation/electron-google-oauth2").default
import { ipcMain } from "electron"
import progressHandler from "progress-stream"
import { google } from "googleapis"
import { safeStorage } from "electron"
import sendToRender from "../../utils/sendToRender"
// Google Drive integration for the Electron main process.
// Stores OAuth credentials encrypted with Electron's safeStorage (under the
// "drive_auth" settings key) and exposes Drive operations to renderers over
// IPC (see `ipcHandlers`).
export default class GoogleDriveAPI {
// Build an authorized googleapis client from stored JSON credentials.
static async createClientAuthFromCredentials(credentials) {
return await google.auth.fromJSON(credentials)
}

// Create a Drive v3 client using the saved credentials.
// Throws when no credentials have been stored yet.
static async getDriveInstance() {
const credentials = await GoogleDriveAPI.readCredentials()

if (!credentials) {
throw new Error("No credentials or auth found")
}

const client = await GoogleDriveAPI.createClientAuthFromCredentials(credentials)

return google.drive({
version: "v3",
auth: client,
})
}

// Decrypt and parse the stored credentials; null when absent or when
// decryption yields nothing. Stored as latin1 text of the encrypted buffer.
static async readCredentials() {
const encryptedValue = global.SettingsStore.get("drive_auth")

if (!encryptedValue) {
return null
}

const decryptedValue = safeStorage.decryptString(Buffer.from(encryptedValue, "latin1"))

if (!decryptedValue) {
return null
}

return JSON.parse(decryptedValue)
}

// Encrypt and persist credentials in the "authorized_user" JSON shape that
// google.auth.fromJSON expects.
static async saveCredentials(credentials) {
const payload = {
type: "authorized_user",
client_id: credentials.client_id,
client_secret: credentials.client_secret,
access_token: credentials.access_token,
refresh_token: credentials.refresh_token,
}

const encryptedBuffer = safeStorage.encryptString(JSON.stringify(payload))

global.SettingsStore.set("drive_auth", encryptedBuffer.toString("latin1"))

console.log("Saved Drive credentials...",)
}

// Run the interactive OAuth window (created in init()), persist the tokens,
// and notify the renderer.
static async authorize() {
console.log("Authorizing Google Drive...")

const auth = await global._drive_oauth.openAuthWindowAndGetTokens()

await GoogleDriveAPI.saveCredentials({
...auth,
client_id: import.meta.env.MAIN_VITE_DRIVE_ID,
client_secret: import.meta.env.MAIN_VITE_DRIVE_SECRET,
})

await sendToRender("drive:authorized")

return auth
}

// Drop stored credentials and notify the renderer.
// NOTE(review): this does not revoke the token server-side — confirm intended.
static async unauthorize() {
console.log("unauthorize Google Drive...")

global.SettingsStore.delete("drive_auth")

await sendToRender("drive:unauthorized")
}

// Create the shared OAuth helper and register all IPC handlers.
// Must be called once from the main process before any operation is used.
static async init() {
console.log("Initializing Google Drive...")

global._drive_oauth = new ElectronGoogleOAuth2(
import.meta.env.MAIN_VITE_DRIVE_ID,
import.meta.env.MAIN_VITE_DRIVE_SECRET,
["https://www.googleapis.com/auth/drive.readonly"],
)

// register ipc events
for (const [key, fn] of Object.entries(GoogleDriveAPI.ipcHandlers)) {
ipcMain.handle(key, fn)
}
}

static operations = {
// List up to 10 files as { id, name } pairs.
listFiles: async () => {
const drive = await GoogleDriveAPI.getDriveInstance()

const res = await drive.files.list({
pageSize: 10,
fields: "nextPageToken, files(id, name)",
})

const files = res.data.files.map((file) => {
return {
id: file.id,
name: file.name,
}
})

return files
},
// Stream a file to `dest_path`, reporting byte progress every 500ms via
// `progressCallback` and completion/error via `callback`.
// NOTE(review): async promise-executor — a throw before the inner callbacks
// run becomes an unhandled rejection rather than rejecting this promise.
downloadFile: (file_id, dest_path, callback, progressCallback) => {
return new Promise(async (resolve, reject) => {
if (!file_id) {
throw new Error("No file_id provided")
}

if (!dest_path) {
throw new Error("No destination path provided")
}

const drive = await GoogleDriveAPI.getDriveInstance()

// metadata fetch supplies the total size used by the progress stream
const { data: metadata } = await drive.files.get({
fileId: file_id,
})

if (!metadata) {
throw new Error("Cannot retrieve file metadata")
}

let progress = progressHandler({
length: metadata.size,
time: 500,
})

const dest_stream = fs.createWriteStream(dest_path)

drive.files.get({
fileId: file_id,
alt: "media",
}, {
responseType: "stream",
}, (err, { data }) => {
if (err) {
return reject(err)
}

data
.on("error", (err) => {
if (typeof callback === "function") {
callback(err)
}

reject(err)
})
.pipe(progress).pipe(dest_stream)
})

progress.on("progress", (progress) => {
if (typeof progressCallback === "function") {
progressCallback(progress)
}
})

dest_stream.on("finish", () => {
if (typeof callback === "function") {
callback()
}

resolve()
})
})
}
}

// IPC surface registered by init().
// NOTE(review): operations.downloadFile is not exposed here — confirm whether
// renderers are meant to trigger downloads through another channel.
static ipcHandlers = {
"drive:listFiles": GoogleDriveAPI.operations.listFiles,
"drive:authorize": GoogleDriveAPI.authorize,
"drive:unauthorize": GoogleDriveAPI.unauthorize,
}
}

View File

@ -1,275 +0,0 @@
/* eslint-disable max-lines */
// List of known process signals with information about them
// Each entry describes one signal:
//   name        canonical signal name (several names may share a number)
//   number      default cross-platform number; the actual per-OS value is
//               resolved later against os.constants.signals
//   action      default disposition when the signal is unhandled
//   description human-readable summary
//   standard    which specification defined it (ansi/posix/bsd/systemv/other)
//   forced      (optional) true when the default action cannot be trapped
export const SIGNALS = [
{
name: 'SIGHUP',
number: 1,
action: 'terminate',
description: 'Terminal closed',
standard: 'posix',
},
{
name: 'SIGINT',
number: 2,
action: 'terminate',
description: 'User interruption with CTRL-C',
standard: 'ansi',
},
{
name: 'SIGQUIT',
number: 3,
action: 'core',
description: 'User interruption with CTRL-\\',
standard: 'posix',
},
{
name: 'SIGILL',
number: 4,
action: 'core',
description: 'Invalid machine instruction',
standard: 'ansi',
},
{
name: 'SIGTRAP',
number: 5,
action: 'core',
description: 'Debugger breakpoint',
standard: 'posix',
},
{
name: 'SIGABRT',
number: 6,
action: 'core',
description: 'Aborted',
standard: 'ansi',
},
{
name: 'SIGIOT',
number: 6,
action: 'core',
description: 'Aborted',
standard: 'bsd',
},
{
name: 'SIGBUS',
number: 7,
action: 'core',
description:
'Bus error due to misaligned, non-existing address or paging error',
standard: 'bsd',
},
{
name: 'SIGEMT',
number: 7,
action: 'terminate',
description: 'Command should be emulated but is not implemented',
standard: 'other',
},
{
name: 'SIGFPE',
number: 8,
action: 'core',
description: 'Floating point arithmetic error',
standard: 'ansi',
},
{
name: 'SIGKILL',
number: 9,
action: 'terminate',
description: 'Forced termination',
standard: 'posix',
forced: true,
},
{
name: 'SIGUSR1',
number: 10,
action: 'terminate',
description: 'Application-specific signal',
standard: 'posix',
},
{
name: 'SIGSEGV',
number: 11,
action: 'core',
description: 'Segmentation fault',
standard: 'ansi',
},
{
name: 'SIGUSR2',
number: 12,
action: 'terminate',
description: 'Application-specific signal',
standard: 'posix',
},
{
name: 'SIGPIPE',
number: 13,
action: 'terminate',
description: 'Broken pipe or socket',
standard: 'posix',
},
{
name: 'SIGALRM',
number: 14,
action: 'terminate',
description: 'Timeout or timer',
standard: 'posix',
},
{
name: 'SIGTERM',
number: 15,
action: 'terminate',
description: 'Termination',
standard: 'ansi',
},
{
name: 'SIGSTKFLT',
number: 16,
action: 'terminate',
description: 'Stack is empty or overflowed',
standard: 'other',
},
{
name: 'SIGCHLD',
number: 17,
action: 'ignore',
description: 'Child process terminated, paused or unpaused',
standard: 'posix',
},
{
name: 'SIGCLD',
number: 17,
action: 'ignore',
description: 'Child process terminated, paused or unpaused',
standard: 'other',
},
{
name: 'SIGCONT',
number: 18,
action: 'unpause',
description: 'Unpaused',
standard: 'posix',
forced: true,
},
{
name: 'SIGSTOP',
number: 19,
action: 'pause',
description: 'Paused',
standard: 'posix',
forced: true,
},
{
name: 'SIGTSTP',
number: 20,
action: 'pause',
description: 'Paused using CTRL-Z or "suspend"',
standard: 'posix',
},
{
name: 'SIGTTIN',
number: 21,
action: 'pause',
description: 'Background process cannot read terminal input',
standard: 'posix',
},
{
name: 'SIGBREAK',
number: 21,
action: 'terminate',
description: 'User interruption with CTRL-BREAK',
standard: 'other',
},
{
name: 'SIGTTOU',
number: 22,
action: 'pause',
description: 'Background process cannot write to terminal output',
standard: 'posix',
},
{
name: 'SIGURG',
number: 23,
action: 'ignore',
description: 'Socket received out-of-band data',
standard: 'bsd',
},
{
name: 'SIGXCPU',
number: 24,
action: 'core',
description: 'Process timed out',
standard: 'bsd',
},
{
name: 'SIGXFSZ',
number: 25,
action: 'core',
description: 'File too big',
standard: 'bsd',
},
{
name: 'SIGVTALRM',
number: 26,
action: 'terminate',
description: 'Timeout or timer',
standard: 'bsd',
},
{
name: 'SIGPROF',
number: 27,
action: 'terminate',
description: 'Timeout or timer',
standard: 'bsd',
},
{
name: 'SIGWINCH',
number: 28,
action: 'ignore',
description: 'Terminal window size changed',
standard: 'bsd',
},
{
name: 'SIGIO',
number: 29,
action: 'terminate',
description: 'I/O is available',
standard: 'other',
},
{
name: 'SIGPOLL',
number: 29,
action: 'terminate',
description: 'Watched event',
standard: 'other',
},
{
name: 'SIGINFO',
number: 29,
action: 'ignore',
description: 'Request for process information',
standard: 'other',
},
{
name: 'SIGPWR',
number: 30,
action: 'terminate',
description: 'Device running out of power',
standard: 'systemv',
},
{
name: 'SIGSYS',
number: 31,
action: 'core',
description: 'Invalid system call',
standard: 'other',
},
{
name: 'SIGUNUSED',
number: 31,
action: 'terminate',
description: 'Invalid system call',
standard: 'other',
},
]
/* eslint-enable max-lines */

View File

@ -1,70 +0,0 @@
import { constants } from 'node:os'
import { SIGRTMAX } from './realtime.js'
import { getSignals } from './signals.js'
// Retrieve `signalsByName`, an object mapping signal name to signal
// properties. Entries keep the order of `getSignals()` (sorted by number).
const getSignalsByName = () => Object.fromEntries(getSignals().map(getSignalByName))

// Turn one signal record into a [name, properties] entry for fromEntries.
const getSignalByName = (signal) => {
  const { name, number, description, supported, action, forced, standard } = signal
  return [name, { name, number, description, supported, action, forced, standard }]
}
export const signalsByName = getSignalsByName()
// Retrieve `signalsByNumber`, an object mapping signal number to signal
// properties. We make sure the object is sorted by `number`.
const getSignalsByNumber = () => {
  const signals = getSignals()
  const entries = Array.from({ length: SIGRTMAX + 1 }, (_, signalNumber) =>
    getSignalByNumber(signalNumber, signals),
  )
  return Object.assign({}, ...entries)
}

// Map one number to `{ [number]: properties }`, or `{}` when no signal
// uses that number.
const getSignalByNumber = (number, signals) => {
  const signal = findSignalByNumber(number, signals)

  if (signal === undefined) {
    return {}
  }

  const { name, description, supported, action, forced, standard } = signal
  return {
    [number]: { name, number, description, supported, action, forced, standard },
  }
}

// Several signals might end up sharing the same number because of
// OS-specific numbers; the OS-defined constant takes precedence over the
// default number.
const findSignalByNumber = (number, signals) => {
  const osMatch = signals.find(({ name }) => constants.signals[name] === number)
  return osMatch === undefined
    ? signals.find((signalA) => signalA.number === number)
    : osMatch
}
export const signalsByNumber = getSignalsByNumber()

View File

@ -1,73 +0,0 @@
/**
 * What is the default action for this signal when it is not handled.
 */
export type SignalAction = 'terminate' | 'core' | 'ignore' | 'pause' | 'unpause'
/**
 * Which standard defined that signal.
 */
export type SignalStandard = 'ansi' | 'posix' | 'bsd' | 'systemv' | 'other'
/**
 * Standard name of the signal, for example 'SIGINT'.
 */
export type SignalName = `SIG${string}`
/**
 * Code number of the signal, for example 2.
 * While most numbers are cross-platform, some are different between
 * different OS.
 */
export type SignalNumber = number
export interface Signal {
/**
 * Standard name of the signal, for example 'SIGINT'.
 */
name: SignalName
/**
 * Code number of the signal, for example 2.
 * While most numbers are cross-platform, some are different between
 * different OS.
 */
number: SignalNumber
/**
 * Human-friendly description for the signal, for example
 * 'User interruption with CTRL-C'.
 */
description: string
/**
 * Whether the current OS can handle this signal in Node.js using
 * `process.on(name, handler)`. The list of supported signals is OS-specific.
 */
supported: boolean
/**
 * What is the default action for this signal when it is not handled.
 */
action: SignalAction
/**
 * Whether the signal's default action cannot be prevented.
 * This is true for SIGTERM, SIGKILL and SIGSTOP.
 */
forced: boolean
/**
 * Which standard defined that signal.
 */
standard: SignalStandard
}
/**
 * Object whose keys are signal names and values are signal objects.
 */
export declare const signalsByName: { [signalName: SignalName]: Signal }
/**
 * Object whose keys are signal numbers and values are signal objects.
 */
export declare const signalsByNumber: { [signalNumber: SignalNumber]: Signal }

View File

@ -1,16 +0,0 @@
// Realtime signals (SIGRT1..SIGRT31): numbered consecutively from SIGRTMIN,
// all sharing the same metadata.
const SIGRTMIN = 34
export const SIGRTMAX = 64

const getRealtimeSignal = (value, index) => ({
  name: `SIGRT${index + 1}`,
  number: SIGRTMIN + index,
  action: 'terminate',
  description: 'Application-specific signal (realtime)',
  standard: 'posix',
})

export const getRealtimeSignals = () => {
  const count = SIGRTMAX - SIGRTMIN + 1
  return Array.from({ length: count }, getRealtimeSignal)
}

View File

@ -1,34 +0,0 @@
import { constants } from 'node:os'
import { SIGNALS } from './core.js'
import { getRealtimeSignals } from './realtime.js'
// Retrieve the full list of known signals (standard plus realtime), each
// normalized with its OS-specific number and support information.
export const getSignals = () =>
  [...SIGNALS, ...getRealtimeSignals()].map(normalizeSignal)
// Resolve a signal record against the current OS:
// - `number`: prefer `os.constants.signals[name]`; fall back to the
//   cross-platform default when the OS does not define the signal
// - `supported`: whether the OS defines the signal at all
// - `forced`: defaults to false
const normalizeSignal = (signal) => {
  const { name, number: defaultNumber, description, action, forced = false, standard } = signal
  const constantSignal = constants.signals[name]
  const supported = constantSignal !== undefined
  const number = supported ? constantSignal : defaultNumber
  return { name, number, description, supported, action, forced, standard }
}

View File

@ -1,81 +0,0 @@
import {
Stream,
Writable as WritableStream,
Readable as ReadableStream,
Duplex as DuplexStream,
Transform as TransformStream,
} from 'node:stream';
/**
@returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream).
@example
```
import fs from 'node:fs';
import {isStream} from 'is-stream';
isStream(fs.createReadStream('unicorn.png'));
//=> true
isStream({});
//=> false
```
*/
export function isStream(stream: unknown): stream is Stream;
/**
@returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable).
@example
```
import fs from 'node:fs';
import {isWritableStream} from 'is-stream';
isWritableStream(fs.createWriteStream('unicorn.txt'));
//=> true
```
*/
export function isWritableStream(stream: unknown): stream is WritableStream;
/**
@returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable).
@example
```
import fs from 'node:fs';
import {isReadableStream} from 'is-stream';
isReadableStream(fs.createReadStream('unicorn.png'));
//=> true
```
*/
export function isReadableStream(stream: unknown): stream is ReadableStream;
/**
@returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex).
@example
```
import {Duplex as DuplexStream} from 'node:stream';
import {isDuplexStream} from 'is-stream';
isDuplexStream(new DuplexStream());
//=> true
```
*/
export function isDuplexStream(stream: unknown): stream is DuplexStream;
/**
@returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform).
@example
```
import fs from 'node:fs';
import StringifyStream from 'streaming-json-stringify';
import {isTransformStream} from 'is-stream';
isTransformStream(StringifyStream());
//=> true
```
*/
export function isTransformStream(stream: unknown): stream is TransformStream;
View File

@ -1,29 +0,0 @@
// A stream is any non-null object exposing a `pipe` function.
export function isStream(stream) {
	if (stream === null || typeof stream !== 'object') {
		return false;
	}

	return typeof stream.pipe === 'function';
}

// Writable: a stream carrying Node's writable internals that is not
// explicitly marked non-writable.
export function isWritableStream(stream) {
	if (!isStream(stream) || stream.writable === false) {
		return false;
	}

	return typeof stream._write === 'function'
		&& typeof stream._writableState === 'object';
}

// Readable: mirror of isWritableStream for the readable side.
export function isReadableStream(stream) {
	if (!isStream(stream) || stream.readable === false) {
		return false;
	}

	return typeof stream._read === 'function'
		&& typeof stream._readableState === 'object';
}

// Duplex: readable and writable at the same time.
export function isDuplexStream(stream) {
	return isReadableStream(stream) && isWritableStream(stream);
}

// Transform: a duplex stream with a `_transform` implementation.
export function isTransformStream(stream) {
	return isDuplexStream(stream) && typeof stream._transform === 'function';
}

View File

@ -1,26 +0,0 @@
import { Adapter, SyncAdapter } from '../core/Low.js'
// In-memory async adapter, mainly useful for tests.
export class Memory<T> implements Adapter<T> {
  // Last value written; null until the first write.
  #data: T | null = null

  async read(): Promise<T | null> {
    return this.#data
  }

  async write(obj: T): Promise<void> {
    this.#data = obj
  }
}
// Synchronous in-memory adapter, mainly useful for tests.
export class MemorySync<T> implements SyncAdapter<T> {
  // Last value written; null until the first write.
  #data: T | null = null

  // Return the stored value, or null when nothing has been written yet.
  // Uses `??` instead of `||` so valid falsy data (0, '', false) is not
  // coerced to null — matching the async Memory adapter's behavior.
  read(): T | null {
    return this.#data ?? null
  }

  write(obj: T): void {
    this.#data = obj
  }
}

View File

@ -1,72 +0,0 @@
import { PathLike } from 'fs'
import { Adapter, SyncAdapter } from '../../core/Low.js'
import { TextFile, TextFileSync } from './TextFile.js'
// Async adapter that layers parse/stringify over a raw TextFile.
export class DataFile<T> implements Adapter<T> {
  #file: TextFile
  #parse: (str: string) => T
  #stringify: (data: T) => string

  constructor(
    filename: PathLike,
    {
      parse,
      stringify,
    }: {
      parse: (str: string) => T
      stringify: (data: T) => string
    },
  ) {
    this.#file = new TextFile(filename)
    this.#parse = parse
    this.#stringify = stringify
  }

  // Read the raw text and parse it; null when the file does not exist.
  async read(): Promise<T | null> {
    const str = await this.#file.read()
    return str === null ? null : this.#parse(str)
  }

  // Serialize and write via the underlying TextFile.
  write(obj: T): Promise<void> {
    return this.#file.write(this.#stringify(obj))
  }
}
// Synchronous twin of DataFile, backed by TextFileSync.
export class DataFileSync<T> implements SyncAdapter<T> {
  #file: TextFileSync
  #parse: (str: string) => T
  #stringify: (data: T) => string

  constructor(
    filename: PathLike,
    {
      parse,
      stringify,
    }: {
      parse: (str: string) => T
      stringify: (data: T) => string
    },
  ) {
    this.#file = new TextFileSync(filename)
    this.#parse = parse
    this.#stringify = stringify
  }

  // Read the raw text and parse it; null when the file does not exist.
  read(): T | null {
    const str = this.#file.read()
    return str === null ? null : this.#parse(str)
  }

  // Serialize and write synchronously.
  write(obj: T): void {
    this.#file.write(this.#stringify(obj))
  }
}

View File

@ -1,21 +0,0 @@
import { PathLike } from 'fs'
import { DataFile, DataFileSync } from './DataFile.js'
// JSON adapter: pretty-prints with two-space indentation on write.
export class JSONFile<T> extends DataFile<T> {
  constructor(filename: PathLike) {
    const stringify = (data: T) => JSON.stringify(data, null, 2)
    super(filename, { parse: JSON.parse, stringify })
  }
}
// Synchronous JSON adapter: pretty-prints with two-space indentation.
export class JSONFileSync<T> extends DataFileSync<T> {
  constructor(filename: PathLike) {
    const stringify = (data: T) => JSON.stringify(data, null, 2)
    super(filename, { parse: JSON.parse, stringify })
  }
}

View File

@ -1,67 +0,0 @@
import { PathLike, readFileSync, renameSync, writeFileSync } from 'node:fs'
import { readFile } from 'node:fs/promises'
import path from 'node:path'
import { Writer } from '../../../steno'
import { Adapter, SyncAdapter } from '../../core/Low.js'
// Async raw-text adapter. Reads with fs/promises; writes are delegated to
// the steno Writer (presumably atomic/serialized — see the steno package).
export class TextFile implements Adapter<string> {
  #filename: PathLike
  #writer: Writer

  constructor(filename: PathLike) {
    this.#filename = filename
    this.#writer = new Writer(filename)
  }

  // Return the file contents, or null when the file does not exist.
  // Any error other than ENOENT is propagated to the caller.
  async read(): Promise<string | null> {
    try {
      return await readFile(this.#filename, 'utf-8')
    } catch (e) {
      if ((e as NodeJS.ErrnoException).code === 'ENOENT') {
        return null
      }
      throw e
    }
  }

  write(str: string): Promise<void> {
    return this.#writer.write(str)
  }
}
// Synchronous raw-text adapter.
export class TextFileSync implements SyncAdapter<string> {
  #tempFilename: PathLike
  #filename: PathLike

  constructor(filename: PathLike) {
    this.#filename = filename
    const f = filename.toString()
    // Hidden sibling temp file, e.g. /dir/.db.json.tmp
    this.#tempFilename = path.join(path.dirname(f), `.${path.basename(f)}.tmp`)
  }

  // Return the file contents, or null when the file does not exist.
  // Any error other than ENOENT is propagated to the caller.
  read(): string | null {
    try {
      return readFileSync(this.#filename, 'utf-8')
    } catch (e) {
      if ((e as NodeJS.ErrnoException).code === 'ENOENT') {
        return null
      }
      throw e
    }
  }

  // Write-then-rename so readers never observe a partially written file.
  write(str: string): void {
    writeFileSync(this.#tempFilename, str)
    renameSync(this.#tempFilename, this.#filename)
  }
}

View File

@ -1,64 +0,0 @@
// Async storage adapter contract used by Low: read yields the stored value
// or null when no data exists yet.
export interface Adapter<T> {
read: () => Promise<T | null>
write: (data: T) => Promise<void>
}
// Synchronous storage adapter contract used by LowSync.
export interface SyncAdapter<T> {
read: () => T | null
write: (data: T) => void
}
// Guard against the two most common construction mistakes: both the adapter
// and the default data are mandatory.
function checkArgs(adapter: unknown, defaultData: unknown) {
  if (adapter === undefined) {
    throw new Error('lowdb: missing adapter')
  }
  if (defaultData === undefined) {
    throw new Error('lowdb: missing default data')
  }
}
// Tiny in-memory database with a pluggable async storage adapter.
// `data` is always readable synchronously; read()/write() sync it with the
// adapter.
export class Low<T = unknown> {
adapter: Adapter<T>
data: T
constructor(adapter: Adapter<T>, defaultData: T) {
checkArgs(adapter, defaultData)
this.adapter = adapter
this.data = defaultData
}
// Replace `data` with the adapter's contents.
// NOTE(review): a falsy read result (0, '', false) is discarded and the
// current `data` kept — confirm adapters only ever return objects or null.
async read(): Promise<void> {
const data = await this.adapter.read()
if (data) this.data = data
}
// Persist `data`; skipped entirely when `data` is falsy.
async write(): Promise<void> {
if (this.data) await this.adapter.write(this.data)
}
// Apply a mutating function to `data`, then persist the result.
async update(fn: (data: T) => unknown): Promise<void> {
fn(this.data)
await this.write()
}
}
// Synchronous twin of Low: same contract, backed by a SyncAdapter.
export class LowSync<T = unknown> {
  adapter: SyncAdapter<T>
  data: T

  constructor(adapter: SyncAdapter<T>, defaultData: T) {
    checkArgs(adapter, defaultData)
    this.adapter = adapter
    this.data = defaultData
  }

  // Replace `data` with the adapter's contents; a falsy result leaves the
  // current `data` untouched (mirrors Low.read).
  read(): void {
    const stored = this.adapter.read()
    if (stored) {
      this.data = stored
    }
  }

  // Persist `data`; a falsy `data` is not written (mirrors Low.write).
  write(): void {
    if (this.data) {
      this.adapter.write(this.data)
    }
  }

  // Apply a mutating function to `data`, then persist the result.
  update(fn: (data: T) => unknown): void {
    fn(this.data)
    this.write()
  }
}

View File

@ -1,3 +0,0 @@
// Public entry point: re-export the Node adapters and presets.
export * from './adapters/node/JSONFile.js'
export * from './adapters/node/TextFile.js'
export * from './presets/node.js'

View File

@ -1,31 +0,0 @@
import { PathLike } from 'node:fs'
import { Memory, MemorySync } from '../adapters/Memory.js'
import { JSONFile, JSONFileSync } from '../adapters/node/JSONFile.js'
import { Low, LowSync } from '../core/Low.js'
// Create a Low instance backed by a JSON file — or by memory when
// NODE_ENV=test — and populate it with the file's current contents.
export async function JSONFilePreset<Data>(
  filename: PathLike,
  defaultData: Data,
): Promise<Low<Data>> {
  const isTest = process.env.NODE_ENV === 'test'
  const adapter = isTest ? new Memory<Data>() : new JSONFile<Data>(filename)
  const db = new Low<Data>(adapter, defaultData)
  await db.read()
  return db
}
// Synchronous twin of JSONFilePreset.
export function JSONFileSyncPreset<Data>(
  filename: PathLike,
  defaultData: Data,
): LowSync<Data> {
  const isTest = process.env.NODE_ENV === 'test'
  const adapter = isTest ? new MemorySync<Data>() : new JSONFileSync<Data>(filename)
  const db = new LowSync<Data>(adapter, defaultData)
  db.read()
  return db
}

View File

@ -1,167 +0,0 @@
const request = require('request')
const { v3 } = require('uuid')
// Memoized offline UUID, filled lazily by getUUID() below.
let uuid
// Yggdrasil auth endpoint; overridable via Authenticator.changeApiUrl.
let api_url = 'https://authserver.mojang.com'
/**
 * Convert Mojang's `properties` array ([{name, value}, …]) into the JSON
 * string the game expects: {"name": [value, …]}.
 * Returns '{}' when the array is null/undefined.
 */
function parsePropts(array) {
  if (!array) {
    return '{}'
  }
  // Group values by property name. A null-prototype object is used so a
  // property literally named "__proto__" becomes a normal own key instead
  // of mutating the prototype chain (and being silently dropped).
  const grouped = Object.create(null)
  for (const entry of array) {
    if (grouped[entry.name]) {
      grouped[entry.name].push(entry.value)
    } else {
      grouped[entry.name] = [entry.value]
    }
  }
  return JSON.stringify(grouped)
}
// Lazily derive a deterministic offline UUID from `value` (uuid v3, DNS
// namespace). The result is memoized in the module-level `uuid`, so later
// calls ignore their argument — the first caller's value wins.
function getUUID(value) {
if (!uuid) {
uuid = v3(value, v3.DNS)
}
return uuid
}
// Yggdrasil (legacy Mojang) authentication client. All methods return
// promises wrapping the callback-style `request` HTTP client.
const Authenticator = {
// Authenticate a user. Without a password this builds an offline profile
// from a deterministic UUID; with one it POSTs to /authenticate.
getAuth: (username, password, client_token = null) => {
return new Promise((resolve, reject) => {
getUUID(username)
if (!password) {
// Offline mode: fabricate a local profile, no network call.
const user = {
access_token: uuid,
client_token: client_token || uuid,
uuid,
name: username,
user_properties: '{}'
}
return resolve(user)
}
const requestObject = {
url: api_url + '/authenticate',
json: {
agent: {
name: 'Minecraft',
version: 1
},
username,
password,
clientToken: uuid,
requestUser: true
}
}
request.post(requestObject, function (error, response, body) {
if (error) return reject(error)
// A response without selectedProfile means auth failed (bad
// credentials, migration required, etc.).
if (!body || !body.selectedProfile) {
return reject(new Error('Validation error: ' + response.statusMessage))
}
const userProfile = {
access_token: body.accessToken,
client_token: body.clientToken,
uuid: body.selectedProfile.id,
name: body.selectedProfile.name,
selected_profile: body.selectedProfile,
user_properties: parsePropts(body.user.properties)
}
resolve(userProfile)
})
})
},
// Check whether an access token is still usable. Yggdrasil answers an
// empty body on success, so an empty body resolves true and any payload
// (an error object) rejects.
validate: (accessToken, clientToken) => {
return new Promise((resolve, reject) => {
const requestObject = {
url: api_url + '/validate',
json: {
accessToken,
clientToken
}
}
request.post(requestObject, async function (error, response, body) {
if (error) return reject(error)
if (!body) resolve(true)
else reject(body)
})
})
},
// Exchange a stale access token for a fresh one via /refresh.
refreshAuth: (accessToken, clientToken) => {
return new Promise((resolve, reject) => {
const requestObject = {
url: api_url + '/refresh',
json: {
accessToken,
clientToken,
requestUser: true
}
}
request.post(requestObject, function (error, response, body) {
if (error) return reject(error)
if (!body || !body.selectedProfile) {
return reject(new Error('Validation error: ' + response.statusMessage))
}
const userProfile = {
access_token: body.accessToken,
client_token: getUUID(body.selectedProfile.name),
uuid: body.selectedProfile.id,
name: body.selectedProfile.name,
user_properties: parsePropts(body.user.properties)
}
return resolve(userProfile)
})
})
},
// Invalidate one access token / client token pair (empty body = success).
invalidate: (accessToken, clientToken) => {
return new Promise((resolve, reject) => {
const requestObject = {
url: api_url + '/invalidate',
json: {
accessToken,
clientToken
}
}
request.post(requestObject, function (error, response, body) {
if (error) return reject(error)
if (!body) return resolve(true)
else return reject(body)
})
})
},
// Invalidate every token for the account (empty body = success).
signOut: (username, password) => {
return new Promise((resolve, reject) => {
const requestObject = {
url: api_url + '/signout',
json: {
username,
password
}
}
request.post(requestObject, function (error, response, body) {
if (error) return reject(error)
if (!body) return resolve(true)
else return reject(body)
})
})
},
// Point all subsequent calls at a different auth server (e.g. a proxy).
changeApiUrl: (url) => {
api_url = url
}
}
export default Authenticator

View File

@ -1,783 +0,0 @@
const fs = require('fs')
const path = require('path')
const request = require('request')
const checksum = require('checksum')
const Zip = require('adm-zip')
const child = require('child_process')
let counter = 0
export default class Handler {
// `client` is the launcher facade: it supplies `options` and receives
// 'debug' / 'progress' / 'download-status' events emitted by this handler.
constructor (client) {
this.client = client
this.options = client.options
// Shared request defaults: small socket pool plus a per-request timeout.
this.baseRequest = request.defaults({
pool: { maxSockets: this.options.overrides.maxSockets || 2 },
timeout: this.options.timeout || 10000
})
}
// Probe a java binary by running `java -version`.
// Resolves { run: true } on success or { run: false, message } on failure;
// never rejects. The version/bitness is parsed from stderr, where the JVM
// prints its banner.
checkJava (java) {
return new Promise(resolve => {
child.exec(`"${java}" -version`, (error, stdout, stderr) => {
if (error) {
resolve({
run: false,
message: error
})
} else {
// First quoted token in the banner is the version string.
this.client.emit('debug', `[MCLC]: Using Java version ${stderr.match(/"(.*?)"/).pop()} ${stderr.includes('64-Bit') ? '64-bit' : '32-Bit'}`)
resolve({
run: true
})
}
})
})
}
// Download `url` into `directory/name`, emitting 'download-status' progress
// events tagged with `type`. Retries once when `retry` is true; never
// rejects — a 404 resolves false, other failures resolve undefined after
// the retry attempt, success resolves { failed: false, asset: null }.
downloadAsync (url, directory, name, retry, type) {
return new Promise(resolve => {
fs.mkdirSync(directory, { recursive: true })
const _request = this.baseRequest(url)
let receivedBytes = 0
let totalBytes = 0
_request.on('response', (data) => {
if (data.statusCode === 404) {
this.client.emit('debug', `[MCLC]: Failed to download ${url} due to: File not found...`)
return resolve(false)
}
totalBytes = parseInt(data.headers['content-length'])
})
_request.on('error', async (error) => {
this.client.emit('debug', `[MCLC]: Failed to download asset to ${path.join(directory, name)} due to\n${error}.` +
` Retrying... ${retry}`)
// Single retry: the recursive call passes retry=false.
if (retry) await this.downloadAsync(url, directory, name, false, type)
resolve()
})
_request.on('data', (data) => {
receivedBytes += data.length
this.client.emit('download-status', {
name: name,
type: type,
current: receivedBytes,
total: totalBytes
})
})
const file = fs.createWriteStream(path.join(directory, name))
_request.pipe(file)
file.once('finish', () => {
this.client.emit('download', name)
resolve({
failed: false,
asset: null
})
})
file.on('error', async (e) => {
this.client.emit('debug', `[MCLC]: Failed to download asset to ${path.join(directory, name)} due to\n${e}.` +
` Retrying... ${retry}`)
// Remove the partial file before retrying.
if (fs.existsSync(path.join(directory, name))) fs.unlinkSync(path.join(directory, name))
if (retry) await this.downloadAsync(url, directory, name, false, type)
resolve()
})
})
}
checkSum (hash, file) {
return new Promise((resolve, reject) => {
checksum.file(file, (err, sum) => {
if (err) {
this.client.emit('debug', `[MCLC]: Failed to check file hash due to ${err}`)
resolve(false)
} else {
resolve(hash === sum)
}
})
})
}
// Resolve the version JSON for options.version.number and cache it on
// `this.version`. Resolution order: explicit versionJson override on disk,
// then the Mojang version manifest (cached under <cache>/json, with the
// cached copy used as an offline fallback on ENOTFOUND).
// NOTE(review): network errors are resolved (as Error values), not
// rejected — confirm callers inspect the resolved value.
getVersion () {
return new Promise(resolve => {
const versionJsonPath = this.options.overrides.versionJson || path.join(this.options.directory, `${this.options.version.number}.json`)
if (fs.existsSync(versionJsonPath)) {
this.version = JSON.parse(fs.readFileSync(versionJsonPath))
return resolve(this.version)
}
const manifest = `${this.options.overrides.url.meta}/mc/game/version_manifest.json`
const cache = this.options.cache ? `${this.options.cache}/json` : `${this.options.root}/cache/json`
request.get(manifest, (error, response, body) => {
if (error && error.code !== 'ENOTFOUND') {
return resolve(error)
}
if (!error) {
// Refresh the on-disk manifest cache (fire-and-forget write).
if (!fs.existsSync(cache)) {
fs.mkdirSync(cache, { recursive: true })
this.client.emit('debug', '[MCLC]: Cache directory created.')
}
fs.writeFile(path.join(`${cache}/version_manifest.json`), body, (err) => {
if (err) {
return resolve(err)
}
this.client.emit('debug', '[MCLC]: Cached version_manifest.json (from request)')
})
}
let parsed = null
// Offline (DNS failure): fall back to the cached manifest.
if (error && (error.code === 'ENOTFOUND')) {
parsed = JSON.parse(fs.readFileSync(`${cache}/version_manifest.json`))
} else {
parsed = JSON.parse(body)
}
const versionManifest = parsed.versions.find((version) => {
return version.id === this.options.version.number
})
if (!versionManifest) {
return resolve(new Error(`Version not found`))
}
// Fetch (or fall back to cached) per-version JSON.
request.get(versionManifest.url, (error, response, body) => {
if (error && error.code !== 'ENOTFOUND') {
return resolve(error)
}
if (!error) {
fs.writeFile(path.join(`${cache}/${this.options.version.number}.json`), body, (err) => {
if (err) {
return resolve(err)
}
this.client.emit('debug', `[MCLC]: Cached ${this.options.version.number}.json`)
})
}
this.client.emit('debug', '[MCLC]: Parsed version from version manifest')
if (error && (error.code === 'ENOTFOUND')) {
this.version = JSON.parse(fs.readFileSync(`${cache}/${this.options.version.number}.json`))
} else {
this.version = JSON.parse(body)
}
this.client.emit('debug', this.version)
return resolve(this.version)
})
})
})
}
// Download the client jar for the resolved version (requires getVersion()
// to have populated this.version) and persist the version JSON next to it.
// `version.custom`, when set, overrides the jar's file name.
async getJar () {
await this.downloadAsync(this.version.downloads.client.url, this.options.directory, `${this.options.version.custom ? this.options.version.custom : this.options.version.number}.jar`, true, 'version-jar')
fs.writeFileSync(path.join(this.options.directory, `${this.options.version.number}.json`), JSON.stringify(this.version, null, 4))
return this.client.emit('debug', '[MCLC]: Downloaded version jar and wrote version json')
}
// Download every asset listed in the version's asset index into
// <assetRoot>/objects/<2-char-prefix>/<hash>, verifying existing files by
// checksum, and emit 'progress' events. For legacy versions the assets are
// additionally copied into <root>/resources at their original paths.
// Uses the module-level `counter` for progress (shared mutable state).
async getAssets () {
const assetDirectory = path.resolve(this.options.overrides.assetRoot || path.join(this.options.root, 'assets'))
const assetId = this.options.version.custom || this.options.version.number
if (!fs.existsSync(path.join(assetDirectory, 'indexes', `${assetId}.json`))) {
await this.downloadAsync(this.version.assetIndex.url, path.join(assetDirectory, 'indexes'),
`${assetId}.json`, true, 'asset-json')
}
const index = JSON.parse(fs.readFileSync(path.join(assetDirectory, 'indexes', `${assetId}.json`), { encoding: 'utf8' }))
this.client.emit('progress', {
type: 'assets',
task: 0,
total: Object.keys(index.objects).length
})
// All assets are fetched in parallel; each is re-downloaded when missing
// or failing its checksum.
await Promise.all(Object.keys(index.objects).map(async asset => {
const hash = index.objects[asset].hash
const subhash = hash.substring(0, 2)
const subAsset = path.join(assetDirectory, 'objects', subhash)
if (!fs.existsSync(path.join(subAsset, hash)) || !await this.checkSum(hash, path.join(subAsset, hash))) {
await this.downloadAsync(`${this.options.overrides.url.resource}/${subhash}/${hash}`, subAsset, hash,
true, 'assets')
}
counter++
this.client.emit('progress', {
type: 'assets',
task: counter,
total: Object.keys(index.objects).length
})
}))
counter = 0
// Copy assets to legacy if it's an older Minecraft version.
if (this.isLegacy()) {
if (fs.existsSync(path.join(assetDirectory, 'legacy'))) {
this.client.emit('debug', '[MCLC]: The \'legacy\' directory is no longer used as Minecraft looks ' +
'for the resouces folder regardless of what is passed in the assetDirecotry launch option. I\'d ' +
`recommend removing the directory (${path.join(assetDirectory, 'legacy')})`)
}
const legacyDirectory = path.join(this.options.root, 'resources')
this.client.emit('debug', `[MCLC]: Copying assets over to ${legacyDirectory}`)
this.client.emit('progress', {
type: 'assets-copy',
task: 0,
total: Object.keys(index.objects).length
})
await Promise.all(Object.keys(index.objects).map(async asset => {
const hash = index.objects[asset].hash
const subhash = hash.substring(0, 2)
const subAsset = path.join(assetDirectory, 'objects', subhash)
// Legacy layout keys assets by their original relative path.
const legacyAsset = asset.split('/')
legacyAsset.pop()
if (!fs.existsSync(path.join(legacyDirectory, legacyAsset.join('/')))) {
fs.mkdirSync(path.join(legacyDirectory, legacyAsset.join('/')), { recursive: true })
}
if (!fs.existsSync(path.join(legacyDirectory, asset))) {
fs.copyFileSync(path.join(subAsset, hash), path.join(legacyDirectory, asset))
}
counter++
this.client.emit('progress', {
type: 'assets-copy',
task: counter,
total: Object.keys(index.objects).length
})
}))
}
counter = 0
this.client.emit('debug', '[MCLC]: Downloaded assets')
}
parseRule (lib) {
if (lib.rules) {
if (lib.rules.length > 1) {
if (lib.rules[0].action === 'allow' &&
lib.rules[1].action === 'disallow' &&
lib.rules[1].os.name === 'osx') {
return this.getOS() === 'osx'
} else {
return true
}
} else {
if (lib.rules[0].action === 'allow' && lib.rules[0].os) return lib.rules[0].os.name !== this.getOS()
}
} else {
return false
}
}
// Downloads and extracts the platform-specific native libraries for the
// current version into <root>/natives/<version id> and returns that path.
async getNatives () {
const nativeDirectory = path.resolve(this.options.overrides.natives || path.join(this.options.root, 'natives', this.version.id))
// 1.19+ no longer ships classifier natives, so the working directory is
// returned instead of a natives folder.
if (parseInt(this.version.id.split('.')[1]) >= 19) return this.options.overrides.cwd || this.options.root
// Only (re)build the natives directory when it is missing or empty.
if (!fs.existsSync(nativeDirectory) || !fs.readdirSync(nativeDirectory).length) {
fs.mkdirSync(nativeDirectory, { recursive: true })
// Collect the download descriptor of each native classifier that applies
// to this OS; libraries excluded by their rules are skipped via parseRule.
const natives = async () => {
const natives = []
await Promise.all(this.version.libraries.map(async (lib) => {
if (!lib.downloads || !lib.downloads.classifiers) return
if (this.parseRule(lib)) return
const native = this.getOS() === 'osx'
? lib.downloads.classifiers['natives-osx'] || lib.downloads.classifiers['natives-macos']
: lib.downloads.classifiers[`natives-${this.getOS()}`]
natives.push(native)
}))
return natives
}
const stat = await natives()
this.client.emit('progress', {
type: 'natives',
task: 0,
total: stat.length
})
await Promise.all(stat.map(async (native) => {
if (!native) return
const name = native.path.split('/').pop()
await this.downloadAsync(native.url, nativeDirectory, name, true, 'natives')
// Retry the download once if the archive fails its SHA1 check.
if (!await this.checkSum(native.sha1, path.join(nativeDirectory, name))) {
await this.downloadAsync(native.url, nativeDirectory, name, true, 'natives')
}
try {
new Zip(path.join(nativeDirectory, name)).extractAllTo(nativeDirectory, true)
} catch (e) {
// Only doing a console.warn since a stupid error happens. You can basically ignore this.
// if it says Invalid file name, just means two files were downloaded and both were deleted.
// All is well.
console.warn(e)
}
// The archive is no longer needed once extracted.
fs.unlinkSync(path.join(nativeDirectory, name))
counter++
this.client.emit('progress', {
type: 'natives',
task: counter,
total: stat.length
})
}))
this.client.emit('debug', '[MCLC]: Downloaded and extracted natives')
}
counter = 0
this.client.emit('debug', `[MCLC]: Set native path to ${nativeDirectory}`)
return nativeDirectory
}
fwAddArgs () {
const forgeWrapperAgrs = [
`-Dforgewrapper.librariesDir=${path.resolve(this.options.overrides.libraryRoot || path.join(this.options.root, 'libraries'))}`,
`-Dforgewrapper.installer=${this.options.forge}`,
`-Dforgewrapper.minecraft=${this.options.mcPath}`
]
this.options.customArgs
? this.options.customArgs = this.options.customArgs.concat(forgeWrapperAgrs)
: this.options.customArgs = forgeWrapperAgrs
}
isModernForge (json) {
return json.inheritsFrom && json.inheritsFrom.split('.')[1] >= 12 && !(json.inheritsFrom === '1.12.2' && (json.id.split('.')[json.id.split('.').length - 1]) === '2847')
}
// Builds (or loads from cache) a "custom" version JSON for Forge that MCLC
// can launch natively via ForgeWrapper. Returns the JSON, or null when the
// jar's embedded JSON cannot be parsed (caller falls back to vanilla).
// Side effects: may mutate this.options.forge / customArgs and writes the
// generated JSON to <root>/forge/<version id>/version.json.
async getForgedWrapped () {
let json = null
let installerJson = null
const versionPath = path.join(this.options.root, 'forge', `${this.version.id}`, 'version.json')
// Since we're building a proper "custom" JSON that will work nativly with MCLC, the version JSON will not
// be re-generated on the next run.
if (fs.existsSync(versionPath)) {
try {
json = JSON.parse(fs.readFileSync(versionPath))
// Regenerate when a different ForgeWrapper version produced the cache.
if (!json.forgeWrapperVersion || !(json.forgeWrapperVersion === this.options.overrides.fw.version)) {
this.client.emit('debug', '[MCLC]: Old ForgeWrapper has generated this version JSON, re-generating')
} else {
// If forge is modern, add ForgeWrappers launch arguments and set forge to null so MCLC treats it as a custom json.
if (this.isModernForge(json)) {
this.fwAddArgs()
this.options.forge = null
}
return json
}
} catch (e) {
console.warn(e)
this.client.emit('debug', '[MCLC]: Failed to parse Forge version JSON, re-generating')
}
}
this.client.emit('debug', '[MCLC]: Generating a proper version json, this might take a bit')
// Read version.json (and install_profile.json when present) straight out
// of the Forge installer jar.
const zipFile = new Zip(this.options.forge)
json = zipFile.readAsText('version.json')
if (zipFile.getEntry('install_profile.json')) installerJson = zipFile.readAsText('install_profile.json')
try {
json = JSON.parse(json)
if (installerJson) installerJson = JSON.parse(installerJson)
} catch (e) {
this.client.emit('debug', '[MCLC]: Failed to load json files for ForgeWrapper, using Vanilla instead')
return null
}
// Adding the installer libraries as mavenFiles so MCLC downloads them but doesn't add them to the class paths.
if (installerJson) {
json.mavenFiles
? json.mavenFiles = json.mavenFiles.concat(installerJson.libraries)
: json.mavenFiles = installerJson.libraries
}
// Holder for the specifc jar ending which depends on the specifc forge version.
let jarEnding = 'universal'
// We need to handle modern forge differently than legacy.
if (this.isModernForge(json)) {
// If forge is modern and above 1.12.2, we add ForgeWrapper to the libraries so MCLC includes it in the classpaths.
if (json.inheritsFrom !== '1.12.2') {
this.fwAddArgs()
const fwName = `ForgeWrapper-${this.options.overrides.fw.version}.jar`
const fwPathArr = ['io', 'github', 'zekerzhayard', 'ForgeWrapper', this.options.overrides.fw.version]
json.libraries.push({
name: fwPathArr.join(':'),
downloads: {
artifact: {
path: [...fwPathArr, fwName].join('/'),
url: `${this.options.overrides.fw.baseUrl}${this.options.overrides.fw.version}/${fwName}`,
sha1: this.options.overrides.fw.sh1,
size: this.options.overrides.fw.size
}
}
})
json.mainClass = 'io.github.zekerzhayard.forgewrapper.installer.Main'
jarEnding = 'launcher'
// Providing a download URL to the universal jar mavenFile so it can be downloaded properly.
for (const library of json.mavenFiles) {
const lib = library.name.split(':')
if (lib[0] === 'net.minecraftforge' && lib[1].includes('forge')) {
library.downloads.artifact.url = 'https://files.minecraftforge.net/maven/' + library.downloads.artifact.path
break
}
}
} else {
// Remove the forge dependent since we're going to overwrite the first entry anyways.
// (delete leaves a hole in the array; cleanUp below drops it.)
for (const library in json.mavenFiles) {
const lib = json.mavenFiles[library].name.split(':')
if (lib[0] === 'net.minecraftforge' && lib[1].includes('forge')) {
delete json.mavenFiles[library]
break
}
}
}
} else {
// Modifying legacy library format to play nice with MCLC's downloadToDirectory function.
await Promise.all(json.libraries.map(async library => {
const lib = library.name.split(':')
if (lib[0] === 'net.minecraftforge' && lib[1].includes('forge')) return
let url = this.options.overrides.url.mavenForge
const name = `${lib[1]}-${lib[2]}.jar`
if (!library.url) {
if (library.serverreq || library.clientreq) {
url = this.options.overrides.url.defaultRepoForge
} else {
return
}
}
library.url = url
const downloadLink = `${url}${lib[0].replace(/\./g, '/')}/${lib[1]}/${lib[2]}/${name}`
// Checking if the file still exists on Forge's server, if not, replace it with the fallback.
// Not checking for sucess, only if it 404s.
// NOTE(review): this request is fire-and-forget — the Promise.all above
// does not wait for its callback, so the fallback URL may be assigned
// after downloads have already started. Confirm whether this is intended.
this.baseRequest(downloadLink, (error, response, body) => {
if (error) {
this.client.emit('debug', `[MCLC]: Failed checking request for ${downloadLink}`)
} else {
if (response.statusCode === 404) library.url = this.options.overrides.url.fallbackMaven
}
})
}))
}
// If a downloads property exists, we modify the inital forge entry to include ${jarEnding} so ForgeWrapper can work properly.
// If it doesn't, we simply remove it since we're already providing the universal jar.
if (json.libraries[0].downloads) {
if (json.libraries[0].name.includes('minecraftforge')) {
json.libraries[0].name = json.libraries[0].name + `:${jarEnding}`
json.libraries[0].downloads.artifact.path = json.libraries[0].downloads.artifact.path.replace('.jar', `-${jarEnding}.jar`)
json.libraries[0].downloads.artifact.url = 'https://files.minecraftforge.net/maven/' + json.libraries[0].downloads.artifact.path
}
} else {
delete json.libraries[0]
}
// Removing duplicates and null types
json.libraries = this.cleanUp(json.libraries)
if (json.mavenFiles) json.mavenFiles = this.cleanUp(json.mavenFiles)
// Stamp the cache so the check at the top can detect wrapper upgrades.
json.forgeWrapperVersion = this.options.overrides.fw.version
// Saving file for next run!
if (!fs.existsSync(path.join(this.options.root, 'forge', this.version.id))) {
fs.mkdirSync(path.join(this.options.root, 'forge', this.version.id), { recursive: true })
}
fs.writeFileSync(versionPath, JSON.stringify(json, null, 4))
// Make MCLC treat modern forge as a custom version json rather then legacy forge.
if (this.isModernForge(json)) this.options.forge = null
return json
}
runInstaller (path) {
return new Promise(resolve => {
const installer = child.exec(path)
installer.on('close', (code) => resolve(code))
})
}
async downloadToDirectory (directory, libraries, eventName) {
const libs = []
await Promise.all(libraries.map(async library => {
if (!library) return
if (this.parseRule(library)) return
const lib = library.name.split(':')
let jarPath
let name
if (library.downloads && library.downloads.artifact && library.downloads.artifact.path) {
name = library.downloads.artifact.path.split('/')[library.downloads.artifact.path.split('/').length - 1]
jarPath = path.join(directory, this.popString(library.downloads.artifact.path))
} else {
name = `${lib[1]}-${lib[2]}${lib[3] ? '-' + lib[3] : ''}.jar`
jarPath = path.join(directory, `${lib[0].replace(/\./g, '/')}/${lib[1]}/${lib[2]}`)
}
const downloadLibrary = async library => {
if (library.url) {
const url = `${library.url}${lib[0].replace(/\./g, '/')}/${lib[1]}/${lib[2]}/${name}`
await this.downloadAsync(url, jarPath, name, true, eventName)
} else if (library.downloads && library.downloads.artifact) {
await this.downloadAsync(library.downloads.artifact.url, jarPath, name, true, eventName)
}
}
if (!fs.existsSync(path.join(jarPath, name))) downloadLibrary(library)
else if (library.downloads && library.downloads.artifact) {
if (!this.checkSum(library.downloads.artifact.sha1, path.join(jarPath, name))) downloadLibrary(library)
}
counter++
this.client.emit('progress', {
type: eventName,
task: counter,
total: libraries.length
})
libs.push(`${jarPath}${path.sep}${name}`)
}))
counter = 0
return libs
}
// Resolves (downloading when necessary) every library jar needed on the
// classpath. When a custom/forge version JSON is supplied, its mavenFiles
// are fetched too (but not returned) and its libraries are prepended.
// Returns the list of local jar paths.
async getClasses (classJson) {
let libs = []
const libraryDirectory = path.resolve(this.options.overrides.libraryRoot || path.join(this.options.root, 'libraries'))
if (classJson) {
if (classJson.mavenFiles) {
await this.downloadToDirectory(libraryDirectory, classJson.mavenFiles, 'classes-maven-custom')
}
libs = (await this.downloadToDirectory(libraryDirectory, classJson.libraries, 'classes-custom'))
}
// Entries without a downloadable artifact (or excluded by their rules) map
// to undefined here; downloadToDirectory skips falsy entries.
const parsed = this.version.libraries.map(lib => {
if (lib.downloads && lib.downloads.artifact && !this.parseRule(lib)) return lib
})
libs = libs.concat((await this.downloadToDirectory(libraryDirectory, parsed, 'classes')))
counter = 0
// Temp Quilt support
if (classJson) libs.sort()
this.client.emit('debug', '[MCLC]: Collected class paths')
return libs
}
popString (path) {
const tempArray = path.split('/')
tempArray.pop()
return tempArray.join('/')
}
cleanUp (array) {
const newArray = []
for (const classPath in array) {
if (newArray.includes(array[classPath]) || array[classPath] === null) continue
newArray.push(array[classPath])
}
return newArray
}
formatQuickPlay () {
const types = {
singleplayer: '--quickPlaySingleplayer',
multiplayer: '--quickPlayMultiplayer',
realms: '--quickPlayRealms',
legacy: null
}
const { type, identifier, path } = this.options.quickPlay
const keys = Object.keys(types)
if (!keys.includes(type)) {
this.client.emit('debug', `[MCLC]: quickPlay type is not valid. Valid types are: ${keys.join(', ')}`)
return null
}
const returnArgs = type === 'legacy'
? ['--server', identifier.split(':')[0], '--port', identifier.split(':')[1] || '25565']
: [types[type], identifier]
if (path) returnArgs.push('--quickPlayPath', path)
return returnArgs
}
// Builds the game-argument list for the JVM command line: takes the raw
// argument template from the (possibly modified) version JSON, substitutes
// the ${...} placeholders, applies feature rules, and appends window,
// quickPlay and proxy flags.
async getLaunchOptions (modification) {
const type = Object.assign({}, this.version, modification)
let args = type.minecraftArguments
? type.minecraftArguments.split(' ')
: type.arguments.game
const assetRoot = path.resolve(this.options.overrides.assetRoot || path.join(this.options.root, 'assets'))
const assetPath = this.isLegacy()
? path.join(this.options.root, 'resources')
: path.join(assetRoot)
// NOTE(review): `||` binds tighter than `?:`, so this evaluates as
// (overrides.minArgs || isLegacy()) ? 5 : 11 — a truthy overrides.minArgs
// always yields 5 and is never used as the actual minimum. Likely intended:
// overrides.minArgs || (isLegacy() ? 5 : 11). Confirm before changing.
const minArgs = this.options.overrides.minArgs || this.isLegacy() ? 5 : 11
if (args.length < minArgs) args = args.concat(this.version.minecraftArguments ? this.version.minecraftArguments.split(' ') : this.version.arguments.game)
if (this.options.customLaunchArgs) args = args.concat(this.options.customLaunchArgs)
// authorization may be a promise (e.g. from Authenticator.getAuth).
this.options.authorization = await Promise.resolve(this.options.authorization)
this.options.authorization.meta = this.options.authorization.meta ? this.options.authorization.meta : { type: 'mojang' }
// Placeholder substitution table; keys are the literal ${...} tokens that
// appear in Mojang's argument templates.
const fields = {
'${auth_access_token}': this.options.authorization.access_token,
'${auth_session}': this.options.authorization.access_token,
'${auth_player_name}': this.options.authorization.name,
'${auth_uuid}': this.options.authorization.uuid,
'${auth_xuid}': this.options.authorization.meta.xuid || this.options.authorization.access_token,
'${user_properties}': this.options.authorization.user_properties,
'${user_type}': this.options.authorization.meta.type,
'${version_name}': this.options.version.number,
'${assets_index_name}': this.options.overrides.assetIndex || this.options.version.custom || this.options.version.number,
'${game_directory}': this.options.overrides.gameDirectory || this.options.root,
'${assets_root}': assetPath,
'${game_assets}': assetPath,
'${version_type}': this.options.version.type,
'${clientid}': this.options.authorization.meta.clientId || (this.options.authorization.client_token || this.options.authorization.access_token),
'${resolution_width}': this.options.window ? this.options.window.width : 856,
'${resolution_height}': this.options.window ? this.options.window.height : 482
}
if (this.options.authorization.meta.demo && (this.options.features ? !this.options.features.includes('is_demo_user') : true)) {
args.push('--demo')
}
// Expands a rule-object argument into plain values. `delete args[index]`
// leaves a hole; the typeof filter at the end of this method drops it.
const replaceArg = (obj, index) => {
if (Array.isArray(obj.value)) {
for (const arg of obj.value) {
args.push(arg)
}
} else {
args.push(obj.value)
}
delete args[index]
}
for (let index = 0; index < args.length; index++) {
if (typeof args[index] === 'object') {
if (args[index].rules) {
if (!this.options.features) continue
const featureFlags = []
for (const rule of args[index].rules) {
featureFlags.push(...Object.keys(rule.features))
}
let hasAllRules = true
for (const feature of this.options.features) {
if (!featureFlags.includes(feature)) {
hasAllRules = false
}
}
if (hasAllRules) replaceArg(args[index], index)
} else {
replaceArg(args[index], index)
}
} else {
if (Object.keys(fields).includes(args[index])) {
args[index] = fields[args[index]]
}
}
}
if (this.options.window) {
// NOTE(review): the non-fullscreen branch of this ternary is an arrow
// function that is never invoked, so --width/--height are never pushed
// here. Confirm whether that is intended before fixing.
// eslint-disable-next-line no-unused-expressions
this.options.window.fullscreen
? args.push('--fullscreen')
: () => {
if (this.options.features ? !this.options.features.includes('has_custom_resolution') : true) {
args.push('--width', this.options.window.width, '--height', this.options.window.height)
}
}
}
if (this.options.server) this.client.emit('debug', '[MCLC]: server and port are deprecated launch flags. Use the quickPlay field.')
if (this.options.quickPlay) args = args.concat(this.formatQuickPlay())
if (this.options.proxy) {
args.push(
'--proxyHost',
this.options.proxy.host,
'--proxyPort',
this.options.proxy.port || '8080',
'--proxyUser',
this.options.proxy.username,
'--proxyPass',
this.options.proxy.password
)
}
// Drop holes/objects left over from rule expansion.
args = args.filter(value => typeof value === 'string' || typeof value === 'number')
this.client.emit('debug', '[MCLC]: Set launch options')
return args
}
async getJVM () {
const opts = {
windows: '-XX:HeapDumpPath=MojangTricksIntelDriversForPerformance_javaw.exe_minecraft.exe.heapdump',
osx: '-XstartOnFirstThread',
linux: '-Xss1M'
}
return opts[this.getOS()]
}
isLegacy () {
return this.version.assets === 'legacy' || this.version.assets === 'pre-1.6'
}
getOS () {
if (this.options.os) {
return this.options.os
} else {
switch (process.platform) {
case 'win32': return 'windows'
case 'darwin': return 'osx'
default: return 'linux'
}
}
}
// To prevent launchers from breaking when they update. Will be reworked with rewrite.
getMemory () {
if (!this.options.memory) {
this.client.emit('debug', '[MCLC]: Memory not set! Setting 1GB as MAX!')
this.options.memory = {
min: 512,
max: 1023
}
}
if (!isNaN(this.options.memory.max) && !isNaN(this.options.memory.min)) {
if (this.options.memory.max < this.options.memory.min) {
this.client.emit('debug', '[MCLC]: MIN memory is higher then MAX! Resetting!')
this.options.memory.max = 1023
this.options.memory.min = 512
}
return [`${this.options.memory.max}M`, `${this.options.memory.min}M`]
} else { return [`${this.options.memory.max}`, `${this.options.memory.min}`] }
}
async extractPackage (options = this.options) {
if (options.clientPackage.startsWith('http')) {
await this.downloadAsync(options.clientPackage, options.root, 'clientPackage.zip', true, 'client-package')
options.clientPackage = path.join(options.root, 'clientPackage.zip')
}
new Zip(options.clientPackage).extractAllTo(options.root, true)
if (options.removePackage) fs.unlinkSync(options.clientPackage)
return this.client.emit('package-extract', true)
}
}

View File

@ -1,34 +0,0 @@
import Client from "./launcher"
import Authenticator from "./authenticator"
export default class MCL {
  /**
   * Asynchronously authenticate the user using the provided username and password.
   *
   * @param {string} username - the username of the user
   * @param {string} password - the password of the user
   * @return {Promise<Object>} the authentication information
   */
  async auth(username, password) {
    return await Authenticator.getAuth(username, password)
  }

  /**
   * Launches a new client with the given options.
   *
   * @param {Object} opts - The options passed through to the underlying launcher.
   * @param {Object} [callbacks] - Optional lifecycle callbacks forwarded to the launcher.
   * @return {Promise<Client>} A promise that resolves with the launched client.
   */
  async launch(opts, callbacks) {
    const launcher = new Client()

    // Mirror every launcher event to the console for debugging.
    for (const event of ["debug", "data", "close", "error"]) {
      launcher.on(event, (e) => console.log(e))
    }

    await launcher.launch(opts, callbacks)

    return launcher
  }
}

View File

@ -1,224 +0,0 @@
import fs from "node:fs"
import path from "node:path"
import { EventEmitter } from "events"
import child from "child_process"
import Handler from "./handler"
// Core launcher engine. launch() drives the whole pipeline — option
// normalisation, Java check, optional client-package/installer handling,
// version/native/library/asset resolution via Handler, JVM argument
// assembly — and finally spawns the Java child process. Progress and
// output are surfaced as EventEmitter events: 'debug', 'arguments',
// 'data', 'close'.
export default class MCLCore extends EventEmitter {
// Runs the full launch pipeline and returns the spawned Minecraft child
// process, or null when startup fails. callbacks.install and
// callbacks.init_assets (if functions) are invoked right before the jar
// download and the asset download phases respectively.
async launch(options, callbacks = {}) {
try {
// Shallow copy: nested objects (overrides, version, ...) are still
// shared with the caller's options object.
this.options = { ...options }
this.options.root = path.resolve(this.options.root)
// Merge caller overrides over the default mirror URLs and ForgeWrapper
// metadata; caller-provided url/fw objects win over the defaults.
this.options.overrides = {
detached: true,
...this.options.overrides,
url: {
meta: 'https://launchermeta.mojang.com',
resource: 'https://resources.download.minecraft.net',
mavenForge: 'http://files.minecraftforge.net/maven/',
defaultRepoForge: 'https://libraries.minecraft.net/',
fallbackMaven: 'https://search.maven.org/remotecontent?filepath=',
...this.options.overrides
? this.options.overrides.url
: undefined
},
fw: {
baseUrl: 'https://github.com/ZekerZhayard/ForgeWrapper/releases/download/',
version: '1.5.6',
sh1: 'b38d28e8b7fde13b1bc0db946a2da6760fecf98d',
size: 34715,
...this.options.overrides
? this.options.overrides.fw
: undefined
}
}
this.handler = new Handler(this)
this.printVersion()
// Abort early when no usable Java executable is found.
const java = await this.handler.checkJava(this.options.javaPath || 'java')
if (!java.run) {
this.emit('debug', `[MCLC]: Couldn't start Minecraft due to: ${java.message}`)
this.emit('close', 1)
return null
}
this.createRootDirectory()
this.createGameDirectory()
await this.extractPackage()
if (this.options.installer) {
// So installers that create a profile in launcher_profiles.json can run without breaking.
const profilePath = path.join(this.options.root, 'launcher_profiles.json')
if (!fs.existsSync(profilePath) || !JSON.parse(fs.readFileSync(profilePath)).profiles) {
fs.writeFileSync(profilePath, JSON.stringify({ profiles: {} }, null, 4))
}
const code = await this.handler.runInstaller(this.options.installer)
if (!this.options.version.custom && code === 0) {
this.emit('debug', '[MCLC]: Installer successfully ran, but no custom version was provided')
}
this.emit('debug', `[MCLC]: Installer closed with code ${code}`)
}
const directory = this.options.overrides.directory || path.join(this.options.root, 'versions', this.options.version.custom ? this.options.version.custom : this.options.version.number)
this.options.directory = directory
const versionFile = await this.handler.getVersion()
const mcPath = this.options.overrides.minecraftJar || (this.options.version.custom
? path.join(this.options.root, 'versions', this.options.version.custom, `${this.options.version.custom}.jar`)
: path.join(directory, `${this.options.version.number}.jar`))
this.options.mcPath = mcPath
const nativePath = await this.handler.getNatives()
if (!fs.existsSync(mcPath)) {
this.emit('debug', '[MCLC]: Attempting to download Minecraft version jar')
if (typeof callbacks.install === "function") {
callbacks.install()
}
await this.handler.getJar()
}
const modifyJson = await this.getModifyJson()
const args = []
// Baseline JVM flags plus native library path and heap limits.
let jvm = [
'-XX:-UseAdaptiveSizePolicy',
'-XX:-OmitStackTraceInFastThrow',
'-Dfml.ignorePatchDiscrepancies=true',
'-Dfml.ignoreInvalidMinecraftCertificates=true',
`-Djava.library.path=${nativePath}`,
`-Xmx${this.handler.getMemory()[0]}`,
`-Xms${this.handler.getMemory()[1]}`
]
// osx only gets its platform flag (-XstartOnFirstThread) on 1.13+.
if (this.handler.getOS() === 'osx') {
if (parseInt(versionFile.id.split('.')[1]) > 12) jvm.push(await this.handler.getJVM())
} else jvm.push(await this.handler.getJVM())
if (this.options.customArgs) jvm = jvm.concat(this.options.customArgs)
if (this.options.overrides.logj4ConfigurationFile) {
jvm.push(`-Dlog4j.configurationFile=${path.resolve(this.options.overrides.logj4ConfigurationFile)}`)
}
// Log4Shell mitigation per Mojang's advisory:
// https://help.minecraft.net/hc/en-us/articles/4416199399693-Security-Vulnerability-in-Minecraft-Java-Edition
if (parseInt(versionFile.id.split('.')[1]) === 18 && !parseInt(versionFile.id.split('.')[2])) jvm.push('-Dlog4j2.formatMsgNoLookups=true')
if (parseInt(versionFile.id.split('.')[1]) === 17) jvm.push('-Dlog4j2.formatMsgNoLookups=true')
if (parseInt(versionFile.id.split('.')[1]) < 17) {
if (!jvm.find(arg => arg.includes('Dlog4j.configurationFile'))) {
const configPath = path.resolve(this.options.overrides.cwd || this.options.root)
const intVersion = parseInt(versionFile.id.split('.')[1])
if (intVersion >= 12) {
await this.handler.downloadAsync('https://launcher.mojang.com/v1/objects/02937d122c86ce73319ef9975b58896fc1b491d1/log4j2_112-116.xml',
configPath, 'log4j2_112-116.xml', true, 'log4j')
jvm.push('-Dlog4j.configurationFile=log4j2_112-116.xml')
} else if (intVersion >= 7) {
await this.handler.downloadAsync('https://launcher.mojang.com/v1/objects/dd2b723346a8dcd48e7f4d245f6bf09e98db9696/log4j2_17-111.xml',
configPath, 'log4j2_17-111.xml', true, 'log4j')
jvm.push('-Dlog4j.configurationFile=log4j2_17-111.xml')
}
}
}
const classes = this.options.overrides.classes || this.handler.cleanUp(await this.handler.getClasses(modifyJson))
const classPaths = ['-cp']
const separator = this.handler.getOS() === 'windows' ? ';' : ':'
this.emit('debug', `[MCLC]: Using ${separator} to separate class paths`)
// Handling launch arguments.
const file = modifyJson || versionFile
// So mods like fabric work.
const jar = fs.existsSync(mcPath)
? `${separator}${mcPath}`
: `${separator}${path.join(directory, `${this.options.version.number}.jar`)}`
classPaths.push(`${this.options.forge ? this.options.forge + separator : ''}${classes.join(separator)}${jar}`)
classPaths.push(file.mainClass)
this.emit('debug', '[MCLC]: Attempting to download assets')
if (typeof callbacks.init_assets === "function") {
callbacks.init_assets()
}
await this.handler.getAssets()
// Forge -> Custom -> Vanilla
const launchOptions = await this.handler.getLaunchOptions(modifyJson)
const launchArguments = args.concat(jvm, classPaths, launchOptions)
this.emit('arguments', launchArguments)
this.emit('debug', `[MCLC]: Launching with arguments ${launchArguments.join(' ')}`)
return this.startMinecraft(launchArguments)
} catch (e) {
// NOTE(review): every failure is swallowed here and surfaced only as a
// 'debug' event plus a null return — callers cannot distinguish error
// kinds. Confirm whether an 'error' event / rethrow would be preferable.
this.emit('debug', `[MCLC]: Failed to start due to ${e}, closing...`)
return null
}
}
// Emits the MCLC package version as a 'debug' event when package.json is
// reachable relative to this module.
// NOTE(review): `require` here only works if this ES-module-style file is
// processed by a CJS-compatible bundler/transpiler — confirm build setup.
printVersion() {
if (fs.existsSync(path.join(__dirname, '..', 'package.json'))) {
const { version } = require('../package.json')
this.emit('debug', `[MCLC]: MCLC version ${version}`)
} else { this.emit('debug', '[MCLC]: Package JSON not found, skipping MCLC version check.') }
}
// Creates the game root directory if it does not exist yet.
createRootDirectory() {
if (!fs.existsSync(this.options.root)) {
this.emit('debug', '[MCLC]: Attempting to create root folder')
fs.mkdirSync(this.options.root)
}
}
// Resolves and (recursively) creates overrides.gameDirectory when set.
createGameDirectory() {
if (this.options.overrides.gameDirectory) {
this.options.overrides.gameDirectory = path.resolve(this.options.overrides.gameDirectory)
if (!fs.existsSync(this.options.overrides.gameDirectory)) {
fs.mkdirSync(this.options.overrides.gameDirectory, { recursive: true })
}
}
}
// Delegates client-package extraction to the handler when one is configured.
async extractPackage() {
if (this.options.clientPackage) {
this.emit('debug', `[MCLC]: Extracting client package to ${this.options.root}`)
await this.handler.extractPackage()
}
}
// Returns the version JSON that overrides the vanilla one: the generated
// ForgeWrapper JSON when options.forge is set, else the custom version's
// JSON from disk, else null (plain vanilla launch).
async getModifyJson() {
let modifyJson = null
if (this.options.forge) {
this.options.forge = path.resolve(this.options.forge)
this.emit('debug', '[MCLC]: Detected Forge in options, getting dependencies')
modifyJson = await this.handler.getForgedWrapped()
} else if (this.options.version.custom) {
this.emit('debug', '[MCLC]: Detected custom in options, setting custom version file')
modifyJson = modifyJson || JSON.parse(fs.readFileSync(path.join(this.options.root, 'versions', this.options.version.custom, `${this.options.version.custom}.json`), { encoding: 'utf8' }))
}
return modifyJson
}
// Spawns the Java process with the assembled arguments and re-emits its
// stdout/stderr as 'data' events and its exit code as 'close'.
startMinecraft(launchArguments) {
const minecraft = child.spawn(this.options.javaPath ? this.options.javaPath : 'java', launchArguments,
{ cwd: this.options.overrides.cwd || this.options.root, detached: this.options.overrides.detached })
minecraft.stdout.on('data', (data) => this.emit('data', data.toString('utf-8')))
minecraft.stderr.on('data', (data) => this.emit('data', data.toString('utf-8')))
minecraft.on('close', (code) => this.emit('close', code))
return minecraft
}
}

View File

@ -1,71 +0,0 @@
const copyProperty = (to, from, property, ignoreNonConfigurable) => {
	// `Function#length` should reflect the parameters of `to`, not `from`,
	// since we keep `to`'s body. `Function#prototype` is non-writable and
	// non-configurable, so it can never be modified anyway.
	// `Function#arguments` and `Function#caller` should not be copied either:
	// they were reported present in `Reflect.ownKeys` on some React Native
	// devices (#41), so we explicitly skip them.
	const skipped = ['length', 'prototype', 'arguments', 'caller'];
	if (skipped.includes(property)) {
		return;
	}

	const toDescriptor = Object.getOwnPropertyDescriptor(to, property);
	const fromDescriptor = Object.getOwnPropertyDescriptor(from, property);

	if (ignoreNonConfigurable && !canCopyProperty(toDescriptor, fromDescriptor)) {
		return;
	}

	Object.defineProperty(to, property, fromDescriptor);
};
// `Object.defineProperty()` throws when the target property exists, is not
// configurable, and either one of its descriptor flags would change or it
// is non-writable while its value would change. This predicate reports
// whether the copy is safe.
const canCopyProperty = function (toDescriptor, fromDescriptor) {
	if (toDescriptor === undefined || toDescriptor.configurable) {
		return true;
	}

	const flagsMatch =
		toDescriptor.writable === fromDescriptor.writable &&
		toDescriptor.enumerable === fromDescriptor.enumerable &&
		toDescriptor.configurable === fromDescriptor.configurable;

	return flagsMatch && (toDescriptor.writable || toDescriptor.value === fromDescriptor.value);
};
// Makes `to` share `from`'s prototype; a no-op when they already match.
const changePrototype = (to, from) => {
	const sourcePrototype = Object.getPrototypeOf(from);
	if (sourcePrototype !== Object.getPrototypeOf(to)) {
		Object.setPrototypeOf(to, sourcePrototype);
	}
};
// Renders the wrapped function's source with a marker comment prefix.
function wrappedToString(withName, fromBody) {
	return `/* Wrapped ${withName}*/\n${fromBody}`;
}

const toStringDescriptor = Object.getOwnPropertyDescriptor(Function.prototype, 'toString');
const toStringName = Object.getOwnPropertyDescriptor(Function.prototype.toString, 'name');

// We call `from.toString()` early (not lazily) so `from` can be garbage
// collected, and use `bind()` instead of a closure for the same reason.
// Calling it early also caches the result in case `to.toString()` runs
// several times.
const changeToString = (to, from, name) => {
	const label = name === '' ? '' : `with ${name.trim()}() `;
	const sourceText = from.toString();
	const newToString = wrappedToString.bind(null, label, sourceText);
	// Ensure `to.toString` is non-enumerable and keeps the standard `name`.
	Object.defineProperty(newToString, 'name', toStringName);
	Object.defineProperty(to, 'toString', { ...toStringDescriptor, value: newToString });
};
// Copies `from`'s own properties, prototype and toString onto `to`, so the
// wrapper `to` mimics the wrapped function `from`. Returns `to`.
export default function mimicFunction(to, from, { ignoreNonConfigurable = false } = {}) {
	const originalName = to.name;

	Reflect.ownKeys(from).forEach((property) => {
		copyProperty(to, from, property, ignoreNonConfigurable);
	});

	changePrototype(to, from);
	changeToString(to, from, originalName);

	return to;
}

View File

@ -1,84 +0,0 @@
/**
Options accepted by `npmRunPath` when computing an augmented PATH string.
*/
export interface RunPathOptions {
/**
Working directory.
@default process.cwd()
*/
readonly cwd?: string | URL;
/**
PATH to be appended. Default: [`PATH`](https://github.com/sindresorhus/path-key).
Set it to an empty string to exclude the default PATH.
*/
readonly path?: string;
/**
Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH.
This can be either an absolute path or a path relative to the `cwd` option.
@default process.execPath
*/
readonly execPath?: string | URL;
}
/**
Environment-object shape, matching `process.env` (values may be `undefined`).
*/
export type ProcessEnv = Record<string, string | undefined>;
/**
Options accepted by `npmRunPathEnv` when computing an augmented environment object.
*/
export interface EnvOptions {
/**
The working directory.
@default process.cwd()
*/
readonly cwd?: string | URL;
/**
Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options.
*/
readonly env?: ProcessEnv;
/**
The path to the current Node.js executable. Its directory is pushed to the front of PATH.
This can be either an absolute path or a path relative to the `cwd` option.
@default process.execPath
*/
readonly execPath?: string | URL;
}
/**
Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries.

@param options - See {@link RunPathOptions}.
@returns The augmented path string.
@example
```
import childProcess from 'node:child_process';
import {npmRunPath} from 'npm-run-path';
console.log(process.env.PATH);
//=> '/usr/local/bin'
console.log(npmRunPath());
//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin'
```
*/
export function npmRunPath(options?: RunPathOptions): string;
/**
Get a copy of an environment object whose PATH is prepended with locally installed binaries.

@param options - See {@link EnvOptions}.
@returns The augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object.
@example
```
import childProcess from 'node:child_process';
import {npmRunPathEnv} from 'npm-run-path';
// `foo` is a locally installed binary
childProcess.execFileSync('foo', {
env: npmRunPathEnv()
});
```
*/
export function npmRunPathEnv(options?: EnvOptions): ProcessEnv;

View File

@ -1,51 +0,0 @@
import process from 'node:process';
import path from 'node:path';
import url from 'node:url';
// Resolve the environment-variable name holding the executable search path.
// POSIX platforms always use `PATH`; Windows env keys are case-insensitive,
// so we return the LAST key spelled like "path" (matching the original
// reverse-then-find precedence), falling back to the conventional `Path`.
function pathKey({ env = process.env, platform = process.platform } = {}) {
	if (platform !== 'win32') {
		return 'PATH';
	}

	let winKey = 'Path';
	for (const key of Object.keys(env)) {
		if (key.toUpperCase() === 'PATH') {
			winKey = key;
		}
	}

	return winKey;
}
/**
 * Build a PATH string that puts every ancestor `node_modules/.bin`
 * directory (from `cwd` up to the filesystem root) and the directory of the
 * running `node` binary in front of the existing PATH.
 */
export function npmRunPath(options = {}) {
	const {
		cwd = process.cwd(),
		path: path_ = process.env[pathKey()],
		execPath = process.execPath,
	} = options;

	const execPathString = execPath instanceof URL ? url.fileURLToPath(execPath) : execPath;
	const cwdString = cwd instanceof URL ? url.fileURLToPath(cwd) : cwd;

	// Walk from cwd to the root, collecting `<dir>/node_modules/.bin`
	// for every level (the root itself included).
	const result = [];
	let directory = path.resolve(cwdString);
	for (;;) {
		result.push(path.join(directory, 'node_modules/.bin'));
		const parent = path.resolve(directory, '..');
		if (parent === directory) {
			break;
		}
		directory = parent;
	}

	// Ensure the running `node` binary is used.
	result.push(path.resolve(cwdString, execPathString, '..'));

	return [...result, path_].join(path.delimiter);
}
/**
 * Return a copy of `env` whose PATH entry has been augmented by
 * {@link npmRunPath}. The caller's env object is never mutated.
 */
export function npmRunPathEnv({ env = process.env, ...options } = {}) {
	// Shallow copy so we can rewrite the PATH entry safely.
	const augmented = { ...env };

	const key = pathKey({ env: augmented });
	const runPathOptions = { ...options, path: augmented[key] };
	augmented[key] = npmRunPath(runPathOptions);

	return augmented;
}

View File

@ -1,59 +0,0 @@
// Type declarations for the vendored `onetime` helper, which guards a
// function so it runs at most once.

/** Options accepted by {@link onetime}. */
export type Options = {
	/**
	Throw an error when called more than once.

	@default false
	*/
	readonly throw?: boolean;
};

declare const onetime: {
	/**
	Ensure a function is only called once. When called multiple times it will return the return value from the first call.

	@param fn - The function that should only be called once.
	@returns A function that only calls `fn` once.

	@example
	```
	import onetime from 'onetime';

	let index = 0;
	const foo = onetime(() => ++index);

	foo(); //=> 1
	foo(); //=> 1
	foo(); //=> 1

	onetime.callCount(foo); //=> 3
	```
	*/
	<ArgumentsType extends unknown[], ReturnType>(
		fn: (...arguments_: ArgumentsType) => ReturnType,
		options?: Options
	): (...arguments_: ArgumentsType) => ReturnType;

	/**
	Get the number of times `fn` has been called.

	@param fn - The function to get call count from.
	@returns A number representing how many times `fn` has been called.

	@example
	```
	import onetime from 'onetime';

	const foo = onetime(() => {});
	foo();
	foo();
	foo();

	console.log(onetime.callCount(foo));
	//=> 3
	```
	*/
	callCount(fn: (...arguments_: any[]) => unknown): number;
};

export default onetime;

View File

@ -1,41 +0,0 @@
import mimicFunction from '../mimic-function';
// Tracks how many times each wrapped function has been invoked.
const calledFunctions = new WeakMap();

/**
 * Wrap `function_` so it executes at most once; later calls return the
 * cached first result, or throw when `options.throw` is true.
 */
const onetime = (function_, options = {}) => {
	if (typeof function_ !== 'function') {
		throw new TypeError('Expected a function');
	}

	let returnValue;
	let callCount = 0;
	const functionName = function_.displayName || function_.name || '<anonymous>';

	const wrapper = function (...arguments_) {
		calledFunctions.set(wrapper, ++callCount);

		if (callCount === 1) {
			// First call: run the function, then drop the reference so the
			// original can be garbage-collected.
			returnValue = function_.apply(this, arguments_);
			function_ = undefined;
		} else if (options.throw === true) {
			throw new Error(`Function \`${functionName}\` can only be called once`);
		}

		return returnValue;
	};

	mimicFunction(wrapper, function_);
	calledFunctions.set(wrapper, callCount);

	return wrapper;
};

/** Number of times a wrapped function has been invoked. */
onetime.callCount = function_ => {
	if (!calledFunctions.has(function_)) {
		throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`);
	}

	return calledFunctions.get(function_);
};

export default onetime;

View File

@ -1,13 +0,0 @@
import mcl from "./mcl"
import ipc from "./renderer_ipc"
import rfs from "./rfs"
import exec from "./execa/public_lib"
import auth from "./auth"
// Libraries exposed to package manifests; the keys are the names manifests
// reference in their `import_libs` list.
export default {
    mcl,
    ipc,
    rfs,
    exec,
    auth,
}

View File

@ -1,7 +0,0 @@
import sendToRender from "../../utils/sendToRender"
// Thin IPC facade: forwards events from the main process to the renderer
// window through the shared sendToRender utility.
export default class RendererIPC {
    /**
     * Send an event to the renderer.
     *
     * @param {...*} args - forwarded verbatim to sendToRender
     * @returns {Promise<*>} whatever sendToRender resolves to
     */
    async send(...args) {
        return await sendToRender(...args)
    }
}

View File

@ -1,47 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import { execa } from "../../lib/execa"
import Vars from "../../vars"
// Remote filesystem helper: mounts a remote HTTP directory inside a
// package's install path using rclone's VFS layer.
export default class RFS {
    constructor(manifest) {
        this.manifest = manifest
    }

    /**
     * Mount `remote_dir` under the package install path.
     *
     * @param {string} remote_dir - remote HTTP URL to mount
     * @param {string} [to] - subdirectory name; defaults to "rfs_mount"
     * @param {Function} [cb] - optional callback receiving the rclone process
     * @returns {Promise|boolean} true when the mount point already exists,
     *   otherwise the spawned rclone process
     */
    async mount(remote_dir, to, cb) {
        const baseDir = path.resolve(this.manifest.install_path)
        const subDir = typeof to === "string" ? to : "rfs_mount"
        const mountPoint = path.join(baseDir, subDir)

        // check if already mounted
        if (fs.existsSync(mountPoint)) {
            return true
        }

        const rcloneArgs = [
            "mount",
            "--vfs-cache-mode",
            "full",
            "--http-url",
            remote_dir,
            ":http:",
            mountPoint,
        ]

        const process = execa(Vars.rclone_path, rcloneArgs, {
            stdout: "inherit",
            stderr: "inherit",
        })

        if (typeof cb === "function") {
            cb(process)
        }

        return process
    }
}

View File

@ -1,107 +0,0 @@
import { PathLike } from 'node:fs'
import { rename, writeFile } from 'node:fs/promises'
import { basename, dirname, join } from 'node:path'
import { fileURLToPath } from 'node:url'
// Derive the hidden sibling path used for atomic writes:
// /some/file -> /some/.file.tmp
function getTempFilename(file: PathLike): string {
  const asString = file instanceof URL ? fileURLToPath(file) : file.toString()
  const directory = dirname(asString)
  const hiddenName = `.${basename(asString)}.tmp`
  return join(directory, hiddenName)
}
// Retries an asynchronous operation with a delay between retries and a
// maximum retry count.
//
// Fix: `fn` is now always attempted at least once — with `maxRetries <= 0`
// the original loop body never ran, so the call resolved successfully
// without ever invoking `fn`. The last failure is rethrown once all
// attempts are exhausted.
async function retryAsyncOperation(
  fn: () => Promise<void>,
  maxRetries: number,
  delayMs: number,
): Promise<void> {
  const attempts = Math.max(1, maxRetries)
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn()
    } catch (error) {
      if (i < attempts - 1) {
        // Not the last attempt: wait, then retry.
        await new Promise((resolve) => setTimeout(resolve, delayMs))
      } else {
        throw error // Rethrow the error if max retries reached
      }
    }
  }
}
// Resolver/rejecter pair types for the queued-write promises below.
type Resolve = () => void
type Reject = (error: Error) => void
// Whatever fs.promises.writeFile accepts as file contents.
type Data = Parameters<typeof writeFile>[1]
/**
 * Serialized, atomic file writer.
 *
 * Writes go to a hidden temp file which is then renamed over the target
 * (the rename is retried, which helps on Windows where antivirus/indexers
 * can briefly lock files — NOTE(review): retry rationale presumed, confirm).
 * Only one write runs at a time; while a write is in flight, newer data
 * replaces any previously queued data, so at most one pending write exists
 * and intermediate payloads are intentionally dropped.
 */
export class Writer {
  #filename: PathLike
  #tempFilename: PathLike
  // True while a #write() is in flight.
  #locked = false
  // Resolve/reject pair for the write currently in flight.
  #prev: [Resolve, Reject] | null = null
  // Resolve/reject pair for the queued (next) write.
  #next: [Resolve, Reject] | null = null
  // Singleton promise shared by all callers waiting on the queued write.
  #nextPromise: Promise<void> | null = null
  // Payload for the queued write; newer calls overwrite it.
  #nextData: Data | null = null

  // File is locked, add data for later
  #add(data: Data): Promise<void> {
    // Only keep most recent data
    this.#nextData = data

    // Create a singleton promise to resolve all next promises once next data is written
    this.#nextPromise ||= new Promise((resolve, reject) => {
      this.#next = [resolve, reject]
    })

    // Return a promise that will resolve at the same time as next promise
    return new Promise((resolve, reject) => {
      this.#nextPromise?.then(resolve).catch(reject)
    })
  }

  // File isn't locked, write data
  async #write(data: Data): Promise<void> {
    // Lock file
    this.#locked = true
    try {
      // Atomic write
      await writeFile(this.#tempFilename, data, 'utf-8')
      await retryAsyncOperation(
        async () => {
          await rename(this.#tempFilename, this.#filename)
        },
        10,
        100,
      )

      // Call resolve
      this.#prev?.[0]()
    } catch (err) {
      // Call reject
      if (err instanceof Error) {
        this.#prev?.[1](err)
      }
      throw err
    } finally {
      // Unlock file
      this.#locked = false
      // Promote the queued waiter to "in flight" and flush its data, if any.
      this.#prev = this.#next
      this.#next = this.#nextPromise = null
      if (this.#nextData !== null) {
        const nextData = this.#nextData
        this.#nextData = null
        await this.write(nextData)
      }
    }
  }

  constructor(filename: PathLike) {
    this.#filename = filename
    this.#tempFilename = getTempFilename(filename)
  }

  /** Write `data` atomically; concurrent calls are coalesced to the latest. */
  async write(data: Data): Promise<void> {
    return this.#locked ? this.#add(data) : this.#write(data)
  }
}

View File

@ -1,18 +0,0 @@
/**
Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from a string or Uint8Array.

Removes a single trailing LF, or a trailing CRLF pair; any earlier newlines are preserved.

@returns The input without any final newline.

@example
```
import stripFinalNewline from 'strip-final-newline';

stripFinalNewline('foo\nbar\n\n');
//=> 'foo\nbar\n'

const uint8Array = new TextEncoder().encode('foo\nbar\n\n')

new TextDecoder().decode(stripFinalNewline(uint8Array));
//=> 'foo\nbar\n'
```
*/
export default function stripFinalNewline<T extends string | Uint8Array>(input: T): T;

View File

@ -1,26 +0,0 @@
// Newline markers, defined once for both string and binary representations.
const LF = '\n';
const CR = '\r';
const LF_BINARY = LF.codePointAt(0);
const CR_BINARY = CR.codePointAt(0);

// Drop a single trailing "\n" (or "\r\n" pair) from a string.
const stripFinalNewlineString = (value) => {
	if (value.at(-1) !== LF) {
		return value;
	}

	return value.slice(0, value.at(-2) === CR ? -2 : -1);
};

// Same, but for 1-byte typed-array views.
const stripFinalNewlineBinary = (view) => {
	if (view.at(-1) !== LF_BINARY) {
		return view;
	}

	return view.subarray(0, view.at(-2) === CR_BINARY ? -2 : -1);
};

/**
 * Strip the final newline (LF or CRLF) from a string or Uint8Array-like
 * view; earlier newlines are untouched.
 */
export default function stripFinalNewline(input) {
	if (typeof input === 'string') {
		return stripFinalNewlineString(input);
	}

	if (!(ArrayBuffer.isView(input) && input.BYTES_PER_ELEMENT === 1)) {
		throw new Error('Input must be a string or a Uint8Array');
	}

	return stripFinalNewlineBinary(input);
}

View File

@ -1,50 +0,0 @@
import { JSONFilePreset } from "./lib/lowdb"
import DefaultDB from "./defaults/local_db"
import Vars from "./vars"
/**
 * Open (or create) the local JSON database seeded with the default schema.
 * Each call returns a fresh lowdb instance bound to the same file.
 */
export async function withDB() {
    return await JSONFilePreset(Vars.local_db, DefaultDB)
}
/**
 * Insert or replace a package record in the local database, matching
 * existing entries by `id`.
 *
 * @param {object} pkg - the package record to persist
 * @returns {Promise<object>} the same `pkg` that was saved
 */
export async function updateInstalledPackage(pkg) {
    const db = await withDB()

    await db.update((data) => {
        const existingIndex = data["packages"].findIndex((entry) => entry.id === pkg.id)

        if (existingIndex === -1) {
            data["packages"].push(pkg)
        } else {
            data["packages"][existingIndex] = pkg
        }

        return data
    })

    return pkg
}
/**
 * Read installed package records from the local database.
 *
 * @param {string} [pkg_id] - when given, return only the matching record
 * @returns {Promise<object|object[]|undefined>} one record (or undefined),
 *   or the whole list when no id is given
 */
export async function getInstalledPackages(pkg_id) {
    const db = await withDB()
    const packages = db.data["packages"]

    if (!pkg_id) {
        return packages
    }

    return packages.find((entry) => entry.id === pkg_id)
}
/**
 * Remove a package record from the local database.
 *
 * @param {string} pkg_id - id of the record to drop
 * @returns {Promise<string>} the same `pkg_id`
 */
export async function deleteInstalledPackage(pkg_id) {
    const db = await withDB()

    await db.update((data) => {
        data["packages"] = data["packages"].filter((entry) => entry.id !== pkg_id)
        return data
    })

    return pkg_id
}

export default withDB

View File

@ -1,108 +0,0 @@
import fs from "node:fs"
import open from "open"
import Vars from "./vars"
import * as local_db from "./local_db"
import InstallCMD from "./commands/install"
import UpdateCMD from "./commands/update"
import ApplyCMD from "./commands/apply"
import UninstallCMD from "./commands/uninstall"
import ExecuteCMD from "./commands/execute"
// Facade over the package-manager commands; also ensures the on-disk
// runtime/packages directories exist.
export default class PkgManager {
    constructor() {
        // initialize() is async but performs only synchronous fs work; the
        // original fired it without a handler, so any failure became an
        // unhandled promise rejection. Attach a handler so errors surface
        // in the log instead.
        this.initialize().catch((error) => {
            console.error(`[PkgManager] Failed to initialize >`, error)
        })
    }

    /**
     * Ensure the runtime and packages directories exist on disk.
     */
    async initialize() {
        if (!fs.existsSync(Vars.runtime_path)) {
            fs.mkdirSync(Vars.runtime_path, { recursive: true })
        }

        if (!fs.existsSync(Vars.packages_path)) {
            fs.mkdirSync(Vars.packages_path, { recursive: true })
        }
    }

    /**
     * Opens the runtime path folder.
     */
    openRuntimePath() {
        open(Vars.runtime_path)
    }

    /**
     * Asynchronously retrieves the installed packages.
     *
     * @param {...*} args - forwarded to the local database query
     * @return {Promise<*>} the result of the local database function call
     */
    async getInstalledPackages(...args) {
        return await local_db.getInstalledPackages(...args)
    }

    /**
     * Asynchronously opens a package's install folder, if the package exists.
     *
     * @param {string} pkg_id - the ID of the package to open
     */
    async open(pkg_id) {
        const pkg = await local_db.getInstalledPackages(pkg_id)

        if (pkg) {
            open(pkg.install_path)
        }
    }

    /**
     * Asynchronously installs a package.
     *
     * @param {...*} args - forwarded to the install command
     * @return {Promise<*>} the result of the installation
     */
    async install(...args) {
        return await InstallCMD(...args)
    }

    /**
     * Asynchronously updates a package.
     *
     * @param {...*} args - forwarded to the update command
     * @return {Promise<*>} the result of the update operation
     */
    async update(...args) {
        return await UpdateCMD(...args)
    }

    /**
     * Asynchronously applies changes to a package.
     *
     * @param {...*} args - forwarded to the apply command
     * @return {Promise<*>} the result of the apply operation
     */
    async applyChanges(...args) {
        return await ApplyCMD(...args)
    }

    /**
     * Asynchronously uninstalls a package.
     *
     * @param {...*} args - forwarded to the uninstall command
     * @return {Promise<*>} the result of the uninstall operation
     */
    async uninstall(...args) {
        return await UninstallCMD(...args)
    }

    /**
     * Executes a package command asynchronously.
     *
     * @param {...*} args - forwarded to the execute command
     * @return {Promise<*>} the result of the command execution
     */
    async execute(...args) {
        return await ExecuteCMD(...args)
    }
}

View File

@ -1,35 +0,0 @@
import resolveDestBin from "@utils/resolveDestBin"
import Vars from "@vars"
// Base location of the hosted helper binaries.
const baseURL = "https://storage.ragestudio.net/rstudio/binaries"

// Helper binaries the app downloads on first run. Each entry describes:
// the download URL (platform/arch segment resolved by resolveDestBin),
// the on-disk destination, whether to chmod it after download
// (rewritePermissions), and whether the payload is an archive to extract.
export default [
    {
        id: "7zip-bin",
        // Windows ships a .exe; other platforms get a bare executable.
        url: resolveDestBin(`${baseURL}/7zip-bin`, process.platform === "win32" ? "7za.exe" : "7za"),
        destination: Vars.sevenzip_path,
        rewritePermissions: true,
        extract: false,
    },
    {
        id: "git-bin",
        url: resolveDestBin(`${baseURL}/git`, "git-bundle-2.4.0.zip"),
        destination: Vars.git_path,
        rewritePermissions: true,
        extract: true,
    },
    {
        id: "rclone-bin",
        url: resolveDestBin(`${baseURL}/rclone-bin`, "rclone-bin.zip"),
        destination: Vars.rclone_path,
        rewritePermissions: true,
        extract: true,
    },
    {
        id: "java-jdk",
        url: resolveDestBin(`${baseURL}/java-jdk`, "java-jdk.zip"),
        destination: Vars.java_path,
        rewritePermissions: true,
        extract: true,
    },
]

View File

@ -1,144 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import os from "node:os"
import ChildProcess from "node:child_process"
import { pipeline as streamPipeline } from "node:stream/promises"
import unzipper from "unzipper"
import got from "got"
import Vars from "./vars"
// Build a platform/arch-specific download URL of the form
// `<pre>/<os>/<arch>/<post>`, where <os> is "mac", "win" or "linux".
function resolveDestBin(pre, post) {
    let osSegment = "linux"

    if (process.platform === "darwin") {
        osSegment = "mac"
    } else if (process.platform === "win32") {
        osSegment = "win"
    }

    return `${pre}/${osSegment}/${process.arch}/${post}`
}
/**
 * First-run setup: downloads and unpacks the helper binaries (7-Zip, git,
 * rclone, Java JDK) into the binaries directory, reporting progress to the
 * renderer via `global.win` "setup_step" events.
 *
 * NOTE(review): assumes `global.win` is already set to the main
 * BrowserWindow before this runs — confirm against the caller.
 */
async function main() {
    const binariesPath = Vars.binaries_path

    if (!fs.existsSync(binariesPath)) {
        fs.mkdirSync(binariesPath, { recursive: true })
    }

    let sevenzip_exec = Vars.sevenzip_path
    let git_exec = Vars.git_path
    let rclone_exec = Vars.rclone_path

    // 7-Zip: single executable, downloaded for every platform.
    if (!fs.existsSync(sevenzip_exec)) {
        global.win.webContents.send("setup_step", "Downloading 7z binaries...")
        console.log(`Downloading 7z binaries...`)

        fs.mkdirSync(path.resolve(binariesPath, "7z-bin"), { recursive: true })

        let url = resolveDestBin(`https://storage.ragestudio.net/rstudio/binaries/7zip-bin`, process.platform === "win32" ? "7za.exe" : "7za")

        await streamPipeline(
            got.stream(url),
            fs.createWriteStream(sevenzip_exec)
        )

        if (os.platform() !== "win32") {
            ChildProcess.execSync("chmod +x " + sevenzip_exec)
        }
    }

    // Git: only bundled on Windows (other platforms use the system git);
    // downloaded as a zip, extracted, then the archive is removed.
    if (!fs.existsSync(git_exec) && process.platform === "win32") {
        const tempPath = path.resolve(binariesPath, "git-bundle.zip")
        const binPath = path.resolve(binariesPath, "git-bin")

        if (!fs.existsSync(tempPath)) {
            global.win.webContents.send("setup_step", "Downloading GIT binaries...")
            console.log(`Downloading git binaries...`)

            let url = resolveDestBin(`https://storage.ragestudio.net/rstudio/binaries/git`, "git-bundle-2.4.0.zip")

            await streamPipeline(
                got.stream(url),
                fs.createWriteStream(tempPath)
            )
        }

        global.win.webContents.send("setup_step", "Extracting GIT binaries...")
        console.log(`Extracting GIT...`)

        await new Promise((resolve, reject) => {
            fs.createReadStream(tempPath).pipe(unzipper.Extract({ path: binPath })).on("close", resolve).on("error", reject)
        })

        fs.unlinkSync(tempPath)
    }

    // rclone: Windows-only bundle, zip extracted next to the other binaries.
    if (!fs.existsSync(Vars.rclone_path) && process.platform === "win32") {
        console.log(`Downloading rclone binaries...`)
        global.win.webContents.send("setup_step", "Downloading rclone binaries...")

        const tempPath = path.resolve(binariesPath, "rclone-bin.zip")

        let url = resolveDestBin(`https://storage.ragestudio.net/rstudio/binaries/rclone`, "rclone-bin.zip")

        await streamPipeline(
            got.stream(url),
            fs.createWriteStream(tempPath)
        )

        global.win.webContents.send("setup_step", "Extracting rclone binaries...")

        await new Promise((resolve, reject) => {
            fs.createReadStream(tempPath).pipe(unzipper.Extract({ path: path.resolve(binariesPath, "rclone-bin") })).on("close", resolve).on("error", reject)
        })

        // NOTE(review): this chmod branch is unreachable — the enclosing
        // `if` already requires process.platform === "win32"; confirm intent.
        if (os.platform() !== "win32") {
            ChildProcess.execSync("chmod +x " + Vars.rclone_path)
        }

        fs.unlinkSync(tempPath)
    }

    // Java JDK: downloaded and extracted on every platform.
    if (!fs.existsSync(Vars.java_path)) {
        console.log(`Downloading java binaries...`)
        global.win.webContents.send("setup_step", "Downloading Java JDK...")

        const tempPath = path.resolve(binariesPath, "java-jdk.zip")

        let url = resolveDestBin(`https://storage.ragestudio.net/rstudio/binaries/java`, "java-jdk.zip")

        await streamPipeline(
            got.stream(url),
            fs.createWriteStream(tempPath)
        )

        global.win.webContents.send("setup_step", "Extracting JAVA...")

        await new Promise((resolve, reject) => {
            fs.createReadStream(tempPath).pipe(unzipper.Extract({ path: path.resolve(binariesPath, "java-jdk") })).on("close", resolve).on("error", reject)
        })

        if (os.platform() !== "win32") {
            ChildProcess.execSync("chmod +x " + path.resolve(binariesPath, "java-jdk"))
        }

        fs.unlinkSync(tempPath)
    }

    console.log(`7z binaries: ${sevenzip_exec}`)
    console.log(`GIT binaries: ${git_exec}`)
    console.log(`rclone binaries: ${rclone_exec}`)
    console.log(`JAVA jdk: ${Vars.java_path}`)

    // Clear the step indicator and tell the renderer setup has finished.
    global.win.webContents.send("setup_step", undefined)
    global.win.webContents.send("setup:done")
}

export default main

View File

@ -1,44 +0,0 @@
import fs from "node:fs"
import path from "node:path"
import { pipeline as streamPipeline } from "node:stream/promises"
import { extractFull } from "node-7z"
import unzipper from "unzipper"
import Vars from "../vars"
export async function extractFile(file, dest) {
const ext = path.extname(file)
console.log(`extractFile() | Extracting ${file} to ${dest}`)
switch (ext) {
case ".zip": {
await streamPipeline(
fs.createReadStream(file),
unzipper.Extract({
path: dest,
})
)
break
}
case ".7z": {
await extractFull(file, dest, {
$bin: Vars.sevenzip_path,
})
break
}
case ".gz": {
await extractFull(file, dest, {
$bin: Vars.sevenzip_path
})
break
}
default:
throw new Error(`Unsupported file extension: ${ext}`)
}
return dest
}
export default extractFile

View File

@ -1,56 +0,0 @@
import path from "node:path"
import os from "node:os"
import lodash from "lodash"
import Vars from "../vars"
import PublicLibs from "../lib/public_bind"
// Resolve the requested public library names into ready-to-use values.
// Entries that are functions (classes) are constructed with the binding
// context; plain objects are passed through; unknown names are skipped.
async function importLib(libs, bindCtx) {
    const resolved = {}

    for await (const name of libs) {
        const entry = PublicLibs[name]

        if (!entry) {
            continue
        }

        resolved[name] = typeof entry === "function" ? new entry(bindCtx) : entry
    }

    return resolved
}
/**
 * Prepare a package manifest for use: resolve its install path, run its
 * optional async `init()` hook (deep-merging the hook's result back into
 * the manifest), and instantiate any public libraries it requests.
 *
 * @param {object} manifest - raw package manifest (mutated in place)
 * @returns {Promise<object>} the enriched manifest
 */
export default async (manifest = {}) => {
    const install_path = path.resolve(Vars.packages_path, manifest.id)
    // e.g. "win32-x64"; lets hooks/libraries branch per platform.
    const os_string = `${os.platform()}-${os.arch()}`

    manifest.install_path = install_path

    if (typeof manifest.init === "function") {
        // init() may return extra manifest fields; merge them in and drop
        // the hook afterwards so it cannot run twice.
        const init_result = await manifest.init({
            manifest: manifest,
            install_path: install_path,
            os_string: os_string,
        })

        manifest = lodash.merge(manifest, init_result)

        delete manifest.init
    }

    if (Array.isArray(manifest.import_libs)) {
        // Libraries receive a scoped binding context, not the full manifest.
        manifest.libraries = await importLib(manifest.import_libs, {
            id: manifest.id,
            version: manifest.version,
            install_path: install_path,
            auth: manifest.auth,
            configs: manifest.configs,
            os_string: os_string,
        })

        console.log(`[${manifest.id}] initManifest() | Using libraries: ${manifest.import_libs.join(", ")}`)
    }

    return manifest
}

View File

@ -1,21 +0,0 @@
export default function parseStringVars(str, pkg) {
if (!pkg) {
return str
}
const vars = {
id: pkg.id,
name: pkg.name,
version: pkg.version,
install_path: pkg.install_path,
remote_url: pkg.remote_url,
}
const regex = /%([^%]+)%/g
str = str.replace(regex, (match, varName) => {
return vars[varName]
})
return str
}

View File

@ -1,25 +0,0 @@
import fs from "node:fs"
import path from "node:path"
/**
 * Recursively list file paths under `dir`, descending at most `maxDepth`
 * directory levels; anything deeper contributes nothing.
 *
 * @param {string} dir - directory to scan
 * @param {number} [maxDepth=3] - maximum recursion depth
 * @param {number} [current=0] - internal depth counter
 * @returns {Promise<string[]>} flat list of file paths
 */
async function readDirRecurse(dir, maxDepth = 3, current = 0) {
    if (current > maxDepth) {
        return []
    }

    const entries = await fs.promises.readdir(dir)

    const nested = await Promise.all(
        entries.map(async (entry) => {
            const entryPath = path.join(dir, entry)
            const stat = await fs.promises.stat(entryPath)

            return stat.isDirectory()
                ? readDirRecurse(entryPath, maxDepth, current + 1)
                : entryPath
        })
    )

    return nested.flat()
}

export default readDirRecurse

Some files were not shown because too many files have changed in this diff Show More