merge from local

This commit is contained in:
SrGooglo 2024-04-02 18:47:41 +02:00
parent 74bb53ada4
commit 5abcd2640c
147 changed files with 50 additions and 7427 deletions

View File

@ -1,9 +0,0 @@
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

View File

@ -1,3 +0,0 @@
{
"recommendations": ["dbaeumer.vscode-eslint"]
}

39
.vscode/launch.json vendored
View File

@ -1,39 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Main Process",
"type": "node",
"request": "launch",
"cwd": "${workspaceRoot}",
"runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite",
"windows": {
"runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite.cmd"
},
"runtimeArgs": ["--sourcemap"],
"env": {
"REMOTE_DEBUGGING_PORT": "9222"
}
},
{
"name": "Debug Renderer Process",
"port": 9222,
"request": "attach",
"type": "chrome",
"webRoot": "${workspaceFolder}/src/renderer",
"timeout": 60000,
"presentation": {
"hidden": true
}
}
],
"compounds": [
{
"name": "Debug All",
"configurations": ["Debug Main Process", "Debug Renderer Process"],
"presentation": {
"order": 1
}
}
]
}

27
.vscode/settings.json vendored
View File

@ -1,27 +0,0 @@
{
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"cSpell.words": [
"admzip",
"antd",
"APPDATA",
"catched",
"Classname",
"execa",
"getstation",
"imjs",
"ragestudio",
"rclone",
"sevenzip",
"unzipper",
"upath",
"userdata"
]
}

View File

@ -1,13 +1,54 @@
{ {
"name": "@ragestudio/relic", "name": "@ragestudio/relic-gui",
"private": true, "version": "0.17.0",
"workspaces": [ "description": "RageStudio Relic, yet another package manager.",
"packages/*" "main": "./out/main/index.js",
], "author": "RageStudio",
"repository": "https://github.com/srgooglo/rs_bundler",
"author": "SrGooglo <srgooglo@ragestudio.net>",
"license": "MIT", "license": "MIT",
"scripts": { "scripts": {
"postinstall": "node scripts/postinstall.js" "start": "electron-vite preview",
"dev": "electron-vite dev",
"build": "electron-vite build",
"postinstall": "electron-builder install-app-deps",
"pack:win": "electron-builder --win --config",
"pack:mac": "electron-builder --mac --config",
"pack:linux": "electron-builder --linux --config",
"build:win": "npm run build && npm run pack:win",
"build:mac": "npm run build && npm run pack:mac",
"build:linux": "npm run build && npm run pack:linux"
},
"dependencies": {
"@electron-toolkit/preload": "^2.0.0",
"@electron-toolkit/utils": "^2.0.0",
"@getstation/electron-google-oauth2": "^14.0.0",
"@imjs/electron-differential-updater": "^5.1.7",
"@loadable/component": "^5.16.3",
"@ragestudio/relic-core": "^0.17.0",
"antd": "^5.13.2",
"classnames": "^2.3.2",
"electron-differential-updater": "^4.3.2",
"electron-is-dev": "^2.0.0",
"electron-store": "^8.1.0",
"electron-updater": "^6.1.1",
"got": "11.8.3",
"human-format": "^1.2.0",
"protocol-registry": "^1.4.1",
"less": "^4.2.0",
"lodash": "^4.17.21",
"react-icons": "^4.11.0",
"react-motion": "0.5.2",
"react-router-dom": "6.6.2",
"react-spinners": "^0.13.8",
"react-spring": "^9.7.3"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.1",
"@vitejs/plugin-react": "^4.0.4",
"electron": "25.6.0",
"electron-builder": "24.6.3",
"electron-vite": "^2.1.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"vite": "^4.4.9"
} }
} }

View File

@ -1,2 +0,0 @@
#!/usr/bin/env node
// CLI entry point — defers to the compiled bundle produced by the build step.
require("./dist/index.js")

View File

@ -1,21 +0,0 @@
{
"name": "@ragestudio/relic-cli",
"version": "0.17.0",
"license": "MIT",
"author": "RageStudio",
"description": "RageStudio Relic, yet another package manager.",
"main": "./dist/index.js",
"bin": {
"relic": "./bin.js"
},
"scripts": {
"dev": "hermes-node ./src/index.js",
"build": "hermes build"
},
"dependencies": {
"commander": "^12.0.0"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.1"
}
}

View File

@ -1,169 +0,0 @@
import RelicCore from "@ragestudio/relic-core"
import { program, Command, Argument } from "commander"
import pkg from "../package.json"
/**
 * Declarative CLI command table consumed by main().
 * Each entry describes a commander command: its name, help text,
 * positional arguments / options, and the async handler (`fn`).
 * Handlers reference the `core` instance exposed on `global` by main().
 */
const commands = [
    {
        cmd: "install",
        description: "Install a package manifest from a path or URL",
        arguments: [
            {
                name: "package_manifest",
                description: "Path or URL to a package manifest",
            },
        ],
        fn: async (manifestSource, options) => {
            await core.initialize()
            await core.setup()

            return core.package.install(manifestSource, options)
        },
    },
    {
        cmd: "run",
        description: "Execute a package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to execute",
            },
        ],
        fn: async (packageId, options) => {
            await core.initialize()
            await core.setup()

            return core.package.execute(packageId, options)
        },
    },
    {
        cmd: "update",
        description: "Update a package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to update",
            },
        ],
        fn: async (packageId, options) => {
            await core.initialize()
            await core.setup()

            return core.package.update(packageId, options)
        },
    },
    {
        cmd: "uninstall",
        description: "Uninstall a package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to uninstall",
            },
        ],
        // NOTE: uninstall intentionally skips core.setup().
        fn: async (packageId, options) => {
            await core.initialize()

            return core.package.uninstall(packageId, options)
        },
    },
    {
        cmd: "apply",
        description: "Apply changes to a installed package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to apply changes to",
            },
        ],
        options: [
            {
                name: "add_patches",
                description: "Add patches to the package",
            },
            {
                name: "remove_patches",
                description: "Remove patches from the package",
            },
        ],
        fn: async (packageId, options) => {
            await core.initialize()

            return core.package.apply(packageId, options)
        },
    },
    {
        cmd: "list",
        description: "List installed package manifests",
        fn: async () => {
            await core.initialize()

            return console.log(await core.package.list())
        },
    },
    {
        cmd: "open-path",
        description: "Open the base path or a package path",
        options: [
            {
                name: "pkg_id",
                description: "Path to open",
            },
        ],
        fn: async (options) => {
            await core.initialize()
            await core.openPath(options.pkg_id)
        },
    },
]
/**
 * CLI bootstrap: builds commander commands from the `commands` table
 * and parses process arguments.
 */
async function main() {
    // Command handlers access the core instance through this global.
    global.core = new RelicCore()

    program
        .name(pkg.name)
        .description(pkg.description)
        .version(pkg.version)

    // `commands`, `arguments` and `options` are plain in-memory arrays,
    // so a synchronous for...of is the correct construct (`for await`
    // is only meaningful over async iterables).
    for (const command of commands) {
        const cmd = new Command(command.cmd).action(command.fn)

        if (command.description) {
            cmd.description(command.description)
        }

        if (Array.isArray(command.arguments)) {
            for (const argument of command.arguments) {
                if (typeof argument === "string") {
                    cmd.addArgument(new Argument(argument))
                } else {
                    const arg = new Argument(argument.name, argument.description)

                    if (argument.default) {
                        arg.default(argument.default)
                    }

                    cmd.addArgument(arg)
                }
            }
        }

        if (Array.isArray(command.options)) {
            for (const option of command.options) {
                if (typeof option === "string") {
                    cmd.option(option)
                } else {
                    cmd.option(option.name, option.description, option.default)
                }
            }
        }

        program.addCommand(cmd)
    }

    program.parse()
}

main()

View File

@ -1,38 +0,0 @@
{
"name": "@ragestudio/relic-core",
"version": "0.17.0",
"license": "MIT",
"author": "RageStudio",
"description": "RageStudio Relic, yet another package manager.",
"main": "./dist/index.js",
"files": [
"dist",
"src"
],
"scripts": {
"build": "hermes build"
},
"dependencies": {
"@foxify/events": "^2.1.0",
"adm-zip": "^0.5.12",
"axios": "^1.6.8",
"checksum": "^1.0.0",
"cli-color": "^2.0.4",
"cli-progress": "^3.12.0",
"deep-object-diff": "^1.1.9",
"extends-classes": "^1.0.5",
"googleapis": "^134.0.0",
"human-format": "^1.2.0",
"merge-stream": "^2.0.0",
"module-alias": "^2.2.3",
"node-7z": "^3.0.0",
"open": "8.4.2",
"request": "^2.88.2",
"rimraf": "^5.0.5",
"signal-exit": "^4.1.0",
"unzipper": "^0.10.14",
"upath": "^2.0.1",
"uuid": "^9.0.1",
"winston": "^3.13.0"
}
}

View File

@ -1,36 +0,0 @@
import path from "path"
import { JSONFilePreset } from "../libraries/lowdb/presets/node"
import Vars from "../vars"
//! WARNING: Please DO NOT storage any password or sensitive data here,
// cause its not use any encryption method, and it will be stored in plain text.
// This is intended to store session tokens among other vars.
/**
 * Per-package auth storage backed by a plain-text JSON vault
 * (see the warning above: values are NOT encrypted).
 */
export default class ManifestAuthService {
    // Location of the vault file inside the runtime directory.
    static vaultPath = path.resolve(Vars.runtime_path, "auth.json")

    /** Opens (creating if needed) the vault database. */
    static async withDB() {
        return await JSONFilePreset(ManifestAuthService.vaultPath, {})
    }

    /** True when an auth value is stored for the given package id. */
    static has = async (pkg_id) => {
        const vault = await this.withDB()

        return Boolean(vault.data[pkg_id])
    }

    /** Stores (or replaces) the auth value for a package id. */
    static set = async (pkg_id, value) => {
        const vault = await this.withDB()

        return await vault.update((data) => {
            data[pkg_id] = value
        })
    }

    /** Returns the stored auth value for a package id (undefined if absent). */
    static get = async (pkg_id) => {
        const vault = await this.withDB()

        return vault.data[pkg_id]
    }
}

View File

@ -1,34 +0,0 @@
import DB from "../db"
/**
 * Read/write access to the per-package `config` object persisted in the DB.
 * Call initialize() before using set/get/delete.
 */
export default class ManifestConfigManager {
    constructor(pkg_id) {
        this.pkg_id = pkg_id
        this.config = null
    }

    /**
     * Loads the package's stored config. Falls back to an empty object when
     * the package is unknown or has no config, so later get/set/delete calls
     * do not crash on `null`.
     */
    async initialize() {
        const pkg = await DB.getPackages(this.pkg_id) ?? {}

        this.config = pkg.config ?? {}
    }

    /** Sets a key and persists the whole config. Returns the updated config. */
    set(key, value) {
        this.config[key] = value

        // FIX: was bare `pkg_id` (undefined variable → ReferenceError);
        // must use the instance field.
        // NOTE(review): the DB write is intentionally not awaited here,
        // matching the original fire-and-forget behavior.
        DB.updatePackageById(this.pkg_id, { config: this.config })

        return this.config
    }

    /** Returns the value stored under `key`. */
    get(key) {
        return this.config[key]
    }

    /** Deletes a key and persists the change. Returns the updated config. */
    delete(key) {
        delete this.config[key]

        // FIX: was bare `pkg_id` (undefined variable → ReferenceError).
        DB.updatePackageById(this.pkg_id, { config: this.config })

        return this.config
    }
}

View File

@ -1,149 +0,0 @@
import Logger from "../logger"
import DB from "../db"
import fs from "node:fs"
import GenericSteps from "../generic_steps"
import parseStringVars from "../utils/parseStringVars"
/**
 * Applies and removes manifest-declared patches for an installed package,
 * keeping `pkg.applied_patches` and the DB record in sync and emitting
 * progress events on the global event bus.
 */
export default class PatchManager {
    /**
     * @param {object} pkg - package record from the database
     * @param {object} manifest - evaluated manifest (may declare `patches`)
     */
    constructor(pkg, manifest) {
        this.pkg = pkg
        this.manifest = manifest

        this.log = Logger.child({ service: `PATCH-MANAGER|${pkg.id}` })
    }

    // Ensures pkg.applied_patches is an array before it is read or mutated.
    // Older/partial records may lack the field; reapply() already guarded
    // against this but patch()/remove() previously crashed on it.
    #ensureAppliedPatches() {
        if (!Array.isArray(this.pkg.applied_patches)) {
            this.pkg.applied_patches = []
        }
    }

    /**
     * Resolves patch declarations from the manifest.
     * With no argument, returns every declared patch; with an array of ids,
     * returns the matching declarations (unknown ids are silently skipped).
     */
    async get(select) {
        if (!this.manifest.patches) {
            return []
        }

        let list = []

        if (typeof select === "undefined") {
            list = this.manifest.patches
        }

        if (Array.isArray(select)) {
            for (const id of select) {
                const patch = this.manifest.patches.find((patch) => patch.id === id)

                if (patch) {
                    list.push(patch)
                }
            }
        }

        return list
    }

    /** Re-applies every patch currently recorded as applied. */
    async reapply() {
        if (Array.isArray(this.pkg.applied_patches)) {
            return await this.patch(this.pkg.applied_patches)
        }

        return true
    }

    /**
     * Applies the selected patches (runs their addition steps) and records
     * them in `applied_patches`. Returns the updated package record.
     */
    async patch(select) {
        this.#ensureAppliedPatches()

        const list = await this.get(select)

        for await (let patch of list) {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: this.pkg.id,
                status_text: `Applying patch [${patch.id}]...`,
            })

            this.log.info(`Applying patch [${patch.id}]...`)

            if (Array.isArray(patch.additions)) {
                this.log.info(`Applying ${patch.additions.length} Additions...`)

                for await (let addition of patch.additions) {
                    // resolve patch file
                    addition.file = await parseStringVars(addition.file, this.pkg)

                    // Idempotency: skip additions that are already on disk.
                    if (fs.existsSync(addition.file)) {
                        this.log.info(`Addition [${addition.file}] already exists. Skipping...`)
                        continue
                    }

                    this.log.info(`Applying addition [${addition.file}]`)

                    global._relic_eventBus.emit(`pkg:update:state`, {
                        id: this.pkg.id,
                        status_text: `Applying addition [${addition.file}]`,
                    })

                    await GenericSteps(this.pkg, addition.steps, this.log)
                }
            }

            if (!this.pkg.applied_patches.includes(patch.id)) {
                this.pkg.applied_patches.push(patch.id)
            }
        }

        await DB.updatePackageById(this.pkg.id, { applied_patches: this.pkg.applied_patches })

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: this.pkg.id,
            status_text: `${list.length} Patches applied`,
        })

        this.log.info(`${list.length} Patches applied`)

        return this.pkg
    }

    /**
     * Removes the selected patches (deletes their addition files) and drops
     * them from `applied_patches`. Returns the updated package record.
     */
    async remove(select) {
        this.#ensureAppliedPatches()

        const list = await this.get(select)

        for await (let patch of list) {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: this.pkg.id,
                status_text: `Removing patch [${patch.id}]...`,
            })

            this.log.info(`Removing patch [${patch.id}]...`)

            if (Array.isArray(patch.additions)) {
                this.log.info(`Removing ${patch.additions.length} Additions...`)

                for await (let addition of patch.additions) {
                    addition.file = await parseStringVars(addition.file, this.pkg)

                    if (!fs.existsSync(addition.file)) {
                        this.log.info(`Addition [${addition.file}] does not exist. Skipping...`)
                        continue
                    }

                    this.log.info(`Removing addition [${addition.file}]`)

                    global._relic_eventBus.emit(`pkg:update:state`, {
                        id: this.pkg.id,
                        status_text: `Removing addition [${addition.file}]`,
                    })

                    await fs.promises.unlink(addition.file)
                }
            }

            this.pkg.applied_patches = this.pkg.applied_patches.filter((p) => {
                return p !== patch.id
            })
        }

        await DB.updatePackageById(this.pkg.id, { applied_patches: this.pkg.applied_patches })

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: this.pkg.id,
            status_text: `${list.length} Patches removed`,
        })

        this.log.info(`${list.length} Patches removed`)

        return this.pkg
    }
}

View File

@ -1,115 +0,0 @@
import { JSONFilePreset } from "./libraries/lowdb/presets/node"
import Vars from "./vars"
import pkg from "../package.json"
import fs from "node:fs"
import lodash from "lodash"
/**
 * Lowdb-backed package database. All methods are static; each call opens
 * the JSON file through withDB() and operates on the `packages` array.
 */
export default class DB {
    /** Shape of a freshly-created database file. */
    static get defaultRoot() {
        return {
            created_at_version: pkg.version,
            packages: [],
        }
    }

    /**
     * Builds the canonical package record stored in the database,
     * filling defaults for everything not provided.
     */
    static defaultPackageState({
        id,
        name,
        icon,
        version,
        author,
        install_path,
        description,
        license,
        last_status,
        remote_manifest,
        local_manifest,
        config,
        executable,
    }) {
        return {
            id,
            name,
            version,
            icon,
            install_path,
            description,
            author,
            license: license ?? "unlicensed",
            local_manifest: local_manifest ?? null,
            remote_manifest: remote_manifest ?? null,
            applied_patches: [],
            config: typeof config === "object" ? config : {},
            last_status: last_status ?? "installing",
            last_update: null,
            installed_at: null,
            executable: executable ?? false,
        }
    }

    /** Opens (creating if needed) the database file. */
    static async withDB() {
        return await JSONFilePreset(Vars.db_path, DB.defaultRoot)
    }

    static async initialize() {
        await this.cleanOrphans()
    }

    /** Drops records whose install directory no longer exists on disk. */
    static async cleanOrphans() {
        const records = await this.getPackages()

        for (const record of records) {
            if (!fs.existsSync(record.install_path)) {
                await this.deletePackage(record.id)
            }
        }
    }

    /**
     * Returns every package record, or a single record when `pkg_id`
     * is given (undefined when not found).
     */
    static async getPackages(pkg_id) {
        const database = await this.withDB()
        const records = database.data["packages"]

        if (pkg_id) {
            return records.find((record) => record.id === pkg_id)
        }

        return records
    }

    /** Inserts or replaces a record by id, then persists the file. */
    static async writePackage(record) {
        const database = await this.withDB()
        const existingIndex = database.data["packages"].findIndex((entry) => entry.id === record.id)

        if (existingIndex === -1) {
            database.data["packages"].push(record)
        } else {
            database.data["packages"][existingIndex] = record
        }

        await database.write()

        return database.data
    }

    /**
     * Deep-merges `obj` into an existing record and persists it.
     * @throws {Error} when no record matches `pkg_id`
     */
    static async updatePackageById(pkg_id, obj) {
        const record = await this.getPackages(pkg_id)

        if (!record) {
            throw new Error("Package not found")
        }

        return await this.writePackage(lodash.merge({ ...record }, obj))
    }

    /** Removes a record by id; returns the id. */
    static async deletePackage(pkg_id) {
        const database = await this.withDB()

        await database.update((data) => {
            data["packages"] = data["packages"].filter((entry) => entry.id !== pkg_id)
            return data
        })

        return pkg_id
    }
}

View File

@ -1,49 +0,0 @@
import Logger from "../logger"
import path from "node:path"
import fs from "node:fs"
import upath from "upath"
import { execa } from "../libraries/execa"
import Vars from "../vars"
/**
 * Step handler: clones a git repository (with submodules) into the package's
 * install path. Returns the package record.
 */
export default async (pkg, step) => {
    // Default to the package root when the step gives no sub-path.
    if (!step.path) {
        step.path = `.`
    }

    const Log = Logger.child({ service: `GIT|${pkg.id}` })

    // Prefer the bundled git binary when present, otherwise rely on PATH.
    const gitBin = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
    const clonePath = upath.normalizeSafe(path.resolve(pkg.install_path, step.path))

    if (!fs.existsSync(clonePath)) {
        fs.mkdirSync(clonePath, { recursive: true })
    }

    Log.info(`Cloning from [${step.url}]`)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Cloning from [${step.url}]`,
    })

    // Shallow/partial-clone flags (--depth, --filter=blob:none,
    // --filter=tree:0) were previously considered and deliberately left out.
    const cloneArgs = [
        "clone",
        "--recurse-submodules",
        "--remote-submodules",
        step.url,
        clonePath,
    ]

    await execa(gitBin, cloneArgs, {
        cwd: clonePath,
        stdout: "inherit",
        stderr: "inherit",
    })

    return pkg
}

View File

@ -1,33 +0,0 @@
import Logger from "../logger"
import path from "node:path"
import fs from "node:fs"
import { execa } from "../libraries/execa"
import Vars from "../vars"
/**
 * Step handler: runs `git pull --rebase` inside the package's working
 * directory. Returns the package record.
 */
export default async (pkg, step) => {
    // Default to the package root when the step gives no sub-path.
    if (!step.path) {
        step.path = `.`
    }

    const Log = Logger.child({ service: `GIT|${pkg.id}` })

    // Prefer the bundled git binary when present, otherwise rely on PATH.
    const gitBin = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
    const workdir = path.resolve(pkg.install_path, step.path)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Pulling...`,
    })

    Log.info(`Pulling from HEAD...`)

    await execa(gitBin, ["pull", "--rebase"], {
        cwd: workdir,
        stdout: "inherit",
        stderr: "inherit",
    })

    return pkg
}

View File

@ -1,83 +0,0 @@
import Logger from "../logger"
import path from "node:path"
import fs from "node:fs"
import { execa } from "../libraries/execa"
import git_pull from "./git_pull"
import Vars from "../vars"
/**
 * Step handler: hard-resets the package's working tree to `step.from`
 * (default HEAD) — fetch, clean, reset, pull, checkout. Returns the package.
 */
export default async (pkg, step) => {
    // Default to the package root when the step gives no sub-path.
    if (!step.path) {
        step.path = `.`
    }

    const Log = Logger.child({ service: `GIT|${pkg.id}` })

    // Prefer the bundled git binary when present, otherwise rely on PATH.
    const gitBin = fs.existsSync(Vars.git_path) ? `${Vars.git_path}` : "git"
    const workdir = path.resolve(pkg.install_path, step.path)
    const from = step.from ?? "HEAD"

    if (!fs.existsSync(workdir)) {
        fs.mkdirSync(workdir, { recursive: true })
    }

    // Runs a git command inside the step's working directory.
    const runGit = (args) => execa(gitBin, args, {
        cwd: workdir,
        stdout: "inherit",
        stderr: "inherit",
    })

    // Publishes a progress update to the UI.
    const announce = (text) => {
        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: text,
        })
    }

    Log.info(`Fetching from origin`)
    announce(`Fetching from origin...`)
    await runGit(["fetch", "origin"])

    Log.info(`Cleaning untracked files...`)
    announce(`Cleaning untracked files...`)
    await runGit(["clean", "-df"])

    Log.info(`Resetting to ${from}`)
    announce(`Resetting to ${from}`)
    await runGit(["reset", "--hard", from])

    // pull the latest
    await git_pull(pkg, step)

    Log.info(`Checkout to HEAD`)
    announce(`Checkout to HEAD`)
    await runGit(["checkout", "HEAD"])

    return pkg
}

View File

@ -1,66 +0,0 @@
import path from "node:path"
import fs from "node:fs"
import os from "node:os"
import downloadHttpFile from "../helpers/downloadHttpFile"
import parseStringVars from "../utils/parseStringVars"
import extractFile from "../utils/extractFile"
/**
 * Step handler: downloads a file over HTTP into the install path (or a tmp
 * directory when `step.tmp`), optionally extracting it afterwards.
 * Returns the package record, matching the other step handlers.
 */
export default async (pkg, step, logger) => {
    // Default to a file named after the URL, inside the install path.
    if (!step.path) {
        step.path = `./${path.basename(step.url)}`
    }

    step.path = await parseStringVars(step.path, pkg)

    let _path = path.resolve(pkg.install_path, step.path)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Downloading [${step.url}]`,
    })

    logger.info(`Downloading [${step.url} to ${_path}]`)

    // Temporal downloads land in a unique tmp folder instead of the install path.
    if (step.tmp) {
        _path = path.resolve(os.tmpdir(), String(Date.now()), path.basename(step.url))
    }

    // Ensure the destination directory exists (dirname is clearer than
    // resolving "..").
    fs.mkdirSync(path.dirname(_path), { recursive: true })

    await downloadHttpFile(step.url, _path, (progress) => {
        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            use_id_only: true,
            status_text: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
        })
    })

    logger.info(`Downloaded finished.`)

    if (step.extract) {
        // `extract` may be a destination path (string) or truthy for the
        // package root.
        if (typeof step.extract === "string") {
            step.extract = path.resolve(pkg.install_path, step.extract)
        } else {
            step.extract = path.resolve(pkg.install_path, ".")
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Extracting bundle...`,
        })

        await extractFile(_path, step.extract)

        // Clean up the downloaded archive unless explicitly disabled.
        if (step.deleteAfterExtract !== false) {
            logger.info(`Deleting temporal file [${_path}]...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Deleting temporal files...`,
            })

            await fs.promises.rm(_path, { recursive: true })
        }
    }

    // Consistency fix: the other step handlers return the package record.
    return pkg
}

View File

@ -1,48 +0,0 @@
import Logger from "../logger"
import ISM_GIT_CLONE from "./git_clone"
import ISM_GIT_PULL from "./git_pull"
import ISM_GIT_RESET from "./git_reset"
import ISM_HTTP from "./http"
// Maps a manifest step `type` to its handler implementation.
const InstallationStepsMethods = {
    git_clone: ISM_GIT_CLONE,
    git_pull: ISM_GIT_PULL,
    git_reset: ISM_GIT_RESET,
    http_file: ISM_HTTP,
}

// Execution priority for steps; earlier entries run first.
// FIX: was "git_clones" (typo) — the method key is "git_clone", so clone
// steps fell back to indexOf() === -1 instead of their declared position.
const StepsOrders = [
    "git_clone",
    "git_pull",
    "git_reset",
    "http_file",
]
/**
 * Runs a manifest's generic steps (git/http operations) in priority order.
 * @param {object} pkg - package record the steps act on
 * @param {Array<object>} steps - step descriptors; each needs a `type`
 * @param {object} [logger] - logger used for progress output
 * @returns {Promise<object>} the package record
 * @throws {Error} when `steps` is not an array or a step type is unknown
 */
export default async function processGenericSteps(pkg, steps, logger = Logger) {
    logger.info(`Processing generic steps...`)

    if (!Array.isArray(steps)) {
        throw new Error(`Steps must be an array`)
    }

    if (steps.length === 0) {
        return pkg
    }

    // Order steps by their position in StepsOrders (sorts in place,
    // as before).
    steps = steps.sort((a, b) => StepsOrders.indexOf(a.type) - StepsOrders.indexOf(b.type))

    for (const step of steps) {
        step.type = step.type.toLowerCase()

        const handler = InstallationStepsMethods[step.type]

        if (!handler) {
            throw new Error(`Unknown step: ${step.type}`)
        }

        await handler(pkg, step, logger)
    }

    return pkg
}

View File

@ -1,95 +0,0 @@
import Logger from "../logger"
import PatchManager from "../classes/PatchManager"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import DB from "../db"
const BaseLog = Logger.child({ service: "APPLIER" })
/**
 * Selects patch ids whose desired state in `changes.patches` differs from
 * the currently applied set.
 * @param {Array<object>} patches - patch declarations from the manifest
 * @param {Array<string>} applied_patches - ids currently applied
 * @param {object} changes - change request; `changes.patches` maps id -> bool
 * @param {boolean} mustBeInstalled - true: ids to install; false: ids to remove
 * @returns {Array<string>} matching patch ids
 */
function findPatch(patches, applied_patches, changes, mustBeInstalled) {
    const shouldSelect = (patch) => {
        const desired = changes.patches[patch.id]

        // Ignore patches the change request says nothing about.
        if (typeof desired === "undefined") {
            return false
        }

        const isApplied = applied_patches.includes(patch.id)

        if (mustBeInstalled === true) {
            return desired === true && !isApplied
        }

        if (mustBeInstalled === false) {
            return desired === false && isApplied
        }

        return false
    }

    return patches.filter(shouldSelect).map((patch) => patch.id)
}
/**
 * Applies a change-set (patch toggles and/or config values) to an installed
 * package and persists the result. Returns the updated package record,
 * or null on failure / unknown package.
 */
export default async function apply(pkg_id, changes = {}) {
    try {
        let pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.error(`Package not found [${pkg_id}]`)
            return null
        }

        let manifest = await ManifestReader(pkg.local_manifest)
        manifest = await ManifestVM(manifest.code)

        const Log = Logger.child({ service: `APPLIER|${pkg.id}` })

        Log.info(`Applying changes to package...`)
        Log.info(`Changes: ${JSON.stringify(changes)}`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Applying changes to package...`,
            last_status: "loading",
        })

        if (changes.patches) {
            if (!Array.isArray(pkg.applied_patches)) {
                pkg.applied_patches = []
            }

            // FIX: guard against manifests that declare no patches at all —
            // findPatch() filters the array and crashed on undefined.
            const manifestPatches = Array.isArray(manifest.patches) ? manifest.patches : []

            const patches = new PatchManager(pkg, manifest)

            await patches.remove(findPatch(manifestPatches, pkg.applied_patches, changes, false))
            await patches.patch(findPatch(manifestPatches, pkg.applied_patches, changes, true))
        }

        if (changes.config) {
            Log.info(`Applying config to package...`)

            if (Object.keys(changes.config).length !== 0) {
                Object.entries(changes.config).forEach(([key, value]) => {
                    pkg.config[key] = value
                })
            }
        }

        await DB.writePackage(pkg)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: "All changes applied",
        })

        Log.info(`All changes applied to package.`)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to apply changes to package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}

View File

@ -1,33 +0,0 @@
import ManifestAuthDB from "../classes/ManifestAuthDB"
import DB from "../db"
import Logger from "../logger"
const Log = Logger.child({ service: "AUTH" })

/**
 * Stores an auth value (e.g. a session token) for an installed package and
 * notifies listeners via the global event bus.
 * @returns {Promise<boolean>} true on success; false when validation fails
 * or the package is not installed
 */
export default async (pkg_id, value) => {
    // Validate inputs up front (each case logs its own message).
    if (!pkg_id) {
        Log.error("pkg_id is required")
        return false
    }

    if (!value) {
        Log.error("value is required")
        return false
    }

    const pkg = await DB.getPackages(pkg_id)

    if (!pkg) {
        Log.error("Package not found")
        return false
    }

    Log.info(`Setting auth for [${pkg_id}]`)

    await ManifestAuthDB.set(pkg_id, value)

    global._relic_eventBus.emit("pkg:authorized", pkg)

    return true
}

View File

@ -1,43 +0,0 @@
import Logger from "../logger"
import DB from "../db"
import softRead from "./read"
const Log = Logger.child({ service: "CHECK_UPDATE" })

/**
 * Compares an installed package's version against its remote manifest.
 * @returns {Promise<false|object>} false when up to date or on failure;
 * otherwise { id, local, remote, changelog }
 */
export default async function checkUpdate(pkg_id) {
    const pkg = await DB.getPackages(pkg_id)

    if (!pkg) {
        Log.error("Package not found")
        return false
    }

    Log.info(`Checking update for [${pkg_id}]`)

    // "Soft" read: evaluate the remote manifest without side effects.
    const remoteManifest = await softRead(pkg.remote_manifest, {
        soft: true
    })

    if (!remoteManifest) {
        Log.error("Cannot read remote manifest")
        return false
    }

    if (pkg.version === remoteManifest.version) {
        Log.info("No update available")
        return false
    }

    Log.info("Update available")
    Log.info("Local:", pkg.version)
    Log.info("Remote:", remoteManifest.version)
    Log.info("Changelog:", remoteManifest.changelog_url)

    return {
        id: pkg.id,
        local: pkg.version,
        remote: remoteManifest.version,
        changelog: remoteManifest.changelog_url,
    }
}

View File

@ -1,80 +0,0 @@
import Logger from "../logger"
import fs from "node:fs"
import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import parseStringVars from "../utils/parseStringVars"
import { execa } from "../libraries/execa"
const BaseLog = Logger.child({ service: "EXECUTER" })

/**
 * Executes an installed package: runs the manifest's `execute` hook when it
 * is a function, or spawns the configured binary when it is a string.
 * @param {string} pkg_id - installed package id
 * @param {object} [options]
 * @param {boolean} [options.useRemote=false] - read the remote manifest
 * instead of the local one
 * @param {boolean} [options.force=false] - accepted for interface
 * compatibility; not currently read in this handler
 * @returns {Promise<object|false|null>} the package record, false when the
 * package/manifest is missing, null on execution error
 */
export default async function execute(pkg_id, { useRemote = false, force = false } = {}) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.info(`Package not found [${pkg_id}]`)
            return false
        }

        const manifestPath = useRemote ? pkg.remote_manifest : pkg.local_manifest

        if (!fs.existsSync(manifestPath)) {
            BaseLog.error(`Manifest not found in expected path [${manifestPath}]
\nMaybe the package installation has not been completed yet or corrupted.
`)
            return false
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "loading",
            status_text: null,
        })

        const ManifestRead = await ManifestReader(manifestPath)
        const manifest = await ManifestVM(ManifestRead.code)

        if (typeof manifest.execute === "function") {
            await manifest.execute(pkg)
        }

        if (typeof manifest.execute === "string") {
            // FIX: parseStringVars is awaited at every other call site; without
            // the await, execa received a Promise instead of the command string.
            manifest.execute = await parseStringVars(manifest.execute, pkg)

            BaseLog.info(`Executing binary > [${manifest.execute}]`)

            const args = Array.isArray(manifest.execute_args) ? manifest.execute_args : []

            await execa(manifest.execute, args, {
                cwd: pkg.install_path,
                stdout: "inherit",
                stderr: "inherit",
            })
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "installed",
            status_text: null,
        })

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            id: pkg_id,
            event: "execute",
            last_status: "installed",
            error,
        })

        BaseLog.error(`Failed to execute package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}

View File

@ -1,182 +0,0 @@
import Logger from "../logger"
import fs from "node:fs"
import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import GenericSteps from "../generic_steps"
import Apply from "../handlers/apply"
const BaseLog = Logger.child({ service: "INSTALLER" })

/**
 * Installs a package from a manifest (path or URL).
 * Flow: read + evaluate manifest -> create install dir -> register in DB ->
 * run hooks and generic steps -> persist the final manifest -> apply default
 * patches. Returns the installed package record, or null on failure.
 */
export default async function install(manifest) {
    // Tracked outside the try block so the catch handler can still
    // report which package failed.
    let id = null

    try {
        BaseLog.info(`Invoking new installation...`)
        BaseLog.info(`Fetching manifest [${manifest}]`)

        const ManifestRead = await ManifestReader(manifest)
        manifest = await ManifestVM(ManifestRead.code)

        id = manifest.constructor.id

        const Log = BaseLog.child({ service: `INSTALLER|${id}` })

        Log.info(`Creating install path [${manifest.install_path}]`)

        // Reinstall: wipe any previous installation first.
        // (fs.rmSync/mkdirSync are synchronous — the previous `await`s
        // were no-ops and have been dropped.)
        if (fs.existsSync(manifest.install_path)) {
            Log.info(`Package already exists, removing...`)
            fs.rmSync(manifest.install_path, { recursive: true })
        }

        fs.mkdirSync(manifest.install_path, { recursive: true })

        Log.info(`Initializing manifest...`)

        if (typeof manifest.initialize === "function") {
            await manifest.initialize()
        }

        Log.info(`Appending to db...`)

        const pkg = DB.defaultPackageState({
            ...manifest.constructor,
            id: id,
            name: manifest.constructor.pkg_name,
            version: manifest.constructor.version,
            install_path: manifest.install_path,
            description: manifest.constructor.description,
            license: manifest.constructor.license,
            last_status: "installing",
            remote_manifest: ManifestRead.remote_manifest,
            local_manifest: ManifestRead.local_manifest,
            executable: !!manifest.execute
        })

        await DB.writePackage(pkg)

        global._relic_eventBus.emit("pkg:new", pkg)

        if (manifest.configuration) {
            Log.info(`Applying default config to package...`)

            // Seed the package config with each option's declared default.
            pkg.config = Object.entries(manifest.configuration).reduce((acc, [key, value]) => {
                acc[key] = value.default
                return acc
            }, {})
        }

        if (typeof manifest.beforeInstall === "function") {
            Log.info(`Executing beforeInstall hook...`)
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing beforeInstall hook...`,
            })

            await manifest.beforeInstall(pkg)
        }

        if (Array.isArray(manifest.installSteps)) {
            Log.info(`Executing generic install steps...`)
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing generic install steps...`,
            })

            await GenericSteps(pkg, manifest.installSteps, Log)
        }

        if (typeof manifest.afterInstall === "function") {
            Log.info(`Executing afterInstall hook...`)
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing afterInstall hook...`,
            })

            await manifest.afterInstall(pkg)
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Finishing up...`,
        })

        Log.info(`Copying manifest to the final location...`)

        const finalPath = `${manifest.install_path}/.rmanifest`

        if (fs.existsSync(finalPath)) {
            await fs.promises.unlink(finalPath)
        }

        await fs.promises.copyFile(ManifestRead.local_manifest, finalPath)

        // Drop the cached copy once it has been promoted to .rmanifest.
        if (ManifestRead.is_catched) {
            Log.info(`Removing cache manifest...`)
            await fs.promises.unlink(ManifestRead.local_manifest)
        }

        pkg.local_manifest = finalPath
        pkg.last_status = "loading"
        pkg.installed_at = Date.now()

        await DB.writePackage(pkg)

        // Patches flagged `default: true` are applied on first install.
        if (manifest.patches) {
            const defaultPatches = manifest.patches.filter((patch) => patch.default)

            if (defaultPatches.length > 0) {
                Log.info(`Applying default patches...`)
                global._relic_eventBus.emit(`pkg:update:state`, {
                    id: pkg.id,
                    status_text: `Applying default patches...`,
                })

                await Apply(id, {
                    patches: Object.fromEntries(defaultPatches.map((patch) => [patch.id, true])),
                })
            }
        }

        pkg.last_status = "installed"

        await DB.writePackage(pkg)

        global._relic_eventBus.emit(`pkg:update:state`, {
            ...pkg,
            id: pkg.id,
            last_status: "installed",
            status_text: `Installation completed successfully`,
        })
        global._relic_eventBus.emit(`pkg:new:done`, pkg)

        Log.info(`Package installed successfully!`)

        return pkg
    } catch (error) {
        // FIX: `pkg` is scoped to the try block — referencing `pkg.id` here
        // threw a ReferenceError that masked the real installation error.
        // `id` is maintained for exactly this purpose.
        global._relic_eventBus.emit(`pkg:error`, {
            id,
            error
        })

        global._relic_eventBus.emit(`pkg:update:state`, {
            id,
            last_status: "failed",
            status_text: `Installation failed`,
        })

        BaseLog.error(`Error during installation of package [${id}] >`, error)
        BaseLog.error(error.stack)

        return null
    }
}

View File

@ -1,5 +0,0 @@
import DB from "../db"
/** Returns every package record stored in the local database. */
export default async function list() {
    const packages = await DB.getPackages()

    return packages
}

View File

@ -1,9 +0,0 @@
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
/**
 * Reads and evaluates a manifest without installing anything ("soft" read).
 * @param {string} manifest - path or URL of the manifest
 * @param {object} [options] - forwarded to the manifest VM
 * @returns {Promise<object>} the evaluated manifest
 */
export default async function softRead(manifest, options = {}) {
    const reader = await ManifestReader(manifest)

    return await ManifestVM(reader.code, options)
}

View File

@ -1,74 +0,0 @@
import Logger from "../logger"
import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import { rimraf } from "rimraf"
const BaseLog = Logger.child({ service: "UNINSTALLER" })

/**
 * Uninstalls a package by id: runs the manifest's uninstall hook (if any),
 * deletes the install directory and removes the record from the database.
 * @returns {Promise<object|null>} the removed package record, or null on
 * failure / unknown id
 */
export default async function uninstall(pkg_id) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.info(`Package not found [${pkg_id}]`)
            return null
        }

        const Log = Logger.child({ service: `UNINSTALLER|${pkg.id}` })

        // Publishes a progress update to the UI.
        const emitState = (payload) => {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                ...payload,
            })
        }

        Log.info(`Uninstalling package...`)
        emitState({ status_text: `Uninstalling package...` })

        const manifestRead = await ManifestReader(pkg.local_manifest)
        const manifest = await ManifestVM(manifestRead.code)

        if (typeof manifest.uninstall === "function") {
            Log.info(`Performing uninstall hook...`)
            emitState({ status_text: `Performing uninstall hook...` })

            await manifest.uninstall(pkg)
        }

        Log.info(`Deleting package directory...`)
        emitState({ status_text: `Deleting package directory...` })
        await rimraf(pkg.install_path)

        Log.info(`Removing package from database...`)
        emitState({ status_text: `Removing package from database...` })
        await DB.deletePackage(pkg.id)

        emitState({
            last_status: "deleted",
            status_text: `Uninstalling package...`,
        })
        global._relic_eventBus.emit(`pkg:remove`, pkg)

        Log.info(`Package uninstalled successfully!`)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to uninstall package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}

View File

@ -1,128 +0,0 @@
import Logger from "../logger"
import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import GenericSteps from "../generic_steps"
import PatchManager from "../classes/PatchManager"
const BaseLog = Logger.child({ service: "UPDATER" })
// Package record fields that may be refreshed from the manifest's public
// static values after an update completes. Field names are translated to
// the manifest's static key names via ManifestKeysMap.
const AllowedPkgChanges = [
    "id",
    "name",
    "version",
    "description",
    "author",
    "license",
    "icon",
    "core_minimum_version",
    "remote_manifest",
]
// Maps a package-record field to the differently-named static key exposed
// by the manifest class (e.g. `pkg.name` is read from `Manifest.pkg_name`).
// Fields not listed here use the same key on both sides.
const ManifestKeysMap = {
    "name": "pkg_name",
}
/**
 * Updates an installed package: runs the manifest's `update` hook and
 * `updateSteps`, re-applies any previously applied patches, runs the
 * `afterUpdate` hook, then refreshes the package record from the manifest's
 * public static values. Emits progress/state events on the global event bus.
 *
 * @param {string} pkg_id - ID of the package to update.
 * @returns {Promise<object|null>} The updated package record, or null when
 * the package was not found or an error occurred (emitted as `pkg:error`).
 */
export default async function update(pkg_id) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.error(`Package not found [${pkg_id}]`)
            return null
        }

        const Log = BaseLog.child({ service: `UPDATER|${pkg.id}` })

        let ManifestRead = await ManifestReader(pkg.local_manifest)
        let manifest = await ManifestVM(ManifestRead.code)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "updating",
            status_text: `Updating package...`,
        })

        pkg.last_status = "updating"

        await DB.writePackage(pkg)

        if (typeof manifest.update === "function") {
            Log.info(`Performing update hook...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing update hook...`,
            })

            await manifest.update(pkg)
        }

        if (manifest.updateSteps) {
            Log.info(`Performing update steps...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing update steps...`,
            })

            await GenericSteps(pkg, manifest.updateSteps, Log)
        }

        // Patches recorded on the package must be re-applied on top of the
        // freshly updated files.
        if (Array.isArray(pkg.applied_patches)) {
            const patchManager = new PatchManager(pkg, manifest)

            await patchManager.reapply()
        }

        if (typeof manifest.afterUpdate === "function") {
            Log.info(`Performing after update hook...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing after update hook...`,
            })

            await manifest.afterUpdate(pkg)
        }

        // Re-read the manifest: the update steps may have replaced it.
        ManifestRead = await ManifestReader(pkg.local_manifest)
        manifest = await ManifestVM(ManifestRead.code)

        // Override public static values on the package record.
        // FIX: the existence check must test the MAPPED key. The previous
        // `key in manifest.constructor` was wrong for "name": every class
        // has a built-in `.name` property, so the check always passed and
        // `pkg.name` was clobbered with `constructor.pkg_name` even when
        // the manifest did not define it.
        for (const key of AllowedPkgChanges) {
            const mapKey = ManifestKeysMap[key] ?? key

            if (mapKey in manifest.constructor) {
                pkg[key] = manifest.constructor[mapKey]
            }
        }

        pkg.last_status = "installed"
        pkg.last_update = Date.now()

        await DB.writePackage(pkg)

        Log.info(`Package updated successfully`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            ...pkg,
            id: pkg.id,
        })

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to update package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}

View File

@ -1,73 +0,0 @@
import fs from "node:fs"
import axios from "axios"
import humanFormat from "human-format"
import cliProgress from "cli-progress"
// Formats a byte count as a human-readable string with two decimals,
// appending "B" to human-format's unit prefix (e.g. "1.50 MB").
function convertSize(size) {
    const formatted = humanFormat(size, { decimals: 2 })

    return `${formatted}B`
}
/**
 * Downloads a file over HTTP(S) to `destination`, rendering a CLI progress
 * bar and invoking `progressCallback` once per second with
 * `{ total, transferred, speed, transferredString, totalString, speedString }`.
 *
 * @param {string} url - Source URL.
 * @param {string} destination - Local file path to write to.
 * @param {(progress: object) => void} [progressCallback] - Optional per-tick callback.
 * @returns {Promise<string>} The destination path once the file is fully written.
 * @throws Propagates request/stream errors after cleaning up the progress UI.
 */
export default async (url, destination, progressCallback) => {
    const progressBar = new cliProgress.SingleBar({
        format: "[{bar}] {percentage}% | {total_formatted} | {speed}/s | {eta_formatted}",
        barCompleteChar: "\u2588",
        barIncompleteChar: "\u2591",
        hideCursor: true
    }, cliProgress.Presets.shades_classic)

    const { data: remoteStream, headers } = await axios.get(url, {
        responseType: "stream",
    })

    const localStream = fs.createWriteStream(destination)

    let progress = {
        total: Number(headers["content-length"] ?? 0),
        transferred: 0,
        speed: 0,
    }

    let lastTickTransferred = 0

    progressBar.start(progress.total, 0, {
        speed: "0B/s",
        total_formatted: convertSize(progress.total),
    })

    remoteStream.pipe(localStream)

    remoteStream.on("data", (data) => {
        progress.transferred = progress.transferred + Buffer.byteLength(data)
    })

    // Ticks once per second, so bytes-since-last-tick is already bytes/s.
    const progressInterval = setInterval(() => {
        progress.speed = ((progress.transferred ?? 0) - lastTickTransferred) / 1

        lastTickTransferred = progress.transferred ?? 0

        progress.transferredString = convertSize(progress.transferred ?? 0)
        progress.totalString = convertSize(progress.total)
        progress.speedString = convertSize(progress.speed)

        progressBar.update(progress.transferred, {
            speed: progress.speedString,
        })

        if (typeof progressCallback === "function") {
            progressCallback(progress)
        }
    }, 1000)

    try {
        await new Promise((resolve, reject) => {
            localStream.on("finish", resolve)
            localStream.on("error", reject)
            // FIX: a failing remote stream previously went unhandled, and a
            // rejection skipped cleanup entirely, leaving the interval timer
            // and the progress bar alive forever.
            remoteStream.on("error", reject)
        })
    } finally {
        clearInterval(progressInterval)
        progressBar.stop()
    }

    return destination
}

View File

@ -1,43 +0,0 @@
import lodash from "lodash"
// Top-level keys stripped from any (non-array) payload before it is sent
// over IPC — these are too large or not meaningful to the renderer.
const forbidden = [
    "libraries"
]

/**
 * Makes arbitrary data safe to send over Electron IPC.
 *
 * A single JSON round-trip both deep-copies the value and strips anything
 * non-serializable (functions, symbols, `undefined` values); circular
 * references throw here and are caught by the caller. Forbidden top-level
 * keys are then removed from non-array payloads.
 *
 * FIX: the previous implementation additionally ran `lodash.cloneDeep` on
 * the already-fresh parsed object and re-checked for function-valued keys —
 * both were dead work, since `JSON.stringify` had already copied the data
 * and dropped every function.
 *
 * @param {*} data - Raw payload; falsy payloads become `undefined`.
 * @returns {*} A serializable copy of `data`, or `undefined`.
 */
function serializeIpc(data) {
    if (!data) {
        return undefined
    }

    const copy = JSON.parse(JSON.stringify(data))

    if (!Array.isArray(copy)) {
        for (const key of Object.keys(copy)) {
            if (forbidden.includes(key)) {
                delete copy[key]
            }
        }
    }

    return copy
}

/**
 * Sends an event with a serialized payload to the main renderer window.
 *
 * @param {string} event - IPC channel name.
 * @param {*} data - Payload, serialized via `serializeIpc`.
 * @returns {false|undefined} `false` when no window exists yet; otherwise undefined.
 */
export default (event, data) => {
    if (!global.win) {
        return false
    }

    try {
        global.win.webContents.send(event, serializeIpc(data))
    } catch (error) {
        // Best-effort: a non-serializable payload must not crash the main process.
        console.error(error)
    }
}

View File

@ -1,201 +0,0 @@
import Logger from "../logger"
const Log = Logger.child({ service: "SETUP" })
import path from "node:path"
import fs from "node:fs"
import os from "node:os"
import admzip from "adm-zip"
import resolveOs from "../utils/resolveOs"
import chmodRecursive from "../utils/chmodRecursive"
import downloadFile from "../helpers/downloadHttpFile"
import Vars from "../vars"
import Prerequisites from "../prerequisites"
/**
 * Ensures every binary prerequisite (7zip, rclone, etc.) is present:
 * for each missing prerequisite it downloads, extracts, fixes permissions
 * and moves files as described by its descriptor, emitting `app:setup`
 * progress events throughout. Throws (after emitting the error) on the
 * first prerequisite that fails, aborting the remainder.
 *
 * @returns {Promise<void>}
 * @throws The first error encountered while installing a prerequisite.
 */
export default async () => {
    if (!fs.existsSync(Vars.binaries_path)) {
        Log.info(`Creating binaries directory: ${Vars.binaries_path}...`)
        await fs.promises.mkdir(Vars.binaries_path, { recursive: true })
    }

    for (const prerequisite of Prerequisites) {
        try {
            Log.info(`Checking prerequisite: ${prerequisite.id}...`)

            // Skip prerequisites not required on the current platform.
            if (Array.isArray(prerequisite.requireOs) && !prerequisite.requireOs.includes(os.platform())) {
                Log.info(`Prerequisite: ${prerequisite.id} is not required for this os.`)
                continue
            }

            if (!fs.existsSync(prerequisite.finalBin)) {
                Log.info(`Missing prerequisite: ${prerequisite.id}, installing...`)

                global._relic_eventBus.emit("app:setup", {
                    installed: false,
                    message: `Installing ${prerequisite.id}`,
                })

                // Clean up leftovers from a previous, interrupted install.
                if (fs.existsSync(prerequisite.destination)) {
                    Log.info(`Deleting temporal file [${prerequisite.destination}]`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Deleting temporal file [${prerequisite.destination}]`,
                    })

                    await fs.promises.rm(prerequisite.destination)
                }

                if (fs.existsSync(prerequisite.extract)) {
                    Log.info(`Deleting temporal directory [${prerequisite.extract}]`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Deleting temporal directory [${prerequisite.extract}]`,
                    })

                    await fs.promises.rm(prerequisite.extract, { recursive: true })
                }

                Log.info(`Creating base directory: ${Vars.binaries_path}/${prerequisite.id}...`)

                global._relic_eventBus.emit("app:setup", {
                    installed: false,
                    message: `Creating base directory: ${Vars.binaries_path}/${prerequisite.id}`,
                })

                await fs.promises.mkdir(path.resolve(Vars.binaries_path, prerequisite.id), { recursive: true })

                // A descriptor may compute its download URL per os/arch.
                if (typeof prerequisite.url === "function") {
                    prerequisite.url = await prerequisite.url(resolveOs(), os.arch())
                    Log.info(`Resolved url: ${prerequisite.url}`)
                }

                Log.info(`Downloading ${prerequisite.id} from [${prerequisite.url}] to destination [${prerequisite.destination}]...`)

                global._relic_eventBus.emit("app:setup", {
                    installed: false,
                    message: `Starting download ${prerequisite.id} from [${prerequisite.url}] to destination [${prerequisite.destination}]`,
                })

                try {
                    await downloadFile(
                        prerequisite.url,
                        prerequisite.destination,
                        (progress) => {
                            global._relic_eventBus.emit("app:setup", {
                                installed: false,
                                message: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
                            })
                        }
                    )
                } catch (error) {
                    // Never leave a partial download behind.
                    if (fs.existsSync(prerequisite.destination)) {
                        await fs.promises.rm(prerequisite.destination)
                    }

                    throw error
                }

                if (typeof prerequisite.extract === "string") {
                    Log.info(`Extracting ${prerequisite.id} to destination [${prerequisite.extract}]...`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Extracting ${prerequisite.id} to destination [${prerequisite.extract}]`,
                    })

                    const zip = new admzip(prerequisite.destination)

                    await zip.extractAllTo(prerequisite.extract, true)

                    Log.info(`Extraction ok...`)
                }

                // Some archives wrap everything in a directory named after the
                // archive file; flatten it so `extract` is the content root.
                if (prerequisite.extractTargetFromName === true) {
                    let name = path.basename(prerequisite.url)
                    const ext = path.extname(name)

                    name = name.replace(ext, "")

                    if (fs.existsSync(path.resolve(prerequisite.extract, name))) {
                        await fs.promises.rename(path.resolve(prerequisite.extract, name), `${prerequisite.extract}_old`)
                        await fs.promises.rm(prerequisite.extract, { recursive: true })
                        await fs.promises.rename(`${prerequisite.extract}_old`, prerequisite.extract)
                    }
                }

                if (prerequisite.deleteBeforeExtract === true) {
                    Log.info(`Deleting temporal file [${prerequisite.destination}]`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Deleting temporal file [${prerequisite.destination}]`,
                    })

                    await fs.promises.unlink(prerequisite.destination)
                }

                if (typeof prerequisite.rewriteExecutionPermission !== "undefined") {
                    const to = typeof prerequisite.rewriteExecutionPermission === "string" ?
                        prerequisite.rewriteExecutionPermission :
                        prerequisite.finalBin

                    Log.info(`Rewriting permissions to ${to}...`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Rewriting permissions to ${to}`,
                    })

                    await chmodRecursive(to, 0o755)
                }

                if (Array.isArray(prerequisite.moveDirs)) {
                    for (const dir of prerequisite.moveDirs) {
                        if (Array.isArray(dir.requireOs)) {
                            if (!dir.requireOs.includes(resolveOs())) {
                                continue
                            }
                        }

                        Log.info(`Moving ${dir.from} to ${dir.to}...`)

                        global._relic_eventBus.emit("app:setup", {
                            installed: false,
                            message: `Moving ${dir.from} to ${dir.to}`,
                        })

                        await fs.promises.rename(dir.from, dir.to)

                        if (dir.deleteParentBefore === true) {
                            await fs.promises.rm(path.dirname(dir.from), { recursive: true })
                        }
                    }
                }
            }

            global._relic_eventBus.emit("app:setup", {
                installed: true,
                message: null,
            })

            Log.info(`Prerequisite: ${prerequisite.id} is ready!`)
        } catch (error) {
            global._relic_eventBus.emit("app:setup", {
                installed: false,
                error: error,
                message: error.message,
            })

            Log.error("Aborting setup due to an error...")
            Log.error(error)

            throw error
        }
    }

    // FIX: this summary log previously sat inside the loop body, firing once
    // per prerequisite instead of once after all of them completed.
    Log.info(`All prerequisites are ready!`)
}

View File

@ -1,68 +0,0 @@
import fs from "node:fs"
import { EventEmitter } from "@foxify/events"
import { onExit } from "signal-exit"
import open from "open"
import SetupHelper from "./helpers/setup"
import Logger from "./logger"
import Vars from "./vars"
import DB from "./db"
import PackageInstall from "./handlers/install"
import PackageExecute from "./handlers/execute"
import PackageUninstall from "./handlers/uninstall"
import PackageUpdate from "./handlers/update"
import PackageApply from "./handlers/apply"
import PackageList from "./handlers/list"
import PackageRead from "./handlers/read"
import PackageAuthorize from "./handlers/authorize"
import PackageCheckUpdate from "./handlers/checkUpdate"
/**
 * Core facade of the Relic package manager.
 *
 * Wires together the global event bus, logger and database, exposes every
 * package operation under `core.package.*`, installs prerequisite binaries
 * via `setup()`, and cleans the cache directory on process exit.
 */
export default class RelicCore {
    constructor(params) {
        this.params = params
    }

    // Also published globally so handlers can emit without a core reference.
    eventBus = global._relic_eventBus = new EventEmitter()

    logger = Logger

    db = DB

    // Package operation handlers, keyed by operation name.
    package = {
        install: PackageInstall,
        execute: PackageExecute,
        uninstall: PackageUninstall,
        update: PackageUpdate,
        apply: PackageApply,
        list: PackageList,
        read: PackageRead,
        authorize: PackageAuthorize,
        checkUpdate: PackageCheckUpdate
    }

    /**
     * Opens the database and registers the exit-time cache cleanup.
     */
    async initialize() {
        await DB.initialize()

        onExit(this.onExit)
    }

    // Arrow field: passed as a bare callback to `onExit`.
    onExit = () => {
        if (!fs.existsSync(Vars.cache_path)) {
            return
        }

        fs.rmSync(Vars.cache_path, { recursive: true, force: true })
    }

    /**
     * Ensures all prerequisite binaries are installed.
     */
    async setup() {
        return await SetupHelper()
    }

    /**
     * Opens a package's install directory in the system file explorer,
     * or the runtime directory when no package id is given.
     *
     * @param {string} [pkg_id] - Optional package id.
     */
    openPath(pkg_id) {
        return pkg_id
            ? open(`${Vars.packages_path}/${pkg_id}`)
            : open(Vars.runtime_path)
    }
}

View File

@ -1,955 +0,0 @@
import {type Buffer} from 'node:buffer';
import {type ChildProcess} from 'node:child_process';
import {type Stream, type Readable as ReadableStream, type Writable as WritableStream} from 'node:stream';
// Accepted values for a single stdio slot (stdin/stdout/stderr), mirroring
// Node's child_process `stdio` entries: a named mode, an existing Stream,
// a file descriptor number, or undefined (use the default).
export type StdioOption =
| 'pipe'
| 'overlapped'
| 'ipc'
| 'ignore'
| 'inherit'
| Stream
| number
| undefined;
// Character encodings accepted by the `encoding` option; `'buffer'`/null
// switch stdout/stderr from string to Buffer.
type EncodingOption =
| 'utf8'
// eslint-disable-next-line unicorn/text-encoding-identifier-case
| 'utf-8'
| 'utf16le'
| 'utf-16le'
| 'ucs2'
| 'ucs-2'
| 'latin1'
| 'binary'
| 'ascii'
| 'hex'
| 'base64'
| 'base64url'
| 'buffer'
| null
| undefined;
// Encoding applied when the caller does not specify one.
type DefaultEncodingOption = 'utf8';
// Encodings that make stdout/stderr a Buffer instead of a string.
type BufferEncodingOption = 'buffer' | null;
export type CommonOptions<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
Kill the spawned process when the parent process exits unless either:
- the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached)
- the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit
@default true
*/
readonly cleanup?: boolean;
/**
Prefer locally installed binaries when looking for a binary to execute.
If you `$ npm install foo`, you can then `execa('foo')`.
@default `true` with `$`, `false` otherwise
*/
readonly preferLocal?: boolean;
/**
Preferred path to find locally installed binaries in (use with `preferLocal`).
@default process.cwd()
*/
readonly localDir?: string | URL;
/**
Path to the Node.js executable to use in child processes.
This can be either an absolute path or a path relative to the `cwd` option.
Requires `preferLocal` to be `true`.
For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process.
@default process.execPath
*/
readonly execPath?: string;
/**
Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected.
If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data.
@default true
*/
readonly buffer?: boolean;
/**
Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
@default `inherit` with `$`, `pipe` otherwise
*/
readonly stdin?: StdioOption;
/**
Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
@default 'pipe'
*/
readonly stdout?: StdioOption;
/**
Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio).
@default 'pipe'
*/
readonly stderr?: StdioOption;
/**
Setting this to `false` resolves the promise with the error instead of rejecting it.
@default true
*/
readonly reject?: boolean;
/**
Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved.
@default false
*/
readonly all?: boolean;
/**
Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output.
@default true
*/
readonly stripFinalNewline?: boolean;
/**
Set to `false` if you don't want to extend the environment variables when providing the `env` property.
@default true
*/
readonly extendEnv?: boolean;
/**
Current working directory of the child process.
@default process.cwd()
*/
readonly cwd?: string | URL;
/**
Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this.
@default process.env
*/
readonly env?: NodeJS.ProcessEnv;
/**
Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified.
*/
readonly argv0?: string;
/**
Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration.
@default 'pipe'
*/
readonly stdio?: 'pipe' | 'overlapped' | 'ignore' | 'inherit' | readonly StdioOption[];
/**
Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execaNode()`:
- `json`: Uses `JSON.stringify()` and `JSON.parse()`.
- `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value)
[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization)
@default 'json'
*/
readonly serialization?: 'json' | 'advanced';
/**
Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached).
@default false
*/
readonly detached?: boolean;
/**
Sets the user identity of the process.
*/
readonly uid?: number;
/**
Sets the group identity of the process.
*/
readonly gid?: number;
/**
If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows.
We recommend against using this option since it is:
- not cross-platform, encouraging shell-specific syntax.
- slower, because of the additional shell interpretation.
- unsafe, potentially allowing command injection.
@default false
*/
readonly shell?: boolean | string;
/**
Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `'buffer'` or `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string.
@default 'utf8'
*/
readonly encoding?: EncodingType;
/**
If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds.
@default 0
*/
readonly timeout?: number;
/**
Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB.
@default 100_000_000
*/
readonly maxBuffer?: number;
/**
Signal value to be used when the spawned process will be killed.
@default 'SIGTERM'
*/
readonly killSignal?: string | number;
/**
You can abort the spawned process using [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController).
When `AbortController.abort()` is called, [`.isCanceled`](https://github.com/sindresorhus/execa#iscanceled) becomes `true`.
@example
```
import {execa} from 'execa';
const abortController = new AbortController();
const subprocess = execa('node', [], {signal: abortController.signal});
setTimeout(() => {
abortController.abort();
}, 1000);
try {
await subprocess;
} catch (error) {
console.log(subprocess.killed); // true
console.log(error.isCanceled); // true
}
```
*/
readonly signal?: AbortSignal;
/**
If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`.
@default false
*/
readonly windowsVerbatimArguments?: boolean;
/**
On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows.
@default true
*/
readonly windowsHide?: boolean;
/**
Print each command on `stderr` before executing it.
This can also be enabled by setting the `NODE_DEBUG=execa` environment variable in the current process.
@default false
*/
readonly verbose?: boolean;
};
export type Options<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
Write some input to the `stdin` of your binary.
If the input is a file, use the `inputFile` option instead.
*/
readonly input?: string | Buffer | ReadableStream;
/**
Use a file as input to the the `stdin` of your binary.
If the input is not a file, use the `input` option instead.
*/
readonly inputFile?: string;
} & CommonOptions<EncodingType>;
export type SyncOptions<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
Write some input to the `stdin` of your binary.
If the input is a file, use the `inputFile` option instead.
*/
readonly input?: string | Buffer;
/**
Use a file as input to the the `stdin` of your binary.
If the input is not a file, use the `input` option instead.
*/
readonly inputFile?: string;
} & CommonOptions<EncodingType>;
export type NodeOptions<EncodingType extends EncodingOption = DefaultEncodingOption> = {
/**
The Node.js executable to use.
@default process.execPath
*/
readonly nodePath?: string;
/**
List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable.
@default process.execArgv
*/
readonly nodeOptions?: string[];
} & Options<EncodingType>;
// Possible stdout/stderr payload types: string (text mode), Buffer
// (`encoding: 'buffer'`/null), or undefined (output not buffered/piped).
type StdoutStderrAll = string | Buffer | undefined;
export type ExecaReturnBase<StdoutStderrType extends StdoutStderrAll> = {
/**
The file and arguments that were run, for logging purposes.
This is not escaped and should not be executed directly as a process, including using `execa()` or `execaCommand()`.
*/
command: string;
/**
Same as `command` but escaped.
This is meant to be copy and pasted into a shell, for debugging purposes.
Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execaCommand()`.
*/
escapedCommand: string;
/**
The numeric exit code of the process that was run.
*/
exitCode: number;
/**
The output of the process on stdout.
*/
stdout: StdoutStderrType;
/**
The output of the process on stderr.
*/
stderr: StdoutStderrType;
/**
Whether the process failed to run.
*/
failed: boolean;
/**
Whether the process timed out.
*/
timedOut: boolean;
/**
Whether the process was killed.
*/
killed: boolean;
/**
The name of the signal that was used to terminate the process. For example, `SIGFPE`.
If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`.
*/
signal?: string;
/**
A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`.
If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen.
*/
signalDescription?: string;
/**
The `cwd` of the command if provided in the command options. Otherwise it is `process.cwd()`.
*/
cwd: string;
};
export type ExecaSyncReturnValue<StdoutStderrType extends StdoutStderrAll = string> = {
} & ExecaReturnBase<StdoutStderrType>;
/**
Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance.
The child process fails when:
- its exit code is not `0`
- it was killed with a signal
- timing out
- being canceled
- there's not enough memory or there are already too many child processes
*/
export type ExecaReturnValue<StdoutStderrType extends StdoutStderrAll = string> = {
/**
The output of the process with `stdout` and `stderr` interleaved.
This is `undefined` if either:
- the `all` option is `false` (default value)
- `execaSync()` was used
*/
all?: StdoutStderrType;
/**
Whether the process was canceled.
You can cancel the spawned process using the [`signal`](https://github.com/sindresorhus/execa#signal-1) option.
*/
isCanceled: boolean;
} & ExecaSyncReturnValue<StdoutStderrType>;
export type ExecaSyncError<StdoutStderrType extends StdoutStderrAll = string> = {
/**
Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored.
The child process stderr then stdout are appended to the end, separated with newlines and not interleaved.
*/
message: string;
/**
This is the same as the `message` property except it does not include the child process stdout/stderr.
*/
shortMessage: string;
/**
Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa.
This is `undefined` unless the child process exited due to an `error` event or a timeout.
*/
originalMessage?: string;
} & Error & ExecaReturnBase<StdoutStderrType>;
export type ExecaError<StdoutStderrType extends StdoutStderrAll = string> = {
/**
The output of the process with `stdout` and `stderr` interleaved.
This is `undefined` if either:
- the `all` option is `false` (default value)
- `execaSync()` was used
*/
all?: StdoutStderrType;
/**
Whether the process was canceled.
*/
isCanceled: boolean;
} & ExecaSyncError<StdoutStderrType>;
export type KillOptions = {
/**
Milliseconds to wait for the child process to terminate before sending `SIGKILL`.
Can be disabled with `false`.
@default 5000
*/
forceKillAfterTimeout?: number | false;
};
export type ExecaChildPromise<StdoutStderrType extends StdoutStderrAll> = {
/**
Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr).
This is `undefined` if either:
- the `all` option is `false` (the default value)
- both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio)
*/
all?: ReadableStream;
catch<ResultType = never>(
onRejected?: (reason: ExecaError<StdoutStderrType>) => ResultType | PromiseLike<ResultType>
): Promise<ExecaReturnValue<StdoutStderrType> | ResultType>;
/**
Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. Note that this graceful termination does not work on Windows, because Windows [doesn't support signals](https://nodejs.org/api/process.html#process_signal_events) (`SIGKILL` and `SIGTERM` has the same effect of force-killing the process immediately.) If you want to achieve graceful termination on Windows, you have to use other means, such as [`taskkill`](https://github.com/sindresorhus/taskkill).
*/
kill(signal?: string, options?: KillOptions): void;
/**
Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This used to be preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. But now this is deprecated and you should either use `.kill()` or the `signal` option when creating the child process.
*/
cancel(): void;
/**
[Pipe](https://nodejs.org/api/stream.html#readablepipedestination-options) the child process's `stdout` to `target`, which can be:
- Another `execa()` return value
- A writable stream
- A file path string
If the `target` is another `execa()` return value, it is returned. Otherwise, the original `execa()` return value is returned. This allows chaining `pipeStdout()` then `await`ing the final result.
The `stdout` option] must be kept as `pipe`, its default value.
*/
pipeStdout?<Target extends ExecaChildPromise<StdoutStderrAll>>(target: Target): Target;
pipeStdout?(target: WritableStream | string): ExecaChildProcess<StdoutStderrType>;
/**
Like `pipeStdout()` but piping the child process's `stderr` instead.
The `stderr` option must be kept as `pipe`, its default value.
*/
pipeStderr?<Target extends ExecaChildPromise<StdoutStderrAll>>(target: Target): Target;
pipeStderr?(target: WritableStream | string): ExecaChildProcess<StdoutStderrType>;
/**
Combines both `pipeStdout()` and `pipeStderr()`.
Either the `stdout` option or the `stderr` option must be kept as `pipe`, their default value. Also, the `all` option must be set to `true`.
*/
pipeAll?<Target extends ExecaChildPromise<StdoutStderrAll>>(target: Target): Target;
pipeAll?(target: WritableStream | string): ExecaChildProcess<StdoutStderrType>;
};
export type ExecaChildProcess<StdoutStderrType extends StdoutStderrAll = string> = ChildProcess &
ExecaChildPromise<StdoutStderrType> &
Promise<ExecaReturnValue<StdoutStderrType>>;
/**
Executes a command using `file ...arguments`. `arguments` are specified as an array of strings. Returns a `childProcess`.
Arguments are automatically escaped. They can contain any character, including spaces.
This is the preferred method when executing single commands.
@param file - The program/script to execute.
@param arguments - Arguments to pass to `file` on execution.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example <caption>Promise interface</caption>
```
import {execa} from 'execa';
const {stdout} = await execa('echo', ['unicorns']);
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Redirect output to a file</caption>
```
import {execa} from 'execa';
// Similar to `echo unicorns > stdout.txt` in Bash
await execa('echo', ['unicorns']).pipeStdout('stdout.txt');
// Similar to `echo unicorns 2> stdout.txt` in Bash
await execa('echo', ['unicorns']).pipeStderr('stderr.txt');
// Similar to `echo unicorns &> stdout.txt` in Bash
await execa('echo', ['unicorns'], {all: true}).pipeAll('all.txt');
```
@example <caption>Redirect input from a file</caption>
```
import {execa} from 'execa';
// Similar to `cat < stdin.txt` in Bash
const {stdout} = await execa('cat', {inputFile: 'stdin.txt'});
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Save and pipe output from a child process</caption>
```
import {execa} from 'execa';
const {stdout} = await execa('echo', ['unicorns']).pipeStdout(process.stdout);
// Prints `unicorns`
console.log(stdout);
// Also returns 'unicorns'
```
@example <caption>Pipe multiple processes</caption>
```
import {execa} from 'execa';
// Similar to `echo unicorns | cat` in Bash
const {stdout} = await execa('echo', ['unicorns']).pipeStdout(execa('cat'));
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Handling errors</caption>
```
import {execa} from 'execa';
// Catching an error
try {
await execa('unknown', ['command']);
} catch (error) {
console.log(error);
/*
{
message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
errno: -2,
code: 'ENOENT',
syscall: 'spawn unknown',
path: 'unknown',
spawnargs: ['command'],
originalMessage: 'spawn unknown ENOENT',
shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
command: 'unknown command',
escapedCommand: 'unknown command',
stdout: '',
stderr: '',
failed: true,
timedOut: false,
isCanceled: false,
killed: false,
cwd: '/path/to/cwd'
}
\*\/
}
```
@example <caption>Graceful termination</caption>
```
const subprocess = execa('node');
setTimeout(() => {
subprocess.kill('SIGTERM', {
forceKillAfterTimeout: 2000
});
}, 1000);
```
*/
export function execa(
file: string,
arguments?: readonly string[],
options?: Options
): ExecaChildProcess;
export function execa(
file: string,
arguments?: readonly string[],
options?: Options<BufferEncodingOption>
): ExecaChildProcess<Buffer>;
export function execa(file: string, options?: Options): ExecaChildProcess;
export function execa(file: string, options?: Options<BufferEncodingOption>): ExecaChildProcess<Buffer>;
/**
Same as `execa()` but synchronous.
@param file - The program/script to execute.
@param arguments - Arguments to pass to `file` on execution.
@returns A `childProcessResult` object
@throws A `childProcessResult` error
@example <caption>Basic usage</caption>
```
import {execaSync} from 'execa';
const {stdout} = execaSync('echo', ['unicorns']);
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Redirect input from a file</caption>
```
import {execaSync} from 'execa';
// Similar to `cat < stdin.txt` in Bash
const {stdout} = execaSync('cat', {inputFile: 'stdin.txt'});
console.log(stdout);
//=> 'unicorns'
```
@example <caption>Handling errors</caption>
```
import {execaSync} from 'execa';
// Catching an error
try {
execaSync('unknown', ['command']);
} catch (error) {
console.log(error);
/*
{
message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT',
errno: -2,
code: 'ENOENT',
syscall: 'spawnSync unknown',
path: 'unknown',
spawnargs: ['command'],
originalMessage: 'spawnSync unknown ENOENT',
shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT',
command: 'unknown command',
escapedCommand: 'unknown command',
stdout: '',
stderr: '',
failed: true,
timedOut: false,
isCanceled: false,
killed: false,
cwd: '/path/to/cwd'
}
\*\/
}
```
*/
// Overload: explicit `arguments` array; string output (default encoding).
export function execaSync(
file: string,
arguments?: readonly string[],
options?: SyncOptions
): ExecaSyncReturnValue;
// Overload: explicit `arguments` array; `Buffer` output.
export function execaSync(
file: string,
arguments?: readonly string[],
options?: SyncOptions<BufferEncodingOption>
): ExecaSyncReturnValue<Buffer>;
// Overload: no `arguments` array; string output.
export function execaSync(file: string, options?: SyncOptions): ExecaSyncReturnValue;
// Overload: no `arguments` array; `Buffer` output.
export function execaSync(
file: string,
options?: SyncOptions<BufferEncodingOption>
): ExecaSyncReturnValue<Buffer>;
/**
Executes a command. The `command` string includes both the `file` and its `arguments`. Returns a `childProcess`.
Arguments are automatically escaped. They can contain any character, but spaces must be escaped with a backslash like `execaCommand('echo has\\ space')`.
This is the preferred method when executing a user-supplied `command` string, such as in a REPL.
@param command - The program/script to execute and its arguments.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example
```
import {execaCommand} from 'execa';
const {stdout} = await execaCommand('echo unicorns');
console.log(stdout);
//=> 'unicorns'
```
*/
// Overload: string output (default encoding).
export function execaCommand(command: string, options?: Options): ExecaChildProcess;
// Overload: `Buffer` output.
export function execaCommand(command: string, options?: Options<BufferEncodingOption>): ExecaChildProcess<Buffer>;
/**
Same as `execaCommand()` but synchronous.
@param command - The program/script to execute and its arguments.
@returns A `childProcessResult` object
@throws A `childProcessResult` error
@example
```
import {execaCommandSync} from 'execa';
const {stdout} = execaCommandSync('echo unicorns');
console.log(stdout);
//=> 'unicorns'
```
*/
// Overload: string output (default encoding).
export function execaCommandSync(command: string, options?: SyncOptions): ExecaSyncReturnValue;
// Overload: `Buffer` output.
export function execaCommandSync(command: string, options?: SyncOptions<BufferEncodingOption>): ExecaSyncReturnValue<Buffer>;
// Values accepted as `${…}` interpolations in $`…` templates: strings,
// numbers, execa results (their `stdout` is used), or arrays of those.
type TemplateExpression =
| string
| number
| ExecaReturnValue<string | Buffer>
| ExecaSyncReturnValue<string | Buffer>
| Array<string | number | ExecaReturnValue<string | Buffer> | ExecaSyncReturnValue<string | Buffer>>;
// Callable type of `$`: usable either as `$(options)` (returning a rebound
// instance) or as a template tag `` $`command` `` (running the command).
type Execa$<StdoutStderrType extends StdoutStderrAll = string> = {
/**
Returns a new instance of `$` but with different default `options`. Consecutive calls are merged to previous ones.
This can be used to either:
- Set options for a specific command: `` $(options)`command` ``
- Share options for multiple commands: `` const $$ = $(options); $$`command`; $$`otherCommand` ``
@param options - Options to set
@returns A new instance of `$` with those `options` set
@example
```
import {$} from 'execa';
const $$ = $({stdio: 'inherit'});
await $$`echo unicorns`;
//=> 'unicorns'
await $$`echo rainbows`;
//=> 'rainbows'
```
*/
// Overload: options without `encoding` keep the current stdout/stderr type.
(options: Options<undefined>): Execa$<StdoutStderrType>;
// Overload: string-encoding options reset to string output.
(options: Options): Execa$;
// Overload: buffer-encoding options switch to `Buffer` output.
(options: Options<BufferEncodingOption>): Execa$<Buffer>;
// Template-tag call form: runs the interpolated command.
(
templates: TemplateStringsArray,
...expressions: TemplateExpression[]
): ExecaChildProcess<StdoutStderrType>;
/**
Same as $\`command\` but synchronous.
@returns A `childProcessResult` object
@throws A `childProcessResult` error
@example <caption>Basic</caption>
```
import {$} from 'execa';
const branch = $.sync`git branch --show-current`;
$.sync`dep deploy --branch=${branch}`;
```
@example <caption>Multiple arguments</caption>
```
import {$} from 'execa';
const args = ['unicorns', '&', 'rainbows!'];
const {stdout} = $.sync`echo ${args}`;
console.log(stdout);
//=> 'unicorns & rainbows!'
```
@example <caption>With options</caption>
```
import {$} from 'execa';
$.sync({stdio: 'inherit'})`echo unicorns`;
//=> 'unicorns'
```
@example <caption>Shared options</caption>
```
import {$} from 'execa';
const $$ = $({stdio: 'inherit'});
$$.sync`echo unicorns`;
//=> 'unicorns'
$$.sync`echo rainbows`;
//=> 'rainbows'
```
*/
sync(
templates: TemplateStringsArray,
...expressions: TemplateExpression[]
): ExecaSyncReturnValue<StdoutStderrType>;
};
/**
Executes a command. The `command` string includes both the `file` and its `arguments`. Returns a `childProcess`.
Arguments are automatically escaped. They can contain any character, but spaces must use `${}` like `` $`echo ${'has space'}` ``.
This is the preferred method when executing multiple commands in a script file.
The `command` string can inject any `${value}` with the following types: string, number, `childProcess` or an array of those types. For example: `` $`echo one ${'two'} ${3} ${['four', 'five']}` ``. For `${childProcess}`, the process's `stdout` is used.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example <caption>Basic</caption>
```
import {$} from 'execa';
const branch = await $`git branch --show-current`;
await $`dep deploy --branch=${branch}`;
```
@example <caption>Multiple arguments</caption>
```
import {$} from 'execa';
const args = ['unicorns', '&', 'rainbows!'];
const {stdout} = await $`echo ${args}`;
console.log(stdout);
//=> 'unicorns & rainbows!'
```
@example <caption>With options</caption>
```
import {$} from 'execa';
await $({stdio: 'inherit'})`echo unicorns`;
//=> 'unicorns'
```
@example <caption>Shared options</caption>
```
import {$} from 'execa';
const $$ = $({stdio: 'inherit'});
await $$`echo unicorns`;
//=> 'unicorns'
await $$`echo rainbows`;
//=> 'rainbows'
```
*/
// Tagged-template executor; see the `Execa$` type above for all call forms.
export const $: Execa$;
/**
Execute a Node.js script as a child process.
Arguments are automatically escaped. They can contain any character, including spaces.
This is the preferred method when executing Node.js files.
Like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options):
- the current Node version and options are used. This can be overridden using the `nodePath` and `nodeOptions` options.
- the `shell` option cannot be used
- an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to `stdio`
@param scriptPath - Node.js script to execute.
@param arguments - Arguments to pass to `scriptPath` on execution.
@returns An `ExecaChildProcess` that is both:
- a `Promise` resolving or rejecting with a `childProcessResult`.
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with some additional methods and properties.
@throws A `childProcessResult` error
@example
```
import {execaNode} from 'execa';
await execaNode('scriptPath', ['argument']);
```
*/
// Overload: explicit `arguments` array; string output (default encoding).
export function execaNode(
scriptPath: string,
arguments?: readonly string[],
options?: NodeOptions
): ExecaChildProcess;
// Overload: explicit `arguments` array; `Buffer` output.
export function execaNode(
scriptPath: string,
arguments?: readonly string[],
options?: NodeOptions<BufferEncodingOption>
): ExecaChildProcess<Buffer>;
// Overload: no `arguments` array; string output.
export function execaNode(scriptPath: string, options?: NodeOptions): ExecaChildProcess;
// Overload: no `arguments` array; `Buffer` output.
export function execaNode(scriptPath: string, options?: NodeOptions<BufferEncodingOption>): ExecaChildProcess<Buffer>;

View File

@ -1,309 +0,0 @@
import {Buffer} from 'node:buffer';
import path from 'node:path';
import childProcess from 'node:child_process';
import process from 'node:process';
import crossSpawn from 'cross-spawn';
import stripFinalNewline from '../strip-final-newline';
import {npmRunPathEnv} from '../npm-run-path';
import onetime from '../onetime';
import {makeError} from './lib/error.js';
import {normalizeStdio, normalizeStdioNode} from './lib/stdio.js';
import {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} from './lib/kill.js';
import {addPipeMethods} from './lib/pipe.js';
import {handleInput, getSpawnedResult, makeAllStream, handleInputSync} from './lib/stream.js';
import {mergePromise, getSpawnedPromise} from './lib/promise.js';
import {joinCommand, parseCommand, parseTemplates, getEscapedCommand} from './lib/command.js';
import {logCommand, verboseDefault} from './lib/verbose.js';
// Maximum number of bytes buffered per stream (100 MB) before erroring out.
const DEFAULT_MAX_BUFFER = 100_000_000;

// Computes the child process environment. When `extendEnv` is on, the given
// `env` entries are layered on top of the current `process.env`; otherwise
// only the given entries are used. With `preferLocal`, locally installed
// binaries are made reachable through `npmRunPathEnv`.
const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => {
	const mergedEnv = extendEnv ? {...process.env, ...envOption} : envOption;
	return preferLocal
		? npmRunPathEnv({env: mergedEnv, cwd: localDir, execPath})
		: mergedEnv;
};
// Normalizes (file, args, options) into the final spawn parameters:
// runs cross-spawn's parsing (shebang/Windows handling), applies execa's
// defaults, computes the environment and normalizes `stdio`.
const handleArguments = (file, args, options = {}) => {
const parsed = crossSpawn._parse(file, args, options);
file = parsed.command;
args = parsed.args;
options = parsed.options;
// Execa defaults; user-provided options win because they are spread last.
options = {
maxBuffer: DEFAULT_MAX_BUFFER,
buffer: true,
stripFinalNewline: true,
extendEnv: true,
preferLocal: false,
localDir: options.cwd || process.cwd(),
execPath: process.execPath,
encoding: 'utf8',
reject: true,
cleanup: true,
all: false,
windowsHide: true,
verbose: verboseDefault,
...options,
};
options.env = getEnv(options);
options.stdio = normalizeStdio(options);
if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') {
// #116
// `/q` turns off cmd.exe's echoing so it does not pollute the output.
args.unshift('/q');
}
return {file, args, options, parsed};
};
// Normalizes a stdout/stderr result: non-string/non-Buffer values become
// `undefined` (or '' when an error occurred), and the trailing newline is
// stripped when the `stripFinalNewline` option is on.
const handleOutput = (options, value, error) => {
	const isText = typeof value === 'string' || Buffer.isBuffer(value);
	if (!isText) {
		// When `execaSync()` errors, we normalize it to '' to mimic `execa()`
		return error === undefined ? undefined : '';
	}
	return options.stripFinalNewline ? stripFinalNewline(value) : value;
};
// Spawns `file` with `args`. Returns the `child_process` object, augmented
// into a Promise of the final result (via `mergePromise`) plus `kill()`
// force-kill behavior, `cancel()` and the pipe helper methods.
export function execa(file, args, options) {
const parsed = handleArguments(file, args, options);
const command = joinCommand(file, args);
const escapedCommand = getEscapedCommand(file, args);
// Prints the command to stderr when the `verbose` option is on.
logCommand(escapedCommand, parsed.options);
validateTimeout(parsed.options);
let spawned;
try {
spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options);
} catch (error) {
// Ensure the returned error is always both a promise and a child process
const dummySpawned = new childProcess.ChildProcess();
const errorPromise = Promise.reject(makeError({
error,
stdout: '',
stderr: '',
all: '',
command,
escapedCommand,
parsed,
timedOut: false,
isCanceled: false,
killed: false,
}));
mergePromise(dummySpawned, errorPromise);
return dummySpawned;
}
const spawnedPromise = getSpawnedPromise(spawned);
// Layer the `timeout` and `cleanup` behaviors over the bare exit promise.
const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise);
const processDone = setExitHandler(spawned, parsed.options, timedPromise);
const context = {isCanceled: false};
// `kill()` gains the `forceKillAfterTimeout` escalation; `cancel()` records
// the cancellation in `context` for the final error object.
spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned));
spawned.cancel = spawnedCancel.bind(null, spawned, context);
// Builds the final result (or error) once the process exited and the
// stdout/stderr/all streams were fully read.
const handlePromise = async () => {
const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone);
const stdout = handleOutput(parsed.options, stdoutResult);
const stderr = handleOutput(parsed.options, stderrResult);
const all = handleOutput(parsed.options, allResult);
if (error || exitCode !== 0 || signal !== null) {
const returnedError = makeError({
error,
exitCode,
signal,
stdout,
stderr,
all,
command,
escapedCommand,
parsed,
timedOut,
isCanceled: context.isCanceled || (parsed.options.signal ? parsed.options.signal.aborted : false),
killed: spawned.killed,
});
// With `reject: false`, failures are returned instead of thrown.
if (!parsed.options.reject) {
return returnedError;
}
throw returnedError;
}
return {
command,
escapedCommand,
exitCode: 0,
stdout,
stderr,
all,
failed: false,
timedOut: false,
isCanceled: false,
killed: false,
};
};
// The result is computed at most once, on first `then`/`await`.
const handlePromiseOnce = onetime(handlePromise);
handleInput(spawned, parsed.options);
spawned.all = makeAllStream(spawned, parsed.options);
addPipeMethods(spawned);
mergePromise(spawned, handlePromiseOnce);
return spawned;
}
// Synchronous counterpart of `execa()`: blocks until the process exits and
// returns (or throws) the result object directly.
export function execaSync(file, args, options) {
const parsed = handleArguments(file, args, options);
const command = joinCommand(file, args);
const escapedCommand = getEscapedCommand(file, args);
logCommand(escapedCommand, parsed.options);
// Sync mode cannot stream stdin, so the input is materialized up front.
const input = handleInputSync(parsed.options);
let result;
try {
result = childProcess.spawnSync(parsed.file, parsed.args, {...parsed.options, input});
} catch (error) {
throw makeError({
error,
stdout: '',
stderr: '',
all: '',
command,
escapedCommand,
parsed,
timedOut: false,
isCanceled: false,
killed: false,
});
}
const stdout = handleOutput(parsed.options, result.stdout, result.error);
const stderr = handleOutput(parsed.options, result.stderr, result.error);
if (result.error || result.status !== 0 || result.signal !== null) {
const error = makeError({
stdout,
stderr,
error: result.error,
signal: result.signal,
exitCode: result.status,
command,
escapedCommand,
parsed,
timedOut: result.error && result.error.code === 'ETIMEDOUT',
isCanceled: false,
killed: result.signal !== null,
});
// With `reject: false`, failures are returned instead of thrown.
if (!parsed.options.reject) {
return error;
}
throw error;
}
return {
command,
escapedCommand,
exitCode: 0,
stdout,
stderr,
failed: false,
timedOut: false,
isCanceled: false,
killed: false,
};
}
// For $`…` scripts: default stdin to 'inherit' unless the caller configured
// any input-related option (`input`, `inputFile` or `stdio`).
const normalizeScriptStdin = ({input, inputFile, stdio}) => {
	const hasInputOption = input !== undefined || inputFile !== undefined || stdio !== undefined;
	return hasInputOption ? {} : {stdin: 'inherit'};
};

// Script-mode defaults: prefer locally installed binaries and inherit stdin,
// both overridable by user options (spread last).
const normalizeScriptOptions = (options = {}) => ({
	preferLocal: true,
	...normalizeScriptStdin(options),
	...options,
});
// Builds a `$` instance bound to the given default `options`.
// Calling the instance with an options object returns a new instance with
// merged defaults; calling it as a template tag runs the command.
function create$(options) {
function $(templatesOrOptions, ...expressions) {
if (!Array.isArray(templatesOrOptions)) {
// `$(options)` call form: merge and return a new bound instance.
return create$({...options, ...templatesOrOptions});
}
const [file, ...args] = parseTemplates(templatesOrOptions, expressions);
return execa(file, args, normalizeScriptOptions(options));
}
// $.sync`command`: same template parsing, synchronous execution.
$.sync = (templates, ...expressions) => {
if (!Array.isArray(templates)) {
throw new TypeError('Please use $(options).sync`command` instead of $.sync(options)`command`.');
}
const [file, ...args] = parseTemplates(templates, expressions);
return execaSync(file, args, normalizeScriptOptions(options));
};
return $;
}
// The default `$`, with no preset options.
export const $ = create$();
// Parses a full command line ("file arg1 arg2") and runs it asynchronously.
export function execaCommand(command, options) {
	const tokens = parseCommand(command);
	return execa(tokens[0], tokens.slice(1), options);
}

// Parses a full command line ("file arg1 arg2") and runs it synchronously.
export function execaCommandSync(command, options) {
	const tokens = parseCommand(command);
	return execaSync(tokens[0], tokens.slice(1), options);
}
// Runs a Node.js script as a child process: the current Node binary and exec
// arguments are reused (minus `--inspect*`, which would clash with the
// parent's debugger port) and an `ipc` channel is appended to `stdio`.
// Both are overridable through `nodePath` and `nodeOptions`.
export function execaNode(scriptPath, args, options = {}) {
// Allow `execaNode(scriptPath, options)` with no args array.
if (args && !Array.isArray(args) && typeof args === 'object') {
options = args;
args = [];
}
const stdio = normalizeStdioNode(options);
const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect'));
const {
nodePath = process.execPath,
nodeOptions = defaultExecArgv,
} = options;
return execa(
nodePath,
[
...nodeOptions,
scriptPath,
...(Array.isArray(args) ? args : []),
],
{
...options,
// The aliases were already folded into `stdio` by `normalizeStdioNode()`.
stdin: undefined,
stdout: undefined,
stderr: undefined,
stdio,
shell: false,
},
);
}

View File

@ -1,119 +0,0 @@
import {Buffer} from 'node:buffer';
import {ChildProcess} from 'node:child_process';
// Flattens (file, args) into one token list; a non-array `args` is ignored.
const normalizeArgs = (file, args = []) => Array.isArray(args) ? [file, ...args] : [file];

// Arguments made only of word characters, dots and dashes need no quoting.
const NO_ESCAPE_REGEXP = /^[\w.-]+$/;

// Wraps an argument in double quotes (escaping embedded quotes) unless it is
// a non-string or needs no quoting at all.
const escapeArg = arg => {
	const isPlain = typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg);
	return isPlain ? arg : `"${arg.replaceAll('"', '\\"')}"`;
};

// Human-readable command line, without any quoting.
export const joinCommand = (file, args) => normalizeArgs(file, args).join(' ');

// Human-readable command line with shell-style quoting, for display purposes.
export const getEscapedCommand = (file, args) => normalizeArgs(file, args).map(token => escapeArg(token)).join(' ');
// Tokenizes on runs of one or more spaces.
const SPACES_REGEXP = / +/g;

// Handle `execaCommand()`
// Splits a command string into tokens, honoring backslash-escaped spaces
// ("has\\ space" stays a single token).
export const parseCommand = command => {
	const tokens = [];
	for (const token of command.trim().split(SPACES_REGEXP)) {
		const previousToken = tokens.at(-1);
		if (previousToken && previousToken.endsWith('\\')) {
			// The trailing backslash escaped the delimiter: glue the tokens back.
			tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`;
		} else {
			tokens.push(token);
		}
	}

	return tokens;
};
// Converts a template-literal interpolation into a command token: strings
// pass through, numbers are stringified, and execa result objects contribute
// their `stdout` (string or Buffer). Anything else is rejected.
const parseExpression = expression => {
	const typeOfExpression = typeof expression;
	if (typeOfExpression === 'string') {
		return expression;
	}

	if (typeOfExpression === 'number') {
		return String(expression);
	}

	const isResultLike = typeOfExpression === 'object'
		&& expression !== null
		&& !(expression instanceof ChildProcess)
		&& 'stdout' in expression;
	if (!isResultLike) {
		throw new TypeError(`Unexpected "${typeOfExpression}" in template expression`);
	}

	const {stdout} = expression;
	if (typeof stdout === 'string') {
		return stdout;
	}

	if (Buffer.isBuffer(stdout)) {
		return stdout.toString();
	}

	throw new TypeError(`Unexpected "${typeof stdout}" stdout in template expression`);
};
// Joins two token lists. When `isNew` is false and both lists are non-empty,
// the boundary tokens are fused (no space separated them in the template).
const concatTokens = (tokens, nextTokens, isNew) => {
	if (isNew || tokens.length === 0 || nextTokens.length === 0) {
		return [...tokens, ...nextTokens];
	}

	const [firstNext, ...restNext] = nextTokens;
	return [...tokens.slice(0, -1), `${tokens.at(-1)}${firstNext}`, ...restNext];
};
// Parses one template chunk plus the expression that follows it (if any).
// Boundary tokens are fused unless the chunk starts/ends with a space.
const parseTemplate = ({templates, expressions, tokens, index, template}) => {
// Falls back to the raw (escape-preserving) text of this chunk.
const templateString = template ?? templates.raw[index];
const templateTokens = templateString.split(SPACES_REGEXP).filter(Boolean);
const newTokens = concatTokens(
tokens,
templateTokens,
templateString.startsWith(' '),
);
// The last template chunk has no trailing expression.
if (index === expressions.length) {
return newTokens;
}
const expression = expressions[index];
const expressionTokens = Array.isArray(expression)
? expression.map(expression => parseExpression(expression))
: [parseExpression(expression)];
return concatTokens(
newTokens,
expressionTokens,
templateString.endsWith(' '),
);
};
// Handle $`command` and $.sync`command`: folds every template chunk and its
// interpolated expression into a flat token list (file + args).
export const parseTemplates = (templates, expressions) => {
let tokens = [];
for (const [index, template] of templates.entries()) {
tokens = parseTemplate({templates, expressions, tokens, index, template});
}
return tokens;
};

View File

@ -1,87 +0,0 @@
import process from 'node:process';
import {signalsByName} from '../../human-signals';
// Builds the human-readable failure reason used in error messages. Causes
// are checked in priority order: timeout, cancellation, spawn error code,
// signal, then exit code; 'failed' is the catch-all.
const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => {
	const candidates = [
		[timedOut, () => `timed out after ${timeout} milliseconds`],
		[isCanceled, () => 'was canceled'],
		[errorCode !== undefined, () => `failed with ${errorCode}`],
		[signal !== undefined, () => `was killed with ${signal} (${signalDescription})`],
		[exitCode !== undefined, () => `failed with exit code ${exitCode}`],
	];
	const match = candidates.find(([matches]) => matches);
	return match === undefined ? 'failed' : match[1]();
};
// Builds the error/result object for a failed process: decorates either the
// original `error` or a fresh `Error` with execa's result properties
// (command, exitCode, signal, stdout, stderr, cwd, flags, …).
export const makeError = ({
stdout,
stderr,
all,
error,
signal,
exitCode,
command,
escapedCommand,
timedOut,
isCanceled,
killed,
parsed: {options: {timeout, cwd = process.cwd()}},
}) => {
// `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`.
// We normalize them to `undefined`
exitCode = exitCode === null ? undefined : exitCode;
signal = signal === null ? undefined : signal;
const signalDescription = signal === undefined ? undefined : signalsByName[signal].description;
const errorCode = error && error.code;
const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled});
const execaMessage = `Command ${prefix}: ${command}`;
const isError = Object.prototype.toString.call(error) === '[object Error]';
const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage;
const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n');
if (isError) {
// Keep the original message reachable while replacing it with the fuller one.
error.originalMessage = error.message;
error.message = message;
} else {
error = new Error(message);
}
error.shortMessage = shortMessage;
error.command = command;
error.escapedCommand = escapedCommand;
error.exitCode = exitCode;
error.signal = signal;
error.signalDescription = signalDescription;
error.stdout = stdout;
error.stderr = stderr;
error.cwd = cwd;
if (all !== undefined) {
error.all = all;
}
if ('bufferedData' in error) {
// Internal leftover from stream buffering; not part of the public result.
delete error.bufferedData;
}
error.failed = true;
error.timedOut = Boolean(timedOut);
error.isCanceled = isCanceled;
// A kill caused by the `timeout` option is reported as a timeout, not a kill.
error.killed = killed && !timedOut;
return error;
};

View File

@ -1,102 +0,0 @@
import os from 'node:os';
import {onExit} from 'signal-exit';
// Grace period before the follow-up SIGKILL (5 seconds).
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;
// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior
// `kill` is the original (already bound) kill function.
export const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => {
const killResult = kill(signal);
setKillTimeout(kill, signal, options, killResult);
return killResult;
};
// Schedules the SIGKILL escalation when a SIGTERM did not terminate the
// process within `forceKillAfterTimeout` milliseconds.
const setKillTimeout = (kill, signal, options, killResult) => {
if (!shouldForceKill(signal, options, killResult)) {
return;
}
const timeout = getForceKillAfterTimeout(options);
const t = setTimeout(() => {
kill('SIGKILL');
}, timeout);
// Guarded because there's no `.unref()` when `execa` is used in the renderer
// process in Electron. This cannot be tested since we don't run tests in
// Electron.
// istanbul ignore else
if (t.unref) {
t.unref();
}
};
// A follow-up SIGKILL is scheduled only when the first signal was SIGTERM,
// the option was not disabled, and the initial kill() call succeeded.
const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) =>
	isSigterm(signal) && forceKillAfterTimeout !== false && killResult;

// Accepts both the numeric signal constant and the (case-insensitive) name.
const isSigterm = signal => {
	if (signal === os.constants.signals.SIGTERM) {
		return true;
	}

	return typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM';
};

// Resolves the `forceKillAfterTimeout` option: `true` means the default
// delay; otherwise it must be a non-negative finite number of milliseconds.
const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
	if (forceKillAfterTimeout === true) {
		return DEFAULT_FORCE_KILL_TIMEOUT;
	}

	const isValid = Number.isFinite(forceKillAfterTimeout) && forceKillAfterTimeout >= 0;
	if (!isValid) {
		throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`);
	}

	return forceKillAfterTimeout;
};
// `childProcess.cancel()`: kills the process and, when the kill succeeded,
// records the cancellation so the final error is flagged `isCanceled`.
export const spawnedCancel = (spawned, context) => {
	if (spawned.kill()) {
		context.isCanceled = true;
	}
};

// Kills the process on timeout and rejects the result promise with an error
// marked `timedOut`, so `makeError()` builds the right message.
const timeoutKill = (spawned, signal, reject) => {
	spawned.kill(signal);
	const timeoutError = Object.assign(new Error('Timed out'), {timedOut: true, signal});
	reject(timeoutError);
};
// `timeout` option handling
// Races the process's exit against a timer: on expiry the process is killed
// with `killSignal` and the returned promise rejects with a `timedOut` error.
export const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => {
if (timeout === 0 || timeout === undefined) {
return spawnedPromise;
}
let timeoutId;
const timeoutPromise = new Promise((resolve, reject) => {
timeoutId = setTimeout(() => {
timeoutKill(spawned, killSignal, reject);
}, timeout);
});
// Always clear the timer, whether the process exits normally or not.
const safeSpawnedPromise = spawnedPromise.finally(() => {
clearTimeout(timeoutId);
});
return Promise.race([timeoutPromise, safeSpawnedPromise]);
};
// Rejects invalid `timeout` options early, before spawning. `undefined` is
// allowed (no timeout); anything else must be a non-negative finite number
// of milliseconds.
export const validateTimeout = ({timeout}) => {
	if (timeout === undefined) {
		return;
	}

	if (!Number.isFinite(timeout) || timeout < 0) {
		throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
	}
};
// `cleanup` option handling
// Kills the child when the parent process exits, unless the child is
// `detached`. The exit handler is removed once the child finishes on its own.
export const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => {
if (!cleanup || detached) {
return timedPromise;
}
const removeExitHandler = onExit(() => {
spawned.kill();
});
return timedPromise.finally(() => {
removeExitHandler();
});
};

View File

@ -1,42 +0,0 @@
import {createWriteStream} from 'node:fs';
import {ChildProcess} from 'node:child_process';
import {isWritableStream} from '../../is-stream';
// An execa child process is a ChildProcess that is also thenable.
const isExecaChildProcess = target => target instanceof ChildProcess && typeof target.then === 'function';
// Shared implementation of `pipeStdout`/`pipeStderr`/`pipeAll`: pipes the
// named stream of `spawned` into a file path, a writable stream, or another
// execa child process's stdin. Returns `spawned`, except when the target is
// an execa process — then the target is returned so pipes can be chained.
const pipeToTarget = (spawned, streamName, target) => {
if (typeof target === 'string') {
// A string target is treated as a file path.
spawned[streamName].pipe(createWriteStream(target));
return spawned;
}
if (isWritableStream(target)) {
spawned[streamName].pipe(target);
return spawned;
}
if (!isExecaChildProcess(target)) {
throw new TypeError('The second argument must be a string, a stream or an Execa child process.');
}
if (!isWritableStream(target.stdin)) {
throw new TypeError('The target child process\'s stdin must be available.');
}
spawned[streamName].pipe(target.stdin);
return target;
};
// Attaches `pipeStdout()` / `pipeStderr()` / `pipeAll()` helpers to the
// child process, but only for the streams that actually exist on it.
export const addPipeMethods = spawned => {
	const pipeMethods = [
		['stdout', 'pipeStdout', spawned.stdout !== null],
		['stderr', 'pipeStderr', spawned.stderr !== null],
		['all', 'pipeAll', spawned.all !== undefined],
	];
	for (const [streamName, methodName, isAvailable] of pipeMethods) {
		if (isAvailable) {
			spawned[methodName] = pipeToTarget.bind(undefined, spawned, streamName);
		}
	}
};

View File

@ -1,36 +0,0 @@
// Grab the native Promise prototype off an async function's return value.
// eslint-disable-next-line unicorn/prefer-top-level-await
const nativePromisePrototype = (async () => {})().constructor.prototype;

// Property descriptors for the thenable methods, captured once at load time.
const descriptors = ['then', 'catch', 'finally'].map(property => [
	property,
	Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property),
]);

// The return value is a mixin of `childProcess` and `Promise`: `spawned`
// gains `then`/`catch`/`finally` delegating to `promise`. When `promise` is
// a function, starting it is deferred to the first thenable call so streams
// are not consumed prematurely.
export const mergePromise = (spawned, promise) => {
	for (const [property, descriptor] of descriptors) {
		const value = typeof promise === 'function'
			? (...args) => Reflect.apply(descriptor.value, promise(), args)
			: descriptor.value.bind(promise);
		Reflect.defineProperty(spawned, property, {...descriptor, value});
	}
};
// Use promises instead of `child_process` events: resolves with
// `{exitCode, signal}` on 'exit', rejects on a process or stdin 'error'.
export const getSpawnedPromise = spawned => new Promise((resolve, reject) => {
	spawned.on('exit', (exitCode, signal) => resolve({exitCode, signal}));
	spawned.on('error', reject);
	if (spawned.stdin) {
		spawned.stdin.on('error', reject);
	}
});

View File

@ -1,49 +0,0 @@
// The three standard stream aliases, in stdio index order.
const aliases = ['stdin', 'stdout', 'stderr'];

const hasAlias = options => aliases.some(alias => options[alias] !== undefined);

// Normalizes the `stdio` option (and the `stdin`/`stdout`/`stderr` aliases)
// into either a string shorthand or an array of per-stream settings.
export const normalizeStdio = options => {
	if (!options) {
		return;
	}

	const {stdio} = options;
	if (stdio === undefined) {
		// No `stdio`: build the array from the individual aliases.
		return aliases.map(alias => options[alias]);
	}

	if (hasAlias(options)) {
		throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`);
	}

	if (typeof stdio === 'string') {
		return stdio;
	}

	if (!Array.isArray(stdio)) {
		throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
	}

	// Pad the array with `undefined` up to the three standard streams.
	const length = Math.max(stdio.length, aliases.length);
	return Array.from({length}, (_, index) => stdio[index]);
};

// `ipc` is pushed unless it is already present
export const normalizeStdioNode = options => {
	const stdio = normalizeStdio(options);
	if (stdio === 'ipc') {
		return 'ipc';
	}

	if (stdio === undefined || typeof stdio === 'string') {
		return [stdio, stdio, stdio, 'ipc'];
	}

	return stdio.includes('ipc') ? stdio : [...stdio, 'ipc'];
};

View File

@ -1,133 +0,0 @@
import {createReadStream, readFileSync} from 'node:fs';
import {setTimeout} from 'node:timers/promises';
import {isStream} from '../../is-stream';
import getStream, {getStreamAsBuffer} from '../../get-stream';
import mergeStream from 'merge-stream';
// Guards against setting both `input` and `inputFile`.
const validateInputOptions = input => {
if (input !== undefined) {
throw new TypeError('The `input` and `inputFile` options cannot be both set.');
}
};
// Resolves the stdin contents in sync mode: either the `input` value itself
// or the contents of `inputFile` read synchronously.
const getInputSync = ({input, inputFile}) => {
if (typeof inputFile !== 'string') {
return input;
}
validateInputOptions(input);
return readFileSync(inputFile);
};
// `input` and `inputFile` option in sync mode
export const handleInputSync = options => {
const input = getInputSync(options);
if (isStream(input)) {
throw new TypeError('The `input` option cannot be a stream in sync mode');
}
return input;
};
// Resolves the stdin source in async mode: the `input` value itself or a
// read stream over `inputFile`.
const getInput = ({input, inputFile}) => {
if (typeof inputFile !== 'string') {
return input;
}
validateInputOptions(input);
return createReadStream(inputFile);
};
// `input` and `inputFile` option in async mode
export const handleInput = (spawned, options) => {
const input = getInput(options);
if (input === undefined) {
return;
}
if (isStream(input)) {
input.pipe(spawned.stdin);
} else {
// Non-stream input (e.g. a string or Buffer) is written out in one go.
spawned.stdin.end(input);
}
};
// `all` interleaves `stdout` and `stderr`
// Returns undefined when the `all` option is off or neither stream exists.
export const makeAllStream = (spawned, {all}) => {
if (!all || (!spawned.stdout && !spawned.stderr)) {
return;
}
const mixed = mergeStream();
if (spawned.stdout) {
mixed.add(spawned.stdout);
}
if (spawned.stderr) {
mixed.add(spawned.stderr);
}
return mixed;
};
// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
const getBufferedData = async (stream, streamPromise) => {
	// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve
	if (!stream || streamPromise === undefined) {
		return;
	}

	// Wait for the `all` stream to receive the last chunk before destroying the stream
	await setTimeout(0);
	stream.destroy();
	// A rejected buffering promise still carries the partial data it collected.
	return streamPromise.catch(error => error.bufferedData);
};
// Starts buffering a stream according to the `encoding` option. Returns
// undefined when there is no stream or buffering is disabled.
const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {
if (!stream || !buffer) {
return;
}
// eslint-disable-next-line unicorn/text-encoding-identifier-case
if (encoding === 'utf8' || encoding === 'utf-8') {
return getStream(stream, {maxBuffer});
}
if (encoding === null || encoding === 'buffer') {
return getStreamAsBuffer(stream, {maxBuffer});
}
return applyEncoding(stream, maxBuffer, encoding);
};
// Other encodings are applied after buffering the raw bytes.
const applyEncoding = async (stream, maxBuffer, encoding) => {
const buffer = await getStreamAsBuffer(stream, {maxBuffer});
return buffer.toString(encoding);
};
// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
// On failure, the partially buffered data is substituted so callers still
// receive whatever output was produced before the error.
export const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {
const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});
const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});
// `all` combines both streams, so it may hold up to twice `maxBuffer`.
const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});
try {
return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);
} catch (error) {
return Promise.all([
{error, signal: error.signal, timedOut: error.timedOut},
getBufferedData(stdout, stdoutPromise),
getBufferedData(stderr, stderrPromise),
getBufferedData(all, allPromise),
]);
}
};

View File

@ -1,19 +0,0 @@
import {debuglog} from 'node:util';
import process from 'node:process';
// Verbose mode defaults to on when `NODE_DEBUG` enables the 'execa' section.
export const verboseDefault = debuglog('execa').enabled;

// Left-pads a numeric field with zeros to a fixed width.
const padField = (field, padding) => String(field).padStart(padding, '0');

// Formats the current local time as `HH:MM:SS.mmm` for log prefixes.
const getTimestamp = () => {
	const now = new Date();
	const clock = [
		padField(now.getHours(), 2),
		padField(now.getMinutes(), 2),
		padField(now.getSeconds(), 2),
	].join(':');
	return `${clock}.${padField(now.getMilliseconds(), 3)}`;
};
// When the `verbose` option is on, prints the timestamped command to stderr
// before it runs; otherwise does nothing.
export const logCommand = (escapedCommand, {verbose}) => {
	if (verbose) {
		process.stderr.write(`[${getTimestamp()}] ${escapedCommand}\n`);
	}
};

View File

@ -1,84 +0,0 @@
import {getStreamContents} from './contents.js';
import {noop, throwObjectStream, getLengthProp} from './utils.js';
// Reads an entire stream and resolves with its contents as an `ArrayBuffer`,
// delegating to the generic `getStreamContents()` engine with the
// ArrayBuffer-specific chunk handlers defined below.
export async function getStreamAsArrayBuffer(stream, options) {
return getStreamContents(stream, arrayBufferMethods, options);
}
// Fresh accumulator state: an empty ArrayBuffer.
const initArrayBuffer = () => ({contents: new ArrayBuffer(0)});

// Shared encoder for converting string chunks to UTF-8 bytes.
const textEncoder = new TextEncoder();
const useTextEncoder = chunk => textEncoder.encode(chunk);

// Wraps a Buffer/ArrayBuffer chunk in a plain Uint8Array view.
const useUint8Array = chunk => new Uint8Array(chunk);

// For DataViews and typed arrays, respect the view's offset and length.
const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);

// Drops any bytes past `chunkSize` from an already-converted chunk.
const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);
// `contents` is an increasingly growing `Uint8Array`.
// Grows the backing ArrayBuffer to `length` (fast in-place resize when
// supported), then copies `convertedChunk` in right after the bytes written
// so far (`previousLength`).
const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => {
const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
new Uint8Array(newContents).set(convertedChunk, previousLength);
return newContents;
};
// Without `ArrayBuffer.resize()`, `contents` size is always a power of 2.
// This means its last bytes are zeroes (not stream data), which need to be
// trimmed at the end with `ArrayBuffer.slice()`.
const resizeArrayBufferSlow = (contents, length) => {
// Reuse the current buffer while it is still big enough.
if (length <= contents.byteLength) {
return contents;
}
const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
return arrayBuffer;
};
// With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
// the stream data. It does not include extraneous zeroes to trim at the end.
// The underlying `ArrayBuffer` does allocate a number of bytes that is a power
// of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
const resizeArrayBuffer = (contents, length) => {
if (length <= contents.maxByteLength) {
contents.resize(length);
return contents;
}
const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
return arrayBuffer;
};
// Retrieve the closest `length` that is both >= and a power of 2
const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR));
const SCALE_FACTOR = 2;
const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length);
// `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
// (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
// eslint-disable-next-line no-warning-comments
// TODO: remove after dropping support for Node 20.
// eslint-disable-next-line no-warning-comments
// TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available
const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype;
const arrayBufferMethods = {
init: initArrayBuffer,
convertChunk: {
string: useTextEncoder,
buffer: useUint8Array,
arrayBuffer: useUint8Array,
dataView: useUint8ArrayWithOffset,
typedArray: useUint8ArrayWithOffset,
others: throwObjectStream,
},
getSize: getLengthProp,
truncateChunk: truncateArrayBufferChunk,
addChunk: addArrayBufferChunk,
getFinalChunk: noop,
finalize: finalizeArrayBuffer,
};

View File

@ -1,32 +0,0 @@
import {getStreamContents} from './contents.js';
import {identity, noop, getContentsProp} from './utils.js';

/**
Collect every chunk of `stream` into an array, in arrival order.
This is the only getter that accepts object-mode chunks.
*/
export async function getStreamAsArray(stream, options) {
	return getStreamContents(stream, arrayMethods, options);
}

// Fresh accumulator: an empty chunk list.
const createState = () => ({contents: []});

// Each chunk counts as a single unit toward `maxBuffer`.
const countOne = () => 1;

// Append in place; the same array instance remains the accumulator.
const pushChunk = (convertedChunk, {contents}) => {
	contents.push(convertedChunk);
	return contents;
};

// Accumulator vtable: chunks of any type are stored verbatim.
const arrayMethods = {
	init: createState,
	convertChunk: {
		string: identity,
		buffer: identity,
		arrayBuffer: identity,
		dataView: identity,
		typedArray: identity,
		others: identity,
	},
	getSize: countOne,
	truncateChunk: noop,
	addChunk: pushChunk,
	getFinalChunk: noop,
	finalize: getContentsProp,
};

View File

@ -1,20 +0,0 @@
import {getStreamAsArrayBuffer} from './array-buffer.js';

// Wrap an `ArrayBuffer` in a Node.js `Buffer` view.
// eslint-disable-next-line n/prefer-global/buffer
const toNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer);

/**
Read `stream` fully and return its contents as a Node.js `Buffer`.
Outside Node.js (no global `Buffer`), this always throws.
If reading fails, any partially buffered data attached to the error
is converted to a `Buffer` as well before rethrowing.
*/
export async function getStreamAsBuffer(stream, options) {
	if (!('Buffer' in globalThis)) {
		throw new Error('getStreamAsBuffer() is only supported in Node.js');
	}

	try {
		const arrayBuffer = await getStreamAsArrayBuffer(stream, options);
		return toNodeBuffer(arrayBuffer);
	} catch (error) {
		if (error.bufferedData !== undefined) {
			error.bufferedData = toNodeBuffer(error.bufferedData);
		}

		throw error;
	}
}

View File

@ -1,101 +0,0 @@
/**
Drain an async-iterable `stream` through a set of type-specific accumulator
`methods`, enforcing `maxBuffer` and attaching partial data to any error.
*/
export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
	if (!isAsyncIterable(stream)) {
		throw new Error('The first argument must be a Readable, a ReadableStream, or an async iterable.');
	}

	const state = init();
	state.length = 0;

	try {
		for await (const chunk of stream) {
			const converted = convertChunk[getChunkType(chunk)](chunk, state);
			appendChunk({convertedChunk: converted, state, getSize, truncateChunk, addChunk, maxBuffer});
		}

		flushFinalChunk({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer});
		return finalize(state);
	} catch (error) {
		// Expose whatever was buffered so far, e.g. on MaxBufferError.
		error.bufferedData = finalize(state);
		throw error;
	}
};

// Some accumulators (e.g. string decoding) hold trailing bytes until the
// stream ends; flush them as one last chunk.
const flushFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
	const finalChunk = getFinalChunk(state);

	if (finalChunk !== undefined) {
		appendChunk({convertedChunk: finalChunk, state, getSize, truncateChunk, addChunk, maxBuffer});
	}
};

// Grow the accumulator by one chunk. When the total would exceed
// `maxBuffer`, keep as much of the chunk as fits, then throw.
const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
	const grownLength = state.length + getSize(convertedChunk);

	if (grownLength <= maxBuffer) {
		commitChunk(convertedChunk, state, addChunk, grownLength);
		return;
	}

	const partialChunk = truncateChunk(convertedChunk, maxBuffer - state.length);

	if (partialChunk !== undefined) {
		commitChunk(partialChunk, state, addChunk, maxBuffer);
	}

	throw new MaxBufferError();
};

const commitChunk = (convertedChunk, state, addChunk, newLength) => {
	state.contents = addChunk(convertedChunk, state, newLength);
	state.length = newLength;
};

const isAsyncIterable = stream => typeof stream === 'object'
	&& stream !== null
	&& typeof stream[Symbol.asyncIterator] === 'function';

const {toString: objectToString} = Object.prototype;

// Classify a chunk so the matching converter can be picked.
const getChunkType = chunk => {
	if (typeof chunk === 'string') {
		return 'string';
	}

	if (typeof chunk !== 'object' || chunk === null) {
		return 'others';
	}

	// eslint-disable-next-line n/prefer-global/buffer
	if (globalThis.Buffer?.isBuffer(chunk)) {
		return 'buffer';
	}

	const tag = objectToString.call(chunk);

	if (tag === '[object ArrayBuffer]') {
		return 'arrayBuffer';
	}

	if (tag === '[object DataView]') {
		return 'dataView';
	}

	const isTypedArrayLike = Number.isInteger(chunk.byteLength)
		&& Number.isInteger(chunk.byteOffset)
		&& objectToString.call(chunk.buffer) === '[object ArrayBuffer]';

	return isTypedArrayLike ? 'typedArray' : 'others';
};

// Error thrown when the accumulated size exceeds the `maxBuffer` option.
export class MaxBufferError extends Error {
	name = 'MaxBufferError';

	constructor() {
		super('maxBuffer exceeded');
	}
}

View File

@ -1,119 +0,0 @@
// Type definitions for the get-stream public API: read a stream fully as a
// string, Buffer, ArrayBuffer, or array of chunks.
import {type Readable} from 'node:stream';
import {type Buffer} from 'node:buffer';
// Thrown when the stream grows past `Options.maxBuffer`.
export class MaxBufferError extends Error {
readonly name: 'MaxBufferError';
constructor();
}
// Chunk types accepted by the text/binary getters (not object-mode values).
type TextStreamItem = string | Buffer | ArrayBuffer | ArrayBufferView;
// NOTE(review): the type parameter is spelled "SteamItem" (sic) upstream;
// kept as-is since it is internal to this declaration.
export type AnyStream<SteamItem = TextStreamItem> = Readable | ReadableStream<SteamItem> | AsyncIterable<SteamItem>;
export type Options = {
/**
Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`.
Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length).
@default Infinity
*/
readonly maxBuffer?: number;
};
/**
Get the given `stream` as a string.
@returns The stream's contents as a promise.
@example
```
import fs from 'node:fs';
import getStream from 'get-stream';
const stream = fs.createReadStream('unicorn.txt');
console.log(await getStream(stream));
//               ,,))))))));,
//            __)))))))))))))),
// \|/       -\(((((''''((((((((.
// -*-==//////((''  .     `)))))),
// /|\      ))| o    ;-.    '(((((                                  ,(,
//          ( `|    /  )    ;))))'                               ,_))^;(~
//             |   |   |   ,))((((_     _____------~~~-.        %,;(;(>';'~
//             o_);   ;    )))(((` ~---~  `::           \      %%~~)(v;(`('~
//                   ;    ''''````         `:       `:::|\,__,%%    );`'; ~
//                  |   _                )     /      `:|`----'     `-'
//            ______/\/~    |                 /        /
//          /~;;.____/;;'  /          ___--,-(   `;;;/
//         / //  _;______;'------~~~~~    /;;/\    /
//        //  | |                        / ;   \;;,\
//       (<_  | ;                      /',/-----'  _>
//        \_| ||_                     //~;~~~~~~~~~
//            `\_|                   (,~~
//                                    \~\
//                                     ~~
```
@example
```
import getStream from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStream(readableStream));
```
@example
```
import {opendir} from 'node:fs/promises';
import {getStreamAsArray} from 'get-stream';
const asyncIterable = await opendir(directory);
console.log(await getStreamAsArray(asyncIterable));
```
*/
export default function getStream(stream: AnyStream, options?: Options): Promise<string>;
/**
Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer).
@returns The stream's contents as a promise.
@example
```
import {getStreamAsBuffer} from 'get-stream';
const stream = fs.createReadStream('unicorn.png');
console.log(await getStreamAsBuffer(stream));
```
*/
export function getStreamAsBuffer(stream: AnyStream, options?: Options): Promise<Buffer>;
/**
Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer).
@returns The stream's contents as a promise.
@example
```
import {getStreamAsArrayBuffer} from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArrayBuffer(readableStream));
```
*/
export function getStreamAsArrayBuffer(stream: AnyStream, options?: Options): Promise<ArrayBuffer>;
/**
Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode).
@returns The stream's contents as a promise.
@example
```
import {getStreamAsArray} from 'get-stream';
const {body: readableStream} = await fetch('https://example.com');
console.log(await getStreamAsArray(readableStream));
```
*/
export function getStreamAsArray<Item>(stream: AnyStream<Item>, options?: Options): Promise<Item[]>;

View File

@ -1,5 +0,0 @@
// Public entry point for get-stream: re-exports the per-type getters.
// The string getter is the default export; `MaxBufferError` is raised by
// all of them when the `maxBuffer` option is exceeded.
export {getStreamAsArray} from './array.js';
export {getStreamAsArrayBuffer} from './array-buffer.js';
export {getStreamAsBuffer} from './buffer.js';
export {getStreamAsString as default} from './string.js';
export {MaxBufferError} from './contents.js';

View File

@ -1,98 +0,0 @@
// Static type tests (tsd) for the get-stream declarations: each getter is
// checked against every supported chunk type, plus rejection of object-mode
// streams and invalid options.
import {Buffer} from 'node:buffer';
import {open} from 'node:fs/promises';
import {type Readable} from 'node:stream';
import fs from 'node:fs';
import {expectType, expectError, expectAssignable, expectNotAssignable} from 'tsd';
import getStream, {getStreamAsBuffer, getStreamAsArrayBuffer, getStreamAsArray, MaxBufferError, type Options, type AnyStream} from './index.js';
// Fixtures: one stream/iterable per supported chunk type.
const nodeStream = fs.createReadStream('foo') as Readable;
const fileHandle = await open('test');
const readableStream = fileHandle.readableWebStream();
const asyncIterable = <T>(value: T): AsyncGenerator<T> => (async function * () {
yield value;
})();
const stringAsyncIterable = asyncIterable('');
const bufferAsyncIterable = asyncIterable(Buffer.from(''));
const arrayBufferAsyncIterable = asyncIterable(new ArrayBuffer(0));
const dataViewAsyncIterable = asyncIterable(new DataView(new ArrayBuffer(0)));
const typedArrayAsyncIterable = asyncIterable(new Uint8Array([]));
const objectItem = {test: true};
const objectAsyncIterable = asyncIterable(objectItem);
// getStream (default export) always yields a string.
expectType<string>(await getStream(nodeStream));
expectType<string>(await getStream(nodeStream, {maxBuffer: 10}));
expectType<string>(await getStream(readableStream));
expectType<string>(await getStream(stringAsyncIterable));
expectType<string>(await getStream(bufferAsyncIterable));
expectType<string>(await getStream(arrayBufferAsyncIterable));
expectType<string>(await getStream(dataViewAsyncIterable));
expectType<string>(await getStream(typedArrayAsyncIterable));
expectError(await getStream(objectAsyncIterable));
expectError(await getStream({}));
expectError(await getStream(nodeStream, {maxBuffer: '10'}));
expectError(await getStream(nodeStream, {unknownOption: 10}));
expectError(await getStream(nodeStream, {maxBuffer: 10}, {}));
// getStreamAsBuffer always yields a Node.js Buffer.
expectType<Buffer>(await getStreamAsBuffer(nodeStream));
expectType<Buffer>(await getStreamAsBuffer(nodeStream, {maxBuffer: 10}))
expectType<Buffer>(await getStreamAsBuffer(readableStream));
expectType<Buffer>(await getStreamAsBuffer(stringAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(bufferAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(arrayBufferAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(dataViewAsyncIterable));
expectType<Buffer>(await getStreamAsBuffer(typedArrayAsyncIterable));
expectError(await getStreamAsBuffer(objectAsyncIterable));
expectError(await getStreamAsBuffer({}));
expectError(await getStreamAsBuffer(nodeStream, {maxBuffer: '10'}));
expectError(await getStreamAsBuffer(nodeStream, {unknownOption: 10}));
expectError(await getStreamAsBuffer(nodeStream, {maxBuffer: 10}, {}));
// getStreamAsArrayBuffer always yields an ArrayBuffer.
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(nodeStream));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: 10}));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(readableStream));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(stringAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(bufferAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(arrayBufferAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(dataViewAsyncIterable));
expectType<ArrayBuffer>(await getStreamAsArrayBuffer(typedArrayAsyncIterable));
expectError(await getStreamAsArrayBuffer(objectAsyncIterable));
expectError(await getStreamAsArrayBuffer({}));
expectError(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: '10'}));
expectError(await getStreamAsArrayBuffer(nodeStream, {unknownOption: 10}));
expectError(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: 10}, {}));
// getStreamAsArray preserves the element type, including object mode.
expectType<any[]>(await getStreamAsArray(nodeStream));
expectType<any[]>(await getStreamAsArray(nodeStream, {maxBuffer: 10}));
expectType<any[]>(await getStreamAsArray(readableStream));
expectType<Uint8Array[]>(await getStreamAsArray(readableStream as ReadableStream<Uint8Array>));
expectType<string[]>(await getStreamAsArray(stringAsyncIterable));
expectType<Buffer[]>(await getStreamAsArray(bufferAsyncIterable));
expectType<ArrayBuffer[]>(await getStreamAsArray(arrayBufferAsyncIterable));
expectType<DataView[]>(await getStreamAsArray(dataViewAsyncIterable));
expectType<Uint8Array[]>(await getStreamAsArray(typedArrayAsyncIterable));
expectType<Array<typeof objectItem>>(await getStreamAsArray(objectAsyncIterable));
expectError(await getStreamAsArray({}));
expectError(await getStreamAsArray(nodeStream, {maxBuffer: '10'}));
expectError(await getStreamAsArray(nodeStream, {unknownOption: 10}));
expectError(await getStreamAsArray(nodeStream, {maxBuffer: 10}, {}));
// AnyStream assignability: object-mode iterables only fit AnyStream<unknown>.
expectAssignable<AnyStream>(nodeStream);
expectAssignable<AnyStream>(readableStream);
expectAssignable<AnyStream>(stringAsyncIterable);
expectAssignable<AnyStream>(bufferAsyncIterable);
expectAssignable<AnyStream>(arrayBufferAsyncIterable);
expectAssignable<AnyStream>(dataViewAsyncIterable);
expectAssignable<AnyStream>(typedArrayAsyncIterable);
expectAssignable<AnyStream<unknown>>(objectAsyncIterable);
expectNotAssignable<AnyStream>(objectAsyncIterable);
expectAssignable<AnyStream<string>>(stringAsyncIterable);
expectNotAssignable<AnyStream<string>>(bufferAsyncIterable);
expectNotAssignable<AnyStream>({});
expectAssignable<Options>({maxBuffer: 10});
expectNotAssignable<Options>({maxBuffer: '10'});
expectNotAssignable<Options>({unknownOption: 10});
expectType<MaxBufferError>(new MaxBufferError());

View File

@ -1,36 +0,0 @@
import {getStreamContents} from './contents.js';
import {identity, getContentsProp, throwObjectStream, getLengthProp} from './utils.js';

/**
Read `stream` fully and return its contents decoded as a string.
Binary chunks are decoded incrementally as UTF-8; object-mode chunks throw.
*/
export async function getStreamAsString(stream, options) {
	return getStreamContents(stream, stringMethods, options);
}

// Fresh accumulator: empty string plus a stateful incremental decoder.
const createState = () => ({contents: '', textDecoder: new TextDecoder()});

// Decode a binary chunk, keeping decoder state across chunks so multi-byte
// sequences split between chunks decode correctly ({stream: true}).
const decodeChunk = (chunk, {textDecoder}) => textDecoder.decode(chunk, {stream: true});

// Concatenate onto the accumulated string.
const concatChunk = (convertedChunk, {contents}) => contents + convertedChunk;

// Keep only the first `chunkSize` characters when `maxBuffer` is hit.
const truncateChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);

// Flush any bytes still buffered inside the decoder at end of stream.
const flushDecoder = ({textDecoder}) => {
	const flushed = textDecoder.decode();
	return flushed === '' ? undefined : flushed;
};

// Accumulator vtable consumed by `getStreamContents()`.
const stringMethods = {
	init: createState,
	convertChunk: {
		string: identity,
		buffer: decodeChunk,
		arrayBuffer: decodeChunk,
		dataView: decodeChunk,
		typedArray: decodeChunk,
		others: throwObjectStream,
	},
	getSize: getLengthProp,
	truncateChunk,
	addChunk: concatChunk,
	getFinalChunk: flushDecoder,
	finalize: getContentsProp,
};

View File

@ -1,11 +0,0 @@
// Shared helpers for the per-type stream accumulators.

// Return the argument unchanged.
export function identity(value) {
	return value;
}

// Discard everything and yield `undefined`.
export function noop() {
	return undefined;
}

// Extract the accumulated `contents` from a state object.
export function getContentsProp({contents}) {
	return contents;
}

// Reject object-mode chunks, which only the array accumulator accepts.
export function throwObjectStream(chunk) {
	throw new Error(`Streams in object mode are not supported: ${String(chunk)}`);
}

// Measure a chunk by its `length` property.
export function getLengthProp(convertedChunk) {
	return convertedChunk.length;
}

View File

@ -1,275 +0,0 @@
/* eslint-disable max-lines */
// List of known process signals with information about them
// Each entry: name, default number, default action, human description, and
// the standard that defines it (posix / ansi / bsd / systemv / other).
// Several names share a number on purpose (e.g. SIGABRT/SIGIOT = 6,
// SIGCHLD/SIGCLD = 17): they are OS-specific aliases; consumers normalize
// the numbers against `os.constants.signals` at load time.
// `forced: true` marks signals the process cannot catch or ignore.
export const SIGNALS = [
{
name: 'SIGHUP',
number: 1,
action: 'terminate',
description: 'Terminal closed',
standard: 'posix',
},
{
name: 'SIGINT',
number: 2,
action: 'terminate',
description: 'User interruption with CTRL-C',
standard: 'ansi',
},
{
name: 'SIGQUIT',
number: 3,
action: 'core',
description: 'User interruption with CTRL-\\',
standard: 'posix',
},
{
name: 'SIGILL',
number: 4,
action: 'core',
description: 'Invalid machine instruction',
standard: 'ansi',
},
{
name: 'SIGTRAP',
number: 5,
action: 'core',
description: 'Debugger breakpoint',
standard: 'posix',
},
{
name: 'SIGABRT',
number: 6,
action: 'core',
description: 'Aborted',
standard: 'ansi',
},
{
name: 'SIGIOT',
number: 6,
action: 'core',
description: 'Aborted',
standard: 'bsd',
},
{
name: 'SIGBUS',
number: 7,
action: 'core',
description:
'Bus error due to misaligned, non-existing address or paging error',
standard: 'bsd',
},
{
name: 'SIGEMT',
number: 7,
action: 'terminate',
description: 'Command should be emulated but is not implemented',
standard: 'other',
},
{
name: 'SIGFPE',
number: 8,
action: 'core',
description: 'Floating point arithmetic error',
standard: 'ansi',
},
{
name: 'SIGKILL',
number: 9,
action: 'terminate',
description: 'Forced termination',
standard: 'posix',
forced: true,
},
{
name: 'SIGUSR1',
number: 10,
action: 'terminate',
description: 'Application-specific signal',
standard: 'posix',
},
{
name: 'SIGSEGV',
number: 11,
action: 'core',
description: 'Segmentation fault',
standard: 'ansi',
},
{
name: 'SIGUSR2',
number: 12,
action: 'terminate',
description: 'Application-specific signal',
standard: 'posix',
},
{
name: 'SIGPIPE',
number: 13,
action: 'terminate',
description: 'Broken pipe or socket',
standard: 'posix',
},
{
name: 'SIGALRM',
number: 14,
action: 'terminate',
description: 'Timeout or timer',
standard: 'posix',
},
{
name: 'SIGTERM',
number: 15,
action: 'terminate',
description: 'Termination',
standard: 'ansi',
},
{
name: 'SIGSTKFLT',
number: 16,
action: 'terminate',
description: 'Stack is empty or overflowed',
standard: 'other',
},
{
name: 'SIGCHLD',
number: 17,
action: 'ignore',
description: 'Child process terminated, paused or unpaused',
standard: 'posix',
},
{
name: 'SIGCLD',
number: 17,
action: 'ignore',
description: 'Child process terminated, paused or unpaused',
standard: 'other',
},
{
name: 'SIGCONT',
number: 18,
action: 'unpause',
description: 'Unpaused',
standard: 'posix',
forced: true,
},
{
name: 'SIGSTOP',
number: 19,
action: 'pause',
description: 'Paused',
standard: 'posix',
forced: true,
},
{
name: 'SIGTSTP',
number: 20,
action: 'pause',
description: 'Paused using CTRL-Z or "suspend"',
standard: 'posix',
},
{
name: 'SIGTTIN',
number: 21,
action: 'pause',
description: 'Background process cannot read terminal input',
standard: 'posix',
},
{
name: 'SIGBREAK',
number: 21,
action: 'terminate',
description: 'User interruption with CTRL-BREAK',
standard: 'other',
},
{
name: 'SIGTTOU',
number: 22,
action: 'pause',
description: 'Background process cannot write to terminal output',
standard: 'posix',
},
{
name: 'SIGURG',
number: 23,
action: 'ignore',
description: 'Socket received out-of-band data',
standard: 'bsd',
},
{
name: 'SIGXCPU',
number: 24,
action: 'core',
description: 'Process timed out',
standard: 'bsd',
},
{
name: 'SIGXFSZ',
number: 25,
action: 'core',
description: 'File too big',
standard: 'bsd',
},
{
name: 'SIGVTALRM',
number: 26,
action: 'terminate',
description: 'Timeout or timer',
standard: 'bsd',
},
{
name: 'SIGPROF',
number: 27,
action: 'terminate',
description: 'Timeout or timer',
standard: 'bsd',
},
{
name: 'SIGWINCH',
number: 28,
action: 'ignore',
description: 'Terminal window size changed',
standard: 'bsd',
},
{
name: 'SIGIO',
number: 29,
action: 'terminate',
description: 'I/O is available',
standard: 'other',
},
{
name: 'SIGPOLL',
number: 29,
action: 'terminate',
description: 'Watched event',
standard: 'other',
},
{
name: 'SIGINFO',
number: 29,
action: 'ignore',
description: 'Request for process information',
standard: 'other',
},
{
name: 'SIGPWR',
number: 30,
action: 'terminate',
description: 'Device running out of power',
standard: 'systemv',
},
{
name: 'SIGSYS',
number: 31,
action: 'core',
description: 'Invalid system call',
standard: 'other',
},
{
name: 'SIGUNUSED',
number: 31,
action: 'terminate',
description: 'Invalid system call',
standard: 'other',
},
]
/* eslint-enable max-lines */

View File

@ -1,70 +0,0 @@
import { constants } from 'node:os'
import { SIGRTMAX } from './realtime.js'
import { getSignals } from './signals.js'
// Retrieve `signalsByName`, an object mapping signal name to signal properties.
// We make sure the object is sorted by `number`.
const getSignalsByName = () => {
const signals = getSignals()
return Object.fromEntries(signals.map(getSignalByName))
}
const getSignalByName = ({
name,
number,
description,
supported,
action,
forced,
standard,
}) => [name, { name, number, description, supported, action, forced, standard }]
export const signalsByName = getSignalsByName()
// Retrieve `signalsByNumber`, an object mapping signal number to signal
// properties.
// We make sure the object is sorted by `number`.
const getSignalsByNumber = () => {
const signals = getSignals()
const length = SIGRTMAX + 1
const signalsA = Array.from({ length }, (value, number) =>
getSignalByNumber(number, signals),
)
return Object.assign({}, ...signalsA)
}
const getSignalByNumber = (number, signals) => {
const signal = findSignalByNumber(number, signals)
if (signal === undefined) {
return {}
}
const { name, description, supported, action, forced, standard } = signal
return {
[number]: {
name,
number,
description,
supported,
action,
forced,
standard,
},
}
}
// Several signals might end up sharing the same number because of OS-specific
// numbers, in which case those prevail.
const findSignalByNumber = (number, signals) => {
const signal = signals.find(({ name }) => constants.signals[name] === number)
if (signal !== undefined) {
return signal
}
return signals.find((signalA) => signalA.number === number)
}
export const signalsByNumber = getSignalsByNumber()

View File

@ -1,16 +0,0 @@
// List of realtime signals with information about them

// Bounds of the POSIX realtime signal number range.
const SIGRTMIN = 34
export const SIGRTMAX = 64

// Build the descriptor for the realtime signal at `index` (0-based).
const buildRealtimeSignal = (_, index) => ({
	name: `SIGRT${index + 1}`,
	number: SIGRTMIN + index,
	action: 'terminate',
	description: 'Application-specific signal (realtime)',
	standard: 'posix',
})

// Every realtime signal (SIGRT1..SIGRT31), each with default metadata.
export const getRealtimeSignals = () =>
	Array.from({ length: SIGRTMAX - SIGRTMIN + 1 }, buildRealtimeSignal)

View File

@ -1,34 +0,0 @@
import { constants } from 'node:os'
import { SIGNALS } from './core.js'
import { getRealtimeSignals } from './realtime.js'
// Retrieve list of know signals (including realtime) with information about
// them
export const getSignals = () => {
const realtimeSignals = getRealtimeSignals()
const signals = [...SIGNALS, ...realtimeSignals].map(normalizeSignal)
return signals
}
// Normalize signal:
// - `number`: signal numbers are OS-specific. This is taken into account by
// `os.constants.signals`. However we provide a default `number` since some
// signals are not defined for some OS.
// - `forced`: set default to `false`
// - `supported`: set value
const normalizeSignal = ({
name,
number: defaultNumber,
description,
action,
forced = false,
standard,
}) => {
const {
signals: { [name]: constantSignal },
} = constants
const supported = constantSignal !== undefined
const number = supported ? constantSignal : defaultNumber
return { name, number, description, supported, action, forced, standard }
}

View File

@ -1,81 +0,0 @@
// Type declarations for is-stream: duck-type guards for Node.js streams.
import {
Stream,
Writable as WritableStream,
Readable as ReadableStream,
Duplex as DuplexStream,
Transform as TransformStream,
} from 'node:stream';
/**
@returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream).
@example
```
import fs from 'node:fs';
import {isStream} from 'is-stream';
isStream(fs.createReadStream('unicorn.png'));
//=> true
isStream({});
//=> false
```
*/
export function isStream(stream: unknown): stream is Stream;
/**
@returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable).
@example
```
import fs from 'node:fs';
import {isWritableStream} from 'is-stream';
isWritableStream(fs.createWriteStream('unicorn.txt'));
//=> true
```
*/
export function isWritableStream(stream: unknown): stream is WritableStream;
/**
@returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable).
@example
```
import fs from 'node:fs';
import {isReadableStream} from 'is-stream';
isReadableStream(fs.createReadStream('unicorn.png'));
//=> true
```
*/
export function isReadableStream(stream: unknown): stream is ReadableStream;
/**
@returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex).
@example
```
import {Duplex as DuplexStream} from 'node:stream';
import {isDuplexStream} from 'is-stream';
isDuplexStream(new DuplexStream());
//=> true
```
*/
export function isDuplexStream(stream: unknown): stream is DuplexStream;
/**
@returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform).
@example
```
import fs from 'node:fs';
import StringifyStream from 'streaming-json-stringify';
import {isTransformStream} from 'is-stream';
isTransformStream(StringifyStream());
//=> true
```
*/
export function isTransformStream(stream: unknown): stream is TransformStream;

View File

@ -1,29 +0,0 @@
// Duck-type checks for Node.js streams, based on the public methods and
// internal state objects that real stream instances carry.

export function isStream(stream) {
	if (stream === null || typeof stream !== 'object') {
		return false;
	}

	return typeof stream.pipe === 'function';
}

export function isWritableStream(stream) {
	if (!isStream(stream) || stream.writable === false) {
		return false;
	}

	return typeof stream._write === 'function'
		&& typeof stream._writableState === 'object';
}

export function isReadableStream(stream) {
	if (!isStream(stream) || stream.readable === false) {
		return false;
	}

	return typeof stream._read === 'function'
		&& typeof stream._readableState === 'object';
}

export function isDuplexStream(stream) {
	return isReadableStream(stream) && isWritableStream(stream);
}

export function isTransformStream(stream) {
	return isDuplexStream(stream) && typeof stream._transform === 'function';
}

View File

@ -1,24 +0,0 @@
// In-memory lowdb adapter (async flavor). Nothing is persisted; useful for
// tests. Starts empty, so the first read resolves with `null`.
export class Memory {
	#data = null

	// Resolve with whatever was last written (or `null`).
	async read() {
		return this.#data
	}

	// Remember `obj` as the stored value.
	async write(obj) {
		this.#data = obj
	}
}

// Synchronous counterpart of `Memory`.
export class MemorySync {
	#data = null

	read() {
		// `||` deliberately maps any falsy stored value to `null`,
		// matching the "nothing stored" contract.
		return this.#data || null
	}

	write(obj) {
		this.#data = obj
	}
}

View File

@ -1,51 +0,0 @@
import { TextFile, TextFileSync } from "./TextFile.js"

// Async adapter that stores structured data in a text file, using
// caller-supplied `parse`/`stringify` codecs (e.g. JSON).
export class DataFile {
	#adapter
	#parse
	#stringify

	constructor(filename, { parse, stringify }) {
		this.#adapter = new TextFile(filename)
		this.#parse = parse
		this.#stringify = stringify
	}

	// Read and decode the file; `null` when the file does not exist yet.
	async read() {
		const raw = await this.#adapter.read()
		return raw === null ? null : this.#parse(raw)
	}

	// Encode `obj` and write it through the underlying text-file adapter.
	write(obj) {
		return this.#adapter.write(this.#stringify(obj))
	}
}

// Synchronous counterpart of `DataFile`.
export class DataFileSync {
	#adapter
	#parse
	#stringify

	constructor(filename, { parse, stringify }) {
		this.#adapter = new TextFileSync(filename)
		this.#parse = parse
		this.#stringify = stringify
	}

	read() {
		const raw = this.#adapter.read()
		return raw === null ? null : this.#parse(raw)
	}

	write(obj) {
		this.#adapter.write(this.#stringify(obj))
	}
}

View File

@ -1,19 +0,0 @@
import { DataFile, DataFileSync } from "./DataFile.js";

// JSON codec shared by both adapters: pretty-printed, two-space indent.
const stringify = (data) => JSON.stringify(data, null, 2);

// Async adapter persisting data as pretty-printed JSON.
export class JSONFile extends DataFile {
	constructor(filename) {
		super(filename, { parse: JSON.parse, stringify });
	}
}

// Synchronous adapter persisting data as pretty-printed JSON.
export class JSONFileSync extends DataFileSync {
	constructor(filename) {
		super(filename, { parse: JSON.parse, stringify });
	}
}

View File

@ -1,65 +0,0 @@
import { readFileSync, renameSync, writeFileSync } from "node:fs"
import { readFile } from "node:fs/promises"
import path from "node:path"
import { Writer } from "../../steno"
export class TextFile {
#filename
#writer
constructor(filename) {
this.#filename = filename
this.#writer = new Writer(filename)
}
async read() {
let data
try {
data = await readFile(this.#filename, "utf-8")
} catch (e) {
if (e.code === "ENOENT") {
return null
}
throw e
}
return data
}
write(str) {
return this.#writer.write(str)
}
}
export class TextFileSync {
#tempFilename
#filename
constructor(filename) {
this.#filename = filename
const f = filename.toString()
this.#tempFilename = path.join(path.dirname(f), `.${path.basename(f)}.tmp`)
}
read() {
let data
try {
data = readFileSync(this.#filename, "utf-8")
} catch (e) {
if (e.code === "ENOENT") {
return null
}
throw e
}
return data
}
write(str) {
writeFileSync(this.#tempFilename, str)
renameSync(this.#tempFilename, this.#filename)
}
}

View File

@ -1,48 +0,0 @@
// Fail fast when a Low/LowSync is constructed without its two required
// arguments, which would otherwise surface as confusing errors later.
function checkArgs(adapter, defaultData) {
	if (adapter === undefined) throw new Error("lowdb: missing adapter")
	if (defaultData === undefined) throw new Error("lowdb: missing default data")
}

// Minimal async database: `data` in memory, persisted through `adapter`.
export class Low {
	constructor(adapter, defaultData) {
		checkArgs(adapter, defaultData)
		this.adapter = adapter
		this.data = defaultData
	}

	// Load from the adapter; keep the default data when nothing is stored.
	async read() {
		const stored = await this.adapter.read()

		if (stored) {
			this.data = stored
		}
	}

	// Persist the current data (skipped while `data` is falsy).
	async write() {
		if (this.data) {
			await this.adapter.write(this.data)
		}
	}

	// Mutate the data through `fn`, then persist it.
	async update(fn) {
		fn(this.data)
		await this.write()
	}
}

// Synchronous counterpart of `Low`.
export class LowSync {
	constructor(adapter, defaultData) {
		checkArgs(adapter, defaultData)
		this.adapter = adapter
		this.data = defaultData
	}

	read() {
		const stored = this.adapter.read()

		if (stored) {
			this.data = stored
		}
	}

	write() {
		if (this.data) {
			this.adapter.write(this.data)
		}
	}

	update(fn) {
		fn(this.data)
		this.write()
	}
}

View File

@ -1,23 +0,0 @@
import { Memory, MemorySync } from "../adapters/Memory.js"
import { JSONFile, JSONFileSync } from "../adapters/node/JSONFile.js"
import { Low, LowSync } from "../core/Low.js"

// Under NODE_ENV=test, swap the JSON file adapter for an in-memory one so
// tests never touch the filesystem. Checked at call time, per preset.
const isTestEnv = () => process.env.NODE_ENV === "test"

// Async preset: JSON-file-backed database, read once before returning.
export async function JSONFilePreset(filename, defaultData) {
	const adapter = isTestEnv() ? new Memory() : new JSONFile(filename)
	const db = new Low(adapter, defaultData)
	await db.read()
	return db
}

// Synchronous preset: JSON-file-backed database, read once before returning.
export function JSONFileSyncPreset(filename, defaultData) {
	const adapter = isTestEnv() ? new MemorySync() : new JSONFileSync(filename)
	const db = new LowSync(adapter, defaultData)
	db.read()
	return db
}

View File

@ -1,98 +0,0 @@
import { rename, writeFile } from "node:fs/promises"
import { basename, dirname, join } from "node:path"
import { fileURLToPath } from "node:url"
// Returns a temporary file
// Example: for /some/file will return /some/.file.tmp
function getTempFilename(file) {
const f = file instanceof URL ? fileURLToPath(file) : file.toString()
return join(dirname(f), `.${basename(f)}.tmp`)
}
// Retries an asynchronous operation with a delay between retries and a maximum retry count
async function retryAsyncOperation(fn, maxRetries, delayMs) {
for (let i = 0; i < maxRetries; i++) {
try {
return await fn()
} catch (error) {
if (i < maxRetries - 1) {
await new Promise(resolve => setTimeout(resolve, delayMs))
} else {
throw error // Rethrow the error if max retries reached
}
}
}
}
// Steno-style atomic file writer. Concurrent write() calls are serialized:
// while one write is in flight, later payloads are coalesced (only the most
// recent survives) and flushed once the in-flight write finishes. Each write
// lands atomically via temp-file + rename.
export class Writer {
    // Target file path (string or URL).
    #filename
    // Hidden sibling used for the atomic write (`.name.tmp`).
    #tempFilename
    // True while a write is in flight; later writes are queued via #add.
    #locked = false
    // [resolve, reject] pair of the write whose completion is pending.
    #prev = null
    // [resolve, reject] pair for the queued (coalesced) write, if any.
    #next = null
    // Singleton promise shared by every caller waiting on the queued write.
    #nextPromise = null
    // Payload of the queued write; only the most recent data is kept.
    #nextData = null
    // File is locked, add data for later
    #add(data) {
        // Only keep most recent data
        this.#nextData = data
        // Create a singleton promise to resolve all next promises once next data is written
        this.#nextPromise ||= new Promise((resolve, reject) => {
            this.#next = [resolve, reject]
        })
        // Return a promise that will resolve at the same time as next promise
        return new Promise((resolve, reject) => {
            this.#nextPromise?.then(resolve).catch(reject)
        })
    }
    // File isn't locked, write data
    async #write(data) {
        // Lock file
        this.#locked = true
        try {
            // Atomic write
            await writeFile(this.#tempFilename, data, "utf-8")
            // Windows can hold the destination briefly; retry the rename up
            // to 10 times with a 100 ms pause.
            await retryAsyncOperation(
                async () => {
                    await rename(this.#tempFilename, this.#filename)
                },
                10,
                100
            )
            // Call resolve
            this.#prev?.[0]()
        } catch (err) {
            // Call reject
            if (err instanceof Error) {
                this.#prev?.[1](err)
            }
            throw err
        } finally {
            // Unlock file
            this.#locked = false
            // Promote the queued waiters so the follow-up write settles them.
            this.#prev = this.#next
            this.#next = this.#nextPromise = null
            if (this.#nextData !== null) {
                const nextData = this.#nextData
                this.#nextData = null
                // Drain the coalesced write queued while we were locked.
                await this.write(nextData)
            }
        }
    }
    constructor(filename) {
        this.#filename = filename
        this.#tempFilename = getTempFilename(filename)
    }
    // Write `data` to the file. If a write is already running the payload is
    // queued (replacing any prior queued payload) and the returned promise
    // settles when that queued write lands.
    async write(data) {
        return this.#locked ? this.#add(data) : this.#write(data)
    }
}

View File

@ -1,71 +0,0 @@
// Copy one own property descriptor from `from` onto `to`.
// `length`/`prototype` are skipped: `length` should describe `to`'s own
// parameters and `prototype` is non-writable/non-configurable anyway.
// `arguments`/`caller` are skipped because some React Native environments
// report them in Reflect.ownKeys and they must not be copied.
const copyProperty = (to, from, property, ignoreNonConfigurable) => {
    if (property === 'length' || property === 'prototype'
        || property === 'arguments' || property === 'caller') {
        return;
    }
    const toDescriptor = Object.getOwnPropertyDescriptor(to, property);
    const fromDescriptor = Object.getOwnPropertyDescriptor(from, property);
    // When asked to, silently skip properties defineProperty would reject.
    if (ignoreNonConfigurable && !canCopyProperty(toDescriptor, fromDescriptor)) {
        return;
    }
    Object.defineProperty(to, property, fromDescriptor);
};
// `Object.defineProperty()` throws when the target property exists, is
// non-configurable, and either one of its descriptor flags would change or a
// non-writable value would change. This predicate mirrors that rule.
const canCopyProperty = function (toDescriptor, fromDescriptor) {
    if (toDescriptor === undefined || toDescriptor.configurable) {
        return true;
    }
    const flagsMatch =
        toDescriptor.writable === fromDescriptor.writable
        && toDescriptor.enumerable === fromDescriptor.enumerable
        && toDescriptor.configurable === fromDescriptor.configurable;
    const valueOk = toDescriptor.writable || toDescriptor.value === fromDescriptor.value;
    return flagsMatch && valueOk;
};
// Make `to` share the prototype of `from`, skipping the (slow) setPrototypeOf
// call when they already match.
const changePrototype = (to, from) => {
    const desiredPrototype = Object.getPrototypeOf(from);
    if (desiredPrototype !== Object.getPrototypeOf(to)) {
        Object.setPrototypeOf(to, desiredPrototype);
    }
};
const toStringDescriptor = Object.getOwnPropertyDescriptor(Function.prototype, 'toString');
const toStringName = Object.getOwnPropertyDescriptor(Function.prototype.toString, 'name');

// Replace `to.toString()` so it reports the wrapped source of `from`.
// `from.toString()` is read eagerly (only the resulting string is retained)
// so `from` can still be garbage collected, and repeated `to.toString()`
// calls reuse the cached text.
const changeToString = (to, from, name) => {
    const withName = name === '' ? '' : `with ${name.trim()}() `;
    const wrappedBody = `/* Wrapped ${withName}*/\n${from.toString()}`;
    const newToString = () => wrappedBody;
    // Give the replacement the standard non-enumerable `name` of `toString`.
    Object.defineProperty(newToString, 'name', toStringName);
    Object.defineProperty(to, 'toString', { ...toStringDescriptor, value: newToString });
};
// Make `to` mimic `from`: copy own properties (name, displayName, symbols…),
// share the prototype, and report `from`'s source from toString().
// Returns `to` for chaining.
export default function mimicFunction(to, from, { ignoreNonConfigurable = false } = {}) {
    // Capture the target's name before the loop below overwrites it with the
    // source's `name` property.
    const originalName = to.name;
    for (const property of Reflect.ownKeys(from)) {
        copyProperty(to, from, property, ignoreNonConfigurable);
    }
    changePrototype(to, from);
    changeToString(to, from, originalName);
    return to;
}

View File

@ -1,84 +0,0 @@
// Type declarations for npm-run-path: helpers that prepend locally installed
// package binaries (node_modules/.bin) and the running Node binary to PATH.
export interface RunPathOptions {
	/**
	Working directory.
	@default process.cwd()
	*/
	readonly cwd?: string | URL;
	/**
	PATH to be appended. Default: [`PATH`](https://github.com/sindresorhus/path-key).
	Set it to an empty string to exclude the default PATH.
	*/
	readonly path?: string;
	/**
	Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH.
	This can be either an absolute path or a path relative to the `cwd` option.
	@default process.execPath
	*/
	readonly execPath?: string | URL;
}
// Loose mirror of NodeJS.ProcessEnv (values may be undefined).
export type ProcessEnv = Record<string, string | undefined>;
export interface EnvOptions {
	/**
	The working directory.
	@default process.cwd()
	*/
	readonly cwd?: string | URL;
	/**
	Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options.
	*/
	readonly env?: ProcessEnv;
	/**
	The path to the current Node.js executable. Its directory is pushed to the front of PATH.
	This can be either an absolute path or a path relative to the `cwd` option.
	@default process.execPath
	*/
	readonly execPath?: string | URL;
}
/**
Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries.
@returns The augmented path string.
@example
```
import childProcess from 'node:child_process';
import {npmRunPath} from 'npm-run-path';
console.log(process.env.PATH);
//=> '/usr/local/bin'
console.log(npmRunPath());
//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin'
```
*/
export function npmRunPath(options?: RunPathOptions): string;
/**
@returns The augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object.
@example
```
import childProcess from 'node:child_process';
import {npmRunPathEnv} from 'npm-run-path';
// `foo` is a locally installed binary
childProcess.execFileSync('foo', {
	env: npmRunPathEnv()
});
```
*/
export function npmRunPathEnv(options?: EnvOptions): ProcessEnv;

View File

@ -1,51 +0,0 @@
import process from 'node:process';
import path from 'node:path';
import url from 'node:url';
// Return the environment key that holds PATH. On Windows the key is
// case-insensitive and may be spelled differently ("Path", "PATH", …), so the
// last matching key wins; everywhere else it is always 'PATH'.
function pathKey({ env = process.env, platform = process.platform } = {}) {
    if (platform !== 'win32') {
        return 'PATH';
    }
    const winKey = Object.keys(env)
        .reverse()
        .find((key) => key.toUpperCase() === 'PATH');
    return winKey || 'Path';
}
// Build a PATH string that puts every node_modules/.bin between `cwd` and the
// filesystem root first, then the directory of the Node executable, then the
// caller-supplied (or current) PATH.
export function npmRunPath(options = {}) {
    const {
        cwd = process.cwd(),
        path: path_ = process.env[pathKey()],
        execPath = process.execPath,
    } = options;
    const execPathString = execPath instanceof URL ? url.fileURLToPath(execPath) : execPath;
    const cwdString = cwd instanceof URL ? url.fileURLToPath(cwd) : cwd;
    // Walk from cwd up to the root, collecting each node_modules/.bin.
    const result = [];
    let current = path.resolve(cwdString);
    let parent = null;
    while (parent !== current) {
        result.push(path.join(current, 'node_modules/.bin'));
        parent = current;
        current = path.resolve(current, '..');
    }
    // Ensure the running `node` binary is used.
    result.push(path.resolve(cwdString, execPathString, '..'));
    return [...result, path_].join(path.delimiter);
}
// Return a copy of `env` whose PATH entry (under the platform-correct key)
// has been augmented by npmRunPath(); the input env is never mutated.
export function npmRunPathEnv({ env = process.env, ...options } = {}) {
    const augmented = { ...env };
    const key = pathKey({ env: augmented });
    options.path = augmented[key];
    augmented[key] = npmRunPath(options);
    return augmented;
}

View File

@ -1,59 +0,0 @@
// Type declarations for onetime: wrap a function so it can only run once.
export type Options = {
	/**
	Throw an error when called more than once.
	@default false
	*/
	readonly throw?: boolean;
};
declare const onetime: {
	/**
	Ensure a function is only called once. When called multiple times it will return the return value from the first call.
	@param fn - The function that should only be called once.
	@returns A function that only calls `fn` once.
	@example
	```
	import onetime from 'onetime';
	let index = 0;
	const foo = onetime(() => ++index);
	foo(); //=> 1
	foo(); //=> 1
	foo(); //=> 1
	onetime.callCount(foo); //=> 3
	```
	*/
	<ArgumentsType extends unknown[], ReturnType>(
		fn: (...arguments_: ArgumentsType) => ReturnType,
		options?: Options
	): (...arguments_: ArgumentsType) => ReturnType;
	/**
	Get the number of times `fn` has been called.
	@param fn - The function to get call count from.
	@returns A number representing how many times `fn` has been called.
	@example
	```
	import onetime from 'onetime';
	const foo = onetime(() => {});
	foo();
	foo();
	foo();
	console.log(onetime.callCount(foo));
	//=> 3
	```
	*/
	callCount(fn: (...arguments_: any[]) => unknown): number;
};
export default onetime;

View File

@ -1,41 +0,0 @@
import mimicFunction from '../mimic-function';
// Tracks how many times each wrapper produced by onetime() has been invoked.
const calledFunctions = new WeakMap();

// Wrap `function_` so only its first invocation runs; subsequent calls replay
// the first return value, or throw when `options.throw` is true.
const onetime = (function_, options = {}) => {
    if (typeof function_ !== 'function') {
        throw new TypeError('Expected a function');
    }
    let firstResult;
    let timesCalled = 0;
    const displayName = function_.displayName || function_.name || '<anonymous>';
    const wrapper = function (...callArguments) {
        calledFunctions.set(wrapper, ++timesCalled);
        if (timesCalled === 1) {
            firstResult = function_.apply(this, callArguments);
            // Drop the reference so the original can be garbage collected.
            function_ = undefined;
        } else if (options.throw === true) {
            throw new Error(`Function \`${displayName}\` can only be called once`);
        }
        return firstResult;
    };
    // Make the wrapper look like the original (name, properties, toString).
    mimicFunction(wrapper, function_);
    calledFunctions.set(wrapper, timesCalled);
    return wrapper;
};

// Report how many times a onetime-wrapped function has been invoked.
onetime.callCount = (wrapped) => {
    if (!calledFunctions.has(wrapped)) {
        throw new Error(`The given function \`${wrapped.name}\` is not wrapped by the \`onetime\` package`);
    }
    return calledFunctions.get(wrapped);
};

export default onetime;

View File

@ -1,18 +0,0 @@
// Type declaration for strip-final-newline.
/**
Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from a string or Uint8Array.
@returns The input without any final newline.
@example
```
import stripFinalNewline from 'strip-final-newline';
stripFinalNewline('foo\nbar\n\n');
//=> 'foo\nbar\n'
const uint8Array = new TextEncoder().encode('foo\nbar\n\n')
new TextDecoder().decode(stripFinalNewline(uint8Array));
//=> 'foo\nbar\n'
```
*/
export default function stripFinalNewline<T extends string | Uint8Array>(input: T): T;

View File

@ -1,26 +0,0 @@
const LF = '\n';
const LF_BINARY = LF.codePointAt(0);
const CR = '\r';
const CR_BINARY = CR.codePointAt(0);

// Remove a single trailing "\n" or "\r\n" from a string.
const stripTrailingString = (value) => {
    if (value.at(-1) !== LF) {
        return value;
    }
    return value.slice(0, value.at(-2) === CR ? -2 : -1);
};

// Same as above for 1-byte-per-element typed arrays (no copy; subarray view).
const stripTrailingBinary = (bytes) => {
    if (bytes.at(-1) !== LF_BINARY) {
        return bytes;
    }
    return bytes.subarray(0, bytes.at(-2) === CR_BINARY ? -2 : -1);
};

// Strip the final newline from a string or a Uint8Array-like view.
export default function stripFinalNewline(input) {
    if (typeof input === 'string') {
        return stripTrailingString(input);
    }
    if (!(ArrayBuffer.isView(input) && input.BYTES_PER_ELEMENT === 1)) {
        throw new Error('Input must be a string or a Uint8Array');
    }
    return stripTrailingBinary(input);
}

View File

@ -1,40 +0,0 @@
import winston from "winston"
import colors from "cli-color"
// Per-service console color scheme (cli-color style names); errors always
// override to white-on-red regardless of service.
const servicesToColor = {
    "CORE": {
        color: "whiteBright",
        background: "bgBlackBright",
    },
    "INSTALL": {
        color: "whiteBright",
        background: "bgBlueBright",
    },
}

// Colorize log fragments for the given level/service; unknown services fall
// back to the CORE scheme.
const paintText = (level, service, ...args) => {
    const scheme = servicesToColor[service ?? "CORE"] ?? servicesToColor["CORE"]
    let { color, background } = scheme
    if (level === "error") {
        color = "whiteBright"
        background = "bgRedBright"
    }
    return colors[background][color](...args)
}
// Render each entry as "(level) [SERVICE] > message" with the prefix
// colorized by paintText; `timestamp` is injected by the combine below but
// intentionally not printed.
const format = winston.format.printf(({ timestamp, service = "CORE", level, message, }) => {
    return `${paintText(level, service, `(${level}) [${service}]`)} > ${message}`
})
// Console-only winston logger; the file transports are kept for reference
// but currently disabled.
export default winston.createLogger({
    format: winston.format.combine(
        winston.format.timestamp(),
        format
    ),
    transports: [
        new winston.transports.Console(),
        //new winston.transports.File({ filename: "error.log", level: "error" }),
        //new winston.transports.File({ filename: "combined.log" }),
    ],
})

View File

@ -1,23 +0,0 @@
import PublicInternalLibraries from "./libs"
// Truthy when `x` is a constructable function (has a prototype whose
// `constructor` is itself a function): classes and classic `function`
// declarations pass, arrow functions and non-functions do not.
const isAClass = (x) => {
    if (typeof x !== "function") {
        return false
    }
    return Boolean(x.prototype && typeof x.prototype.constructor === "function")
}
// Resolve the manifest-declared dependency names into usable libraries.
// Class entries are instantiated with `bindCtx` and their optional async
// initialize() is awaited; plain entries are passed through as-is. Unknown
// names are silently skipped.
export default async (dependencies, bindCtx) => {
    const resolved = {}
    for await (const name of dependencies) {
        const entry = PublicInternalLibraries[name]
        if (!entry) {
            continue
        }
        if (typeof entry === "function" && isAClass(entry)) {
            resolved[name] = new entry(bindCtx)
            if (resolved[name].initialize) {
                await resolved[name].initialize()
            }
        } else {
            resolved[name] = entry
        }
    }
    return resolved
}

View File

@ -1,54 +0,0 @@
import open from "open"
import axios from "axios"
import ManifestAuthDB from "../../../classes/ManifestAuthDB"
// Per-package auth bridge: reads stored credentials from ManifestAuthDB and,
// when the manifest declares an authService, exchanges them through its
// configured endpoints.
export default class Auth {
    constructor(ctx) {
        // Package manifest; provides `id` and the optional `authService`
        // config ({ getter, fetcher } URLs).
        this.manifest = ctx.manifest
    }
    // Return the stored auth data for this package, optionally refreshed via
    // the manifest's `authService.getter` endpoint. Emits
    // "auth:getter:error" on the global bus and rethrows on getter failure.
    async get() {
        const storagedData = await ManifestAuthDB.get(this.manifest.id)
        if (storagedData && this.manifest.authService) {
            if (!this.manifest.authService.getter) {
                // No getter endpoint configured: stored data is used as-is.
                return storagedData
            }
            const result = await axios({
                method: "POST",
                url: this.manifest.authService.getter,
                headers: {
                    "Content-Type": "application/json",
                },
                data: {
                    auth_data: storagedData,
                }
            }).catch((err) => {
                // Surface the failure on the event bus, then return the error
                // so the instanceof check below rethrows it.
                global._relic_eventBus.emit("auth:getter:error", err)
                return err
            })
            if (result instanceof Error) {
                throw result
            }
            // NOTE(review): debug leftover — consider removing this log.
            console.log(result.data)
            return result.data
        }
        return storagedData
    }
    // Open the manifest's auth fetcher URL in the system browser so the user
    // can authenticate externally; returns false when no fetcher is set.
    request() {
        if (!this.manifest.authService || !this.manifest.authService.fetcher) {
            return false
        }
        const authURL = this.manifest.authService.fetcher
        open(authURL)
    }
}

View File

@ -1,39 +0,0 @@
import fs from "node:fs"
import path from "node:path"
// Protect from reading or write operations outside of the package directory
export default class SecureFileSystem {
    constructor(ctx) {
        // Root directory this instance is allowed to touch.
        this.jailPath = ctx.manifest.install_path
    }
    // Intended to throw when `target` resolves outside the jail.
    // NOTE(review): the check is currently disabled (commented out upstream);
    // re-enabling it needs a separator-aware prefix comparison so "/pkg" does
    // not also match "/pkg-evil" — TODO confirm intent before enabling.
    checkOutsideJail(target) {
        // if (!path.resolve(target).startsWith(this.jailPath)) {
        //     throw new Error("Cannot access resource outside of package directory")
        // }
    }
    // Read a file inside the jail; options are forwarded to fs.readFileSync.
    readFileSync(destination, options) {
        this.checkOutsideJail(destination)
        // BUGFIX: previously referenced an undefined `finalPath`, which made
        // every call throw a ReferenceError.
        return fs.readFileSync(destination, options)
    }
    // Copy a file between two jailed paths.
    copyFileSync(from, to) {
        this.checkOutsideJail(from)
        this.checkOutsideJail(to)
        return fs.copyFileSync(from, to)
    }
    // Write a file inside the jail; options are forwarded to fs.writeFileSync.
    writeFileSync(destination, data, options) {
        this.checkOutsideJail(destination)
        // BUGFIX: same undefined `finalPath` reference as readFileSync.
        return fs.writeFileSync(destination, data, options)
    }
    // don't need to check finalPath
    existsSync(...args) {
        return fs.existsSync(...args)
    }
}

View File

@ -1,15 +0,0 @@
import Open from "./open"
import Path from "./path"
import Fs from "./fs"
import Auth from "./auth"
// Third party libraries
import Mcl from "./mcl"
// Registry of the internal libraries packages may request by name in their
// manifest dependencies; class entries are instantiated per-package by the
// library loader, plain entries are handed over as-is.
export default {
    fs: Fs, // jailed filesystem wrapper
    path: Path,
    open: Open,
    auth: Auth, // manifest-scoped auth bridge
    mcl: Mcl // Minecraft launcher core (third party)
}

View File

@ -1,167 +0,0 @@
const request = require('request')
const { v3 } = require('uuid')
// Memoized v3 UUID derived from the first username seen (see getUUID).
let uuid
// Yggdrasil-style auth server base URL; overridable via changeApiUrl().
let api_url = 'https://authserver.mojang.com'
// Group an array of { name, value } entries into { name: [values...] } and
// return it JSON-encoded; a missing/null input yields '{}'.
function parsePropts(array) {
    if (!array) {
        return '{}'
    }
    const grouped = {}
    for (const entry of array) {
        (grouped[entry.name] ??= []).push(entry.value)
    }
    return JSON.stringify(grouped)
}
// Lazily derive and cache a deterministic v3 (namespace DNS) UUID from
// `value` (usually the username); once set, later calls return the cached
// UUID regardless of input.
function getUUID(value) {
    if (uuid === undefined) {
        uuid = v3(value, v3.DNS)
    }
    return uuid
}
// Yggdrasil-style Minecraft authenticator built on the legacy `request`
// client. All endpoints use the module-level `api_url` base.
const Authenticator = {
    // Authenticate a user. Without a password an "offline" profile is built
    // locally from the memoized UUID; otherwise POST /authenticate.
    getAuth: (username, password, client_token = null) => {
        return new Promise((resolve, reject) => {
            getUUID(username)
            if (!password) {
                // Offline mode: fabricate tokens from the deterministic UUID.
                const user = {
                    access_token: uuid,
                    client_token: client_token || uuid,
                    uuid,
                    name: username,
                    user_properties: '{}'
                }
                return resolve(user)
            }
            const requestObject = {
                url: api_url + '/authenticate',
                json: {
                    agent: {
                        name: 'Minecraft',
                        version: 1
                    },
                    username,
                    password,
                    clientToken: uuid,
                    requestUser: true
                }
            }
            request.post(requestObject, function (error, response, body) {
                if (error) return reject(error)
                if (!body || !body.selectedProfile) {
                    return reject(new Error('Validation error: ' + response.statusMessage))
                }
                const userProfile = {
                    access_token: body.accessToken,
                    client_token: body.clientToken,
                    uuid: body.selectedProfile.id,
                    name: body.selectedProfile.name,
                    selected_profile: body.selectedProfile,
                    user_properties: parsePropts(body.user.properties)
                }
                resolve(userProfile)
            })
        })
    },
    // POST /validate — resolves true when the token pair is valid (empty
    // body); rejects with the server's error body otherwise.
    validate: (accessToken, clientToken) => {
        return new Promise((resolve, reject) => {
            const requestObject = {
                url: api_url + '/validate',
                json: {
                    accessToken,
                    clientToken
                }
            }
            request.post(requestObject, async function (error, response, body) {
                if (error) return reject(error)
                if (!body) resolve(true)
                else reject(body)
            })
        })
    },
    // POST /refresh — exchange an old token pair for a fresh user profile.
    refreshAuth: (accessToken, clientToken) => {
        return new Promise((resolve, reject) => {
            const requestObject = {
                url: api_url + '/refresh',
                json: {
                    accessToken,
                    clientToken,
                    requestUser: true
                }
            }
            request.post(requestObject, function (error, response, body) {
                if (error) return reject(error)
                if (!body || !body.selectedProfile) {
                    return reject(new Error('Validation error: ' + response.statusMessage))
                }
                const userProfile = {
                    access_token: body.accessToken,
                    client_token: getUUID(body.selectedProfile.name),
                    uuid: body.selectedProfile.id,
                    name: body.selectedProfile.name,
                    user_properties: parsePropts(body.user.properties)
                }
                return resolve(userProfile)
            })
        })
    },
    // POST /invalidate — revoke a token pair; resolves true on empty body.
    invalidate: (accessToken, clientToken) => {
        return new Promise((resolve, reject) => {
            const requestObject = {
                url: api_url + '/invalidate',
                json: {
                    accessToken,
                    clientToken
                }
            }
            request.post(requestObject, function (error, response, body) {
                if (error) return reject(error)
                if (!body) return resolve(true)
                else return reject(body)
            })
        })
    },
    // POST /signout — invalidate every token for the account; resolves true
    // on empty body.
    signOut: (username, password) => {
        return new Promise((resolve, reject) => {
            const requestObject = {
                url: api_url + '/signout',
                json: {
                    username,
                    password
                }
            }
            request.post(requestObject, function (error, response, body) {
                if (error) return reject(error)
                if (!body) return resolve(true)
                else return reject(body)
            })
        })
    },
    // Point all subsequent calls at a different auth server.
    changeApiUrl: (url) => {
        api_url = url
    }
}
export default Authenticator

View File

@ -1,783 +0,0 @@
const fs = require('fs')
const path = require('path')
const request = require('request')
const checksum = require('checksum')
const Zip = require('adm-zip')
const child = require('child_process')
// Module-level progress counter shared across download phases; each phase
// resets it to 0 when it finishes.
let counter = 0
export default class Handler {
  // @param client - MCLC client instance; supplies launch options and is used
  //                 for 'debug'/'progress'/'download' event emission.
  constructor (client) {
    this.client = client
    this.options = client.options
    // Shared `request` instance with socket pooling and a timeout applied to
    // every download this handler performs.
    this.baseRequest = request.defaults({
      pool: { maxSockets: this.options.overrides.maxSockets || 2 },
      timeout: this.options.timeout || 10000
    })
  }
  // Verify the given java binary works by running `"<java>" -version`.
  // Resolves { run: true } on success or { run: false, message: error };
  // never rejects.
  checkJava (java) {
    return new Promise(resolve => {
      child.exec(`"${java}" -version`, (error, stdout, stderr) => {
        if (error) {
          resolve({
            run: false,
            message: error
          })
        } else {
          // `java -version` prints to stderr; extract the quoted version
          // string and report the detected bitness.
          this.client.emit('debug', `[MCLC]: Using Java version ${stderr.match(/"(.*?)"/).pop()} ${stderr.includes('64-Bit') ? '64-bit' : '32-Bit'}`)
          resolve({
            run: true
          })
        }
      })
    })
  }
  // Stream `url` into <directory>/<name>, emitting 'download-status' progress
  // and a final 'download' event. On failure it retries once (when `retry` is
  // true) and then resolves anyway — callers are expected to re-verify via
  // checkSum. Resolves false on HTTP 404; never rejects.
  downloadAsync (url, directory, name, retry, type) {
    return new Promise(resolve => {
      fs.mkdirSync(directory, { recursive: true })
      const _request = this.baseRequest(url)
      let receivedBytes = 0
      let totalBytes = 0
      _request.on('response', (data) => {
        if (data.statusCode === 404) {
          this.client.emit('debug', `[MCLC]: Failed to download ${url} due to: File not found...`)
          return resolve(false)
        }
        totalBytes = parseInt(data.headers['content-length'])
      })
      _request.on('error', async (error) => {
        this.client.emit('debug', `[MCLC]: Failed to download asset to ${path.join(directory, name)} due to\n${error}.` +
          ` Retrying... ${retry}`)
        // Single retry: the recursive call passes retry=false.
        if (retry) await this.downloadAsync(url, directory, name, false, type)
        resolve()
      })
      _request.on('data', (data) => {
        receivedBytes += data.length
        this.client.emit('download-status', {
          name: name,
          type: type,
          current: receivedBytes,
          total: totalBytes
        })
      })
      const file = fs.createWriteStream(path.join(directory, name))
      _request.pipe(file)
      file.once('finish', () => {
        this.client.emit('download', name)
        resolve({
          failed: false,
          asset: null
        })
      })
      file.on('error', async (e) => {
        this.client.emit('debug', `[MCLC]: Failed to download asset to ${path.join(directory, name)} due to\n${e}.` +
          ` Retrying... ${retry}`)
        // Remove the partial file before retrying once.
        if (fs.existsSync(path.join(directory, name))) fs.unlinkSync(path.join(directory, name))
        if (retry) await this.downloadAsync(url, directory, name, false, type)
        resolve()
      })
    })
  }
checkSum (hash, file) {
return new Promise((resolve, reject) => {
checksum.file(file, (err, sum) => {
if (err) {
this.client.emit('debug', `[MCLC]: Failed to check file hash due to ${err}`)
resolve(false)
} else {
resolve(hash === sum)
}
})
})
}
  // Resolve the full version JSON for the configured version and cache it on
  // `this.version`. Order: explicit versionJson override / local file, then
  // the remote version manifest (cached under <root|cache>/cache/json, with
  // an ENOTFOUND offline fallback to that cache).
  // Note: non-ENOTFOUND request errors resolve with the error object rather
  // than rejecting — callers must check the result.
  getVersion () {
    return new Promise(resolve => {
      const versionJsonPath = this.options.overrides.versionJson || path.join(this.options.directory, `${this.options.version.number}.json`)
      if (fs.existsSync(versionJsonPath)) {
        this.version = JSON.parse(fs.readFileSync(versionJsonPath))
        return resolve(this.version)
      }
      const manifest = `${this.options.overrides.url.meta}/mc/game/version_manifest.json`
      const cache = this.options.cache ? `${this.options.cache}/json` : `${this.options.root}/cache/json`
      request.get(manifest, (error, response, body) => {
        if (error && error.code !== 'ENOTFOUND') {
          return resolve(error)
        }
        if (!error) {
          if (!fs.existsSync(cache)) {
            fs.mkdirSync(cache, { recursive: true })
            this.client.emit('debug', '[MCLC]: Cache directory created.')
          }
          // Fire-and-forget cache write; failures only resolve early.
          fs.writeFile(path.join(`${cache}/version_manifest.json`), body, (err) => {
            if (err) {
              return resolve(err)
            }
            this.client.emit('debug', '[MCLC]: Cached version_manifest.json (from request)')
          })
        }
        let parsed = null
        // Offline (ENOTFOUND): fall back to the manifest cached on disk.
        if (error && (error.code === 'ENOTFOUND')) {
          parsed = JSON.parse(fs.readFileSync(`${cache}/version_manifest.json`))
        } else {
          parsed = JSON.parse(body)
        }
        const versionManifest = parsed.versions.find((version) => {
          return version.id === this.options.version.number
        })
        if (!versionManifest) {
          return resolve(new Error(`Version not found`))
        }
        // Second hop: fetch (or load from cache) the per-version JSON.
        request.get(versionManifest.url, (error, response, body) => {
          if (error && error.code !== 'ENOTFOUND') {
            return resolve(error)
          }
          if (!error) {
            fs.writeFile(path.join(`${cache}/${this.options.version.number}.json`), body, (err) => {
              if (err) {
                return resolve(err)
              }
              this.client.emit('debug', `[MCLC]: Cached ${this.options.version.number}.json`)
            })
          }
          this.client.emit('debug', '[MCLC]: Parsed version from version manifest')
          if (error && (error.code === 'ENOTFOUND')) {
            this.version = JSON.parse(fs.readFileSync(`${cache}/${this.options.version.number}.json`))
          } else {
            this.version = JSON.parse(body)
          }
          this.client.emit('debug', this.version)
          return resolve(this.version)
        })
      })
    })
  }
  // Download the client jar for the resolved version into the version
  // directory (named after the custom version when one is set) and persist
  // the version JSON next to it. Requires getVersion() to have populated
  // `this.version` first.
  async getJar () {
    await this.downloadAsync(this.version.downloads.client.url, this.options.directory, `${this.options.version.custom ? this.options.version.custom : this.options.version.number}.jar`, true, 'version-jar')
    fs.writeFileSync(path.join(this.options.directory, `${this.options.version.number}.json`), JSON.stringify(this.version, null, 4))
    return this.client.emit('debug', '[MCLC]: Downloaded version jar and wrote version json')
  }
  // Download the asset index and every missing/corrupt asset object into the
  // shared asset root, emitting 'progress' events per phase. For legacy
  // Minecraft versions, assets are additionally copied into <root>/resources.
  async getAssets () {
    const assetDirectory = path.resolve(this.options.overrides.assetRoot || path.join(this.options.root, 'assets'))
    const assetId = this.options.version.custom || this.options.version.number
    if (!fs.existsSync(path.join(assetDirectory, 'indexes', `${assetId}.json`))) {
      await this.downloadAsync(this.version.assetIndex.url, path.join(assetDirectory, 'indexes'),
        `${assetId}.json`, true, 'asset-json')
    }
    const index = JSON.parse(fs.readFileSync(path.join(assetDirectory, 'indexes', `${assetId}.json`), { encoding: 'utf8' }))
    this.client.emit('progress', {
      type: 'assets',
      task: 0,
      total: Object.keys(index.objects).length
    })
    // Assets are stored content-addressed: objects/<first 2 hash chars>/<hash>.
    await Promise.all(Object.keys(index.objects).map(async asset => {
      const hash = index.objects[asset].hash
      const subhash = hash.substring(0, 2)
      const subAsset = path.join(assetDirectory, 'objects', subhash)
      if (!fs.existsSync(path.join(subAsset, hash)) || !await this.checkSum(hash, path.join(subAsset, hash))) {
        await this.downloadAsync(`${this.options.overrides.url.resource}/${subhash}/${hash}`, subAsset, hash,
          true, 'assets')
      }
      counter++
      this.client.emit('progress', {
        type: 'assets',
        task: counter,
        total: Object.keys(index.objects).length
      })
    }))
    counter = 0
    // Copy assets to legacy if it's an older Minecraft version.
    if (this.isLegacy()) {
      if (fs.existsSync(path.join(assetDirectory, 'legacy'))) {
        this.client.emit('debug', '[MCLC]: The \'legacy\' directory is no longer used as Minecraft looks ' +
          'for the resouces folder regardless of what is passed in the assetDirecotry launch option. I\'d ' +
          `recommend removing the directory (${path.join(assetDirectory, 'legacy')})`)
      }
      const legacyDirectory = path.join(this.options.root, 'resources')
      this.client.emit('debug', `[MCLC]: Copying assets over to ${legacyDirectory}`)
      this.client.emit('progress', {
        type: 'assets-copy',
        task: 0,
        total: Object.keys(index.objects).length
      })
      await Promise.all(Object.keys(index.objects).map(async asset => {
        const hash = index.objects[asset].hash
        const subhash = hash.substring(0, 2)
        const subAsset = path.join(assetDirectory, 'objects', subhash)
        // Legacy layout uses the asset's original relative path.
        const legacyAsset = asset.split('/')
        legacyAsset.pop()
        if (!fs.existsSync(path.join(legacyDirectory, legacyAsset.join('/')))) {
          fs.mkdirSync(path.join(legacyDirectory, legacyAsset.join('/')), { recursive: true })
        }
        if (!fs.existsSync(path.join(legacyDirectory, asset))) {
          fs.copyFileSync(path.join(subAsset, hash), path.join(legacyDirectory, asset))
        }
        counter++
        this.client.emit('progress', {
          type: 'assets-copy',
          task: counter,
          total: Object.keys(index.objects).length
        })
      }))
    }
    counter = 0
    this.client.emit('debug', '[MCLC]: Downloaded assets')
  }
parseRule (lib) {
if (lib.rules) {
if (lib.rules.length > 1) {
if (lib.rules[0].action === 'allow' &&
lib.rules[1].action === 'disallow' &&
lib.rules[1].os.name === 'osx') {
return this.getOS() === 'osx'
} else {
return true
}
} else {
if (lib.rules[0].action === 'allow' && lib.rules[0].os) return lib.rules[0].os.name !== this.getOS()
}
} else {
return false
}
}
  // Download and extract this version's native libraries into a per-version
  // natives directory; returns that directory's path. Minecraft 1.19+ no
  // longer uses a separate natives dir, so the cwd/root is returned instead.
  async getNatives () {
    const nativeDirectory = path.resolve(this.options.overrides.natives || path.join(this.options.root, 'natives', this.version.id))
    // 1.19+ ships natives inside the normal libraries; nothing to extract.
    if (parseInt(this.version.id.split('.')[1]) >= 19) return this.options.overrides.cwd || this.options.root
    if (!fs.existsSync(nativeDirectory) || !fs.readdirSync(nativeDirectory).length) {
      fs.mkdirSync(nativeDirectory, { recursive: true })
      // Collect the per-OS native classifier artifact of each library.
      const natives = async () => {
        const natives = []
        await Promise.all(this.version.libraries.map(async (lib) => {
          if (!lib.downloads || !lib.downloads.classifiers) return
          if (this.parseRule(lib)) return
          const native = this.getOS() === 'osx'
            ? lib.downloads.classifiers['natives-osx'] || lib.downloads.classifiers['natives-macos']
            : lib.downloads.classifiers[`natives-${this.getOS()}`]
          natives.push(native)
        }))
        return natives
      }
      const stat = await natives()
      this.client.emit('progress', {
        type: 'natives',
        task: 0,
        total: stat.length
      })
      await Promise.all(stat.map(async (native) => {
        if (!native) return
        const name = native.path.split('/').pop()
        await this.downloadAsync(native.url, nativeDirectory, name, true, 'natives')
        // Re-download once if the archive's checksum doesn't match.
        if (!await this.checkSum(native.sha1, path.join(nativeDirectory, name))) {
          await this.downloadAsync(native.url, nativeDirectory, name, true, 'natives')
        }
        try {
          new Zip(path.join(nativeDirectory, name)).extractAllTo(nativeDirectory, true)
        } catch (e) {
          // Only doing a console.warn since a stupid error happens. You can basically ignore this.
          // if it says Invalid file name, just means two files were downloaded and both were deleted.
          // All is well.
          console.warn(e)
        }
        // The extracted archive itself is no longer needed.
        fs.unlinkSync(path.join(nativeDirectory, name))
        counter++
        this.client.emit('progress', {
          type: 'natives',
          task: counter,
          total: stat.length
        })
      }))
      this.client.emit('debug', '[MCLC]: Downloaded and extracted natives')
    }
    counter = 0
    this.client.emit('debug', `[MCLC]: Set native path to ${nativeDirectory}`)
    return nativeDirectory
  }
fwAddArgs () {
const forgeWrapperAgrs = [
`-Dforgewrapper.librariesDir=${path.resolve(this.options.overrides.libraryRoot || path.join(this.options.root, 'libraries'))}`,
`-Dforgewrapper.installer=${this.options.forge}`,
`-Dforgewrapper.minecraft=${this.options.mcPath}`
]
this.options.customArgs
? this.options.customArgs = this.options.customArgs.concat(forgeWrapperAgrs)
: this.options.customArgs = forgeWrapperAgrs
}
isModernForge (json) {
return json.inheritsFrom && json.inheritsFrom.split('.')[1] >= 12 && !(json.inheritsFrom === '1.12.2' && (json.id.split('.')[json.id.split('.').length - 1]) === '2847')
}
  // Build (or load from cache) an MCLC-compatible "custom" version JSON for
  // the configured Forge installer. Modern Forge gets ForgeWrapper injected
  // into the libraries and `this.options.forge` cleared so MCLC treats the
  // result as a custom JSON; legacy Forge libraries are rewritten to
  // downloadable URLs. Returns the JSON, or null when parsing fails.
  async getForgedWrapped () {
    let json = null
    let installerJson = null
    const versionPath = path.join(this.options.root, 'forge', `${this.version.id}`, 'version.json')
    // Since we're building a proper "custom" JSON that will work nativly with MCLC, the version JSON will not
    // be re-generated on the next run.
    if (fs.existsSync(versionPath)) {
      try {
        json = JSON.parse(fs.readFileSync(versionPath))
        if (!json.forgeWrapperVersion || !(json.forgeWrapperVersion === this.options.overrides.fw.version)) {
          this.client.emit('debug', '[MCLC]: Old ForgeWrapper has generated this version JSON, re-generating')
        } else {
          // If forge is modern, add ForgeWrappers launch arguments and set forge to null so MCLC treats it as a custom json.
          if (this.isModernForge(json)) {
            this.fwAddArgs()
            this.options.forge = null
          }
          return json
        }
      } catch (e) {
        console.warn(e)
        this.client.emit('debug', '[MCLC]: Failed to parse Forge version JSON, re-generating')
      }
    }
    this.client.emit('debug', '[MCLC]: Generating a proper version json, this might take a bit')
    // Read version.json (and install_profile.json when present) straight out
    // of the Forge installer jar.
    const zipFile = new Zip(this.options.forge)
    json = zipFile.readAsText('version.json')
    if (zipFile.getEntry('install_profile.json')) installerJson = zipFile.readAsText('install_profile.json')
    try {
      json = JSON.parse(json)
      if (installerJson) installerJson = JSON.parse(installerJson)
    } catch (e) {
      this.client.emit('debug', '[MCLC]: Failed to load json files for ForgeWrapper, using Vanilla instead')
      return null
    }
    // Adding the installer libraries as mavenFiles so MCLC downloads them but doesn't add them to the class paths.
    if (installerJson) {
      json.mavenFiles
        ? json.mavenFiles = json.mavenFiles.concat(installerJson.libraries)
        : json.mavenFiles = installerJson.libraries
    }
    // Holder for the specifc jar ending which depends on the specifc forge version.
    let jarEnding = 'universal'
    // We need to handle modern forge differently than legacy.
    if (this.isModernForge(json)) {
      // If forge is modern and above 1.12.2, we add ForgeWrapper to the libraries so MCLC includes it in the classpaths.
      if (json.inheritsFrom !== '1.12.2') {
        this.fwAddArgs()
        const fwName = `ForgeWrapper-${this.options.overrides.fw.version}.jar`
        const fwPathArr = ['io', 'github', 'zekerzhayard', 'ForgeWrapper', this.options.overrides.fw.version]
        json.libraries.push({
          name: fwPathArr.join(':'),
          downloads: {
            artifact: {
              path: [...fwPathArr, fwName].join('/'),
              url: `${this.options.overrides.fw.baseUrl}${this.options.overrides.fw.version}/${fwName}`,
              sha1: this.options.overrides.fw.sh1,
              size: this.options.overrides.fw.size
            }
          }
        })
        json.mainClass = 'io.github.zekerzhayard.forgewrapper.installer.Main'
        jarEnding = 'launcher'
        // Providing a download URL to the universal jar mavenFile so it can be downloaded properly.
        for (const library of json.mavenFiles) {
          const lib = library.name.split(':')
          if (lib[0] === 'net.minecraftforge' && lib[1].includes('forge')) {
            library.downloads.artifact.url = 'https://files.minecraftforge.net/maven/' + library.downloads.artifact.path
            break
          }
        }
      } else {
        // Remove the forge dependent since we're going to overwrite the first entry anyways.
        for (const library in json.mavenFiles) {
          const lib = json.mavenFiles[library].name.split(':')
          if (lib[0] === 'net.minecraftforge' && lib[1].includes('forge')) {
            delete json.mavenFiles[library]
            break
          }
        }
      }
    } else {
      // Modifying legacy library format to play nice with MCLC's downloadToDirectory function.
      await Promise.all(json.libraries.map(async library => {
        const lib = library.name.split(':')
        if (lib[0] === 'net.minecraftforge' && lib[1].includes('forge')) return
        let url = this.options.overrides.url.mavenForge
        const name = `${lib[1]}-${lib[2]}.jar`
        if (!library.url) {
          if (library.serverreq || library.clientreq) {
            url = this.options.overrides.url.defaultRepoForge
          } else {
            return
          }
        }
        library.url = url
        const downloadLink = `${url}${lib[0].replace(/\./g, '/')}/${lib[1]}/${lib[2]}/${name}`
        // Checking if the file still exists on Forge's server, if not, replace it with the fallback.
        // Not checking for sucess, only if it 404s.
        this.baseRequest(downloadLink, (error, response, body) => {
          if (error) {
            this.client.emit('debug', `[MCLC]: Failed checking request for ${downloadLink}`)
          } else {
            if (response.statusCode === 404) library.url = this.options.overrides.url.fallbackMaven
          }
        })
      }))
    }
    // If a downloads property exists, we modify the inital forge entry to include ${jarEnding} so ForgeWrapper can work properly.
    // If it doesn't, we simply remove it since we're already providing the universal jar.
    if (json.libraries[0].downloads) {
      if (json.libraries[0].name.includes('minecraftforge')) {
        json.libraries[0].name = json.libraries[0].name + `:${jarEnding}`
        json.libraries[0].downloads.artifact.path = json.libraries[0].downloads.artifact.path.replace('.jar', `-${jarEnding}.jar`)
        json.libraries[0].downloads.artifact.url = 'https://files.minecraftforge.net/maven/' + json.libraries[0].downloads.artifact.path
      }
    } else {
      delete json.libraries[0]
    }
    // Removing duplicates and null types
    json.libraries = this.cleanUp(json.libraries)
    if (json.mavenFiles) json.mavenFiles = this.cleanUp(json.mavenFiles)
    json.forgeWrapperVersion = this.options.overrides.fw.version
    // Saving file for next run!
    if (!fs.existsSync(path.join(this.options.root, 'forge', this.version.id))) {
      fs.mkdirSync(path.join(this.options.root, 'forge', this.version.id), { recursive: true })
    }
    fs.writeFileSync(versionPath, JSON.stringify(json, null, 4))
    // Make MCLC treat modern forge as a custom version json rather then legacy forge.
    if (this.isModernForge(json)) this.options.forge = null
    return json
  }
runInstaller (path) {
return new Promise(resolve => {
const installer = child.exec(path)
installer.on('close', (code) => resolve(code))
})
}
async downloadToDirectory (directory, libraries, eventName) {
const libs = []
await Promise.all(libraries.map(async library => {
if (!library) return
if (this.parseRule(library)) return
const lib = library.name.split(':')
let jarPath
let name
if (library.downloads && library.downloads.artifact && library.downloads.artifact.path) {
name = library.downloads.artifact.path.split('/')[library.downloads.artifact.path.split('/').length - 1]
jarPath = path.join(directory, this.popString(library.downloads.artifact.path))
} else {
name = `${lib[1]}-${lib[2]}${lib[3] ? '-' + lib[3] : ''}.jar`
jarPath = path.join(directory, `${lib[0].replace(/\./g, '/')}/${lib[1]}/${lib[2]}`)
}
const downloadLibrary = async library => {
if (library.url) {
const url = `${library.url}${lib[0].replace(/\./g, '/')}/${lib[1]}/${lib[2]}/${name}`
await this.downloadAsync(url, jarPath, name, true, eventName)
} else if (library.downloads && library.downloads.artifact) {
await this.downloadAsync(library.downloads.artifact.url, jarPath, name, true, eventName)
}
}
if (!fs.existsSync(path.join(jarPath, name))) downloadLibrary(library)
else if (library.downloads && library.downloads.artifact) {
if (!this.checkSum(library.downloads.artifact.sha1, path.join(jarPath, name))) downloadLibrary(library)
}
counter++
this.client.emit('progress', {
type: eventName,
task: counter,
total: libraries.length
})
libs.push(`${jarPath}${path.sep}${name}`)
}))
counter = 0
return libs
}
async getClasses (classJson) {
let libs = []
const libraryDirectory = path.resolve(this.options.overrides.libraryRoot || path.join(this.options.root, 'libraries'))
if (classJson) {
if (classJson.mavenFiles) {
await this.downloadToDirectory(libraryDirectory, classJson.mavenFiles, 'classes-maven-custom')
}
libs = (await this.downloadToDirectory(libraryDirectory, classJson.libraries, 'classes-custom'))
}
const parsed = this.version.libraries.map(lib => {
if (lib.downloads && lib.downloads.artifact && !this.parseRule(lib)) return lib
})
libs = libs.concat((await this.downloadToDirectory(libraryDirectory, parsed, 'classes')))
counter = 0
// Temp Quilt support
if (classJson) libs.sort()
this.client.emit('debug', '[MCLC]: Collected class paths')
return libs
}
popString (path) {
const tempArray = path.split('/')
tempArray.pop()
return tempArray.join('/')
}
cleanUp (array) {
const newArray = []
for (const classPath in array) {
if (newArray.includes(array[classPath]) || array[classPath] === null) continue
newArray.push(array[classPath])
}
return newArray
}
/**
 * Builds the quickPlay launch arguments from `this.options.quickPlay`.
 * Emits a debug message and returns null when the type is unknown.
 * NOTE: the destructured `path` below is the quickPlay save path and
 * shadows the module-level `path` import inside this method.
 */
formatQuickPlay () {
  // Maps each quickPlay type to its CLI flag; `legacy` is handled specially.
  const types = {
    singleplayer: '--quickPlaySingleplayer',
    multiplayer: '--quickPlayMultiplayer',
    realms: '--quickPlayRealms',
    legacy: null
  }
  const { type, identifier, path } = this.options.quickPlay
  const keys = Object.keys(types)
  if (!keys.includes(type)) {
    this.client.emit('debug', `[MCLC]: quickPlay type is not valid. Valid types are: ${keys.join(', ')}`)
    return null
  }
  // Legacy servers use --server/--port, defaulting to port 25565.
  const returnArgs = type === 'legacy'
    ? ['--server', identifier.split(':')[0], '--port', identifier.split(':')[1] || '25565']
    : [types[type], identifier]
  if (path) returnArgs.push('--quickPlayPath', path)
  return returnArgs
}
async getLaunchOptions (modification) {
const type = Object.assign({}, this.version, modification)
let args = type.minecraftArguments
? type.minecraftArguments.split(' ')
: type.arguments.game
const assetRoot = path.resolve(this.options.overrides.assetRoot || path.join(this.options.root, 'assets'))
const assetPath = this.isLegacy()
? path.join(this.options.root, 'resources')
: path.join(assetRoot)
const minArgs = this.options.overrides.minArgs || this.isLegacy() ? 5 : 11
if (args.length < minArgs) args = args.concat(this.version.minecraftArguments ? this.version.minecraftArguments.split(' ') : this.version.arguments.game)
if (this.options.customLaunchArgs) args = args.concat(this.options.customLaunchArgs)
this.options.authorization = await Promise.resolve(this.options.authorization)
this.options.authorization.meta = this.options.authorization.meta ? this.options.authorization.meta : { type: 'mojang' }
const fields = {
'${auth_access_token}': this.options.authorization.access_token,
'${auth_session}': this.options.authorization.access_token,
'${auth_player_name}': this.options.authorization.name,
'${auth_uuid}': this.options.authorization.uuid,
'${auth_xuid}': this.options.authorization.meta.xuid || this.options.authorization.access_token,
'${user_properties}': this.options.authorization.user_properties,
'${user_type}': this.options.authorization.meta.type,
'${version_name}': this.options.version.number,
'${assets_index_name}': this.options.overrides.assetIndex || this.options.version.custom || this.options.version.number,
'${game_directory}': this.options.overrides.gameDirectory || this.options.root,
'${assets_root}': assetPath,
'${game_assets}': assetPath,
'${version_type}': this.options.version.type,
'${clientid}': this.options.authorization.meta.clientId || (this.options.authorization.client_token || this.options.authorization.access_token),
'${resolution_width}': this.options.window ? this.options.window.width : 856,
'${resolution_height}': this.options.window ? this.options.window.height : 482
}
if (this.options.authorization.meta.demo && (this.options.features ? !this.options.features.includes('is_demo_user') : true)) {
args.push('--demo')
}
const replaceArg = (obj, index) => {
if (Array.isArray(obj.value)) {
for (const arg of obj.value) {
args.push(arg)
}
} else {
args.push(obj.value)
}
delete args[index]
}
for (let index = 0; index < args.length; index++) {
if (typeof args[index] === 'object') {
if (args[index].rules) {
if (!this.options.features) continue
const featureFlags = []
for (const rule of args[index].rules) {
featureFlags.push(...Object.keys(rule.features))
}
let hasAllRules = true
for (const feature of this.options.features) {
if (!featureFlags.includes(feature)) {
hasAllRules = false
}
}
if (hasAllRules) replaceArg(args[index], index)
} else {
replaceArg(args[index], index)
}
} else {
if (Object.keys(fields).includes(args[index])) {
args[index] = fields[args[index]]
}
}
}
if (this.options.window) {
// eslint-disable-next-line no-unused-expressions
this.options.window.fullscreen
? args.push('--fullscreen')
: () => {
if (this.options.features ? !this.options.features.includes('has_custom_resolution') : true) {
args.push('--width', this.options.window.width, '--height', this.options.window.height)
}
}
}
if (this.options.server) this.client.emit('debug', '[MCLC]: server and port are deprecated launch flags. Use the quickPlay field.')
if (this.options.quickPlay) args = args.concat(this.formatQuickPlay())
if (this.options.proxy) {
args.push(
'--proxyHost',
this.options.proxy.host,
'--proxyPort',
this.options.proxy.port || '8080',
'--proxyUser',
this.options.proxy.username,
'--proxyPass',
this.options.proxy.password
)
}
args = args.filter(value => typeof value === 'string' || typeof value === 'number')
this.client.emit('debug', '[MCLC]: Set launch options')
return args
}
async getJVM () {
const opts = {
windows: '-XX:HeapDumpPath=MojangTricksIntelDriversForPerformance_javaw.exe_minecraft.exe.heapdump',
osx: '-XstartOnFirstThread',
linux: '-Xss1M'
}
return opts[this.getOS()]
}
isLegacy () {
return this.version.assets === 'legacy' || this.version.assets === 'pre-1.6'
}
getOS () {
if (this.options.os) {
return this.options.os
} else {
switch (process.platform) {
case 'win32': return 'windows'
case 'darwin': return 'osx'
default: return 'linux'
}
}
}
// To prevent launchers from breaking when they update. Will be reworked with rewrite.
getMemory () {
if (!this.options.memory) {
this.client.emit('debug', '[MCLC]: Memory not set! Setting 1GB as MAX!')
this.options.memory = {
min: 512,
max: 1023
}
}
if (!isNaN(this.options.memory.max) && !isNaN(this.options.memory.min)) {
if (this.options.memory.max < this.options.memory.min) {
this.client.emit('debug', '[MCLC]: MIN memory is higher then MAX! Resetting!')
this.options.memory.max = 1023
this.options.memory.min = 512
}
return [`${this.options.memory.max}M`, `${this.options.memory.min}M`]
} else { return [`${this.options.memory.max}`, `${this.options.memory.min}`] }
}
async extractPackage (options = this.options) {
if (options.clientPackage.startsWith('http')) {
await this.downloadAsync(options.clientPackage, options.root, 'clientPackage.zip', true, 'client-package')
options.clientPackage = path.join(options.root, 'clientPackage.zip')
}
new Zip(options.clientPackage).extractAllTo(options.root, true)
if (options.removePackage) fs.unlinkSync(options.clientPackage)
return this.client.emit('package-extract', true)
}
}

View File

@ -1,49 +0,0 @@
import Logger from "../../../logger"
import Client from "./launcher"
import Authenticator from "./authenticator"
const Log = Logger.child({ service: "MCL" })
export default class MCL {
    /**
     * Asynchronously authenticate the user using the provided username and password.
     *
     * @param {string} username - the username of the user
     * @param {string} password - the password of the user
     * @return {Promise<Object>} the authentication information
     */
    async auth(username, password) {
        return await Authenticator.getAuth(username, password)
    }

    /**
     * Launches a new client with the given options.
     *
     * @param {Object} opts - The options to be passed for launching the client.
     * @param {Object} [callbacks] - Optional lifecycle hooks; when omitted,
     * defaults that log download/initialization progress are used.
     * @return {Promise<Client>} A promise that resolves with the launched client.
     */
    async launch(opts, callbacks) {
        const launcher = new Client()

        // Mirror every launcher event to the console for debugging.
        for (const event of ["debug", "data", "close", "error"]) {
            launcher.on(event, (e) => console.log(e))
        }

        if (typeof callbacks === "undefined") {
            callbacks = {
                install: () => {
                    Log.info("Downloading Minecraft assets...")
                },
                init_assets: () => {
                    Log.info("Initializing Minecraft assets...")
                },
            }
        }

        await launcher.launch(opts, callbacks)

        return launcher
    }
}

View File

@ -1,224 +0,0 @@
import fs from "node:fs"
import path from "node:path"
import { EventEmitter } from "events"
import child from "child_process"
import Handler from "./handler"
export default class MCLCore extends EventEmitter {
    /**
     * Full launch pipeline: validates Java, prepares directories, runs an
     * optional installer, resolves version/native/class-path data, builds
     * the JVM and game argument lists, then spawns the Minecraft process.
     *
     * @param {Object} options - Launcher options (root, version, memory, overrides, ...).
     * @param {Object} [callbacks={}] - Optional hooks: `install` (before jar
     * download) and `init_assets` (before asset download).
     * @returns {Promise<Object|null>} The spawned child process, or null on failure.
     */
    async launch(options, callbacks = {}) {
        try {
            this.options = { ...options }
            this.options.root = path.resolve(this.options.root)
            // Merge caller overrides over the default endpoint and ForgeWrapper settings.
            // NOTE(review): `mavenForge` is plain http — confirm whether this
            // endpoint should be https.
            this.options.overrides = {
                detached: true,
                ...this.options.overrides,
                url: {
                    meta: 'https://launchermeta.mojang.com',
                    resource: 'https://resources.download.minecraft.net',
                    mavenForge: 'http://files.minecraftforge.net/maven/',
                    defaultRepoForge: 'https://libraries.minecraft.net/',
                    fallbackMaven: 'https://search.maven.org/remotecontent?filepath=',
                    ...this.options.overrides
                        ? this.options.overrides.url
                        : undefined
                },
                fw: {
                    baseUrl: 'https://github.com/ZekerZhayard/ForgeWrapper/releases/download/',
                    version: '1.5.6',
                    sh1: 'b38d28e8b7fde13b1bc0db946a2da6760fecf98d',
                    size: 34715,
                    ...this.options.overrides
                        ? this.options.overrides.fw
                        : undefined
                }
            }
            this.handler = new Handler(this)
            this.printVersion()
            // Abort early when no usable Java runtime is found.
            const java = await this.handler.checkJava(this.options.javaPath || 'java')
            if (!java.run) {
                this.emit('debug', `[MCLC]: Couldn't start Minecraft due to: ${java.message}`)
                this.emit('close', 1)
                return null
            }
            this.createRootDirectory()
            this.createGameDirectory()
            await this.extractPackage()
            if (this.options.installer) {
                // So installers that create a profile in launcher_profiles.json can run without breaking.
                const profilePath = path.join(this.options.root, 'launcher_profiles.json')
                if (!fs.existsSync(profilePath) || !JSON.parse(fs.readFileSync(profilePath)).profiles) {
                    fs.writeFileSync(profilePath, JSON.stringify({ profiles: {} }, null, 4))
                }
                const code = await this.handler.runInstaller(this.options.installer)
                if (!this.options.version.custom && code === 0) {
                    this.emit('debug', '[MCLC]: Installer successfully ran, but no custom version was provided')
                }
                this.emit('debug', `[MCLC]: Installer closed with code ${code}`)
            }
            // Custom versions live under their own name; vanilla uses the version number.
            const directory = this.options.overrides.directory || path.join(this.options.root, 'versions', this.options.version.custom ? this.options.version.custom : this.options.version.number)
            this.options.directory = directory
            const versionFile = await this.handler.getVersion()
            const mcPath = this.options.overrides.minecraftJar || (this.options.version.custom
                ? path.join(this.options.root, 'versions', this.options.version.custom, `${this.options.version.custom}.jar`)
                : path.join(directory, `${this.options.version.number}.jar`))
            this.options.mcPath = mcPath
            const nativePath = await this.handler.getNatives()
            if (!fs.existsSync(mcPath)) {
                this.emit('debug', '[MCLC]: Attempting to download Minecraft version jar')
                // Notify the embedding app that a download is starting.
                if (typeof callbacks.install === "function") {
                    callbacks.install()
                }
                await this.handler.getJar()
            }
            const modifyJson = await this.getModifyJson()
            const args = []
            let jvm = [
                '-XX:-UseAdaptiveSizePolicy',
                '-XX:-OmitStackTraceInFastThrow',
                '-Dfml.ignorePatchDiscrepancies=true',
                '-Dfml.ignoreInvalidMinecraftCertificates=true',
                `-Djava.library.path=${nativePath}`,
                `-Xmx${this.handler.getMemory()[0]}`,
                `-Xms${this.handler.getMemory()[1]}`
            ]
            // -XstartOnFirstThread (the osx flag) is only valid on 1.13+.
            if (this.handler.getOS() === 'osx') {
                if (parseInt(versionFile.id.split('.')[1]) > 12) jvm.push(await this.handler.getJVM())
            } else jvm.push(await this.handler.getJVM())
            if (this.options.customArgs) jvm = jvm.concat(this.options.customArgs)
            if (this.options.overrides.logj4ConfigurationFile) {
                jvm.push(`-Dlog4j.configurationFile=${path.resolve(this.options.overrides.logj4ConfigurationFile)}`)
            }
            // https://help.minecraft.net/hc/en-us/articles/4416199399693-Security-Vulnerability-in-Minecraft-Java-Edition
            if (parseInt(versionFile.id.split('.')[1]) === 18 && !parseInt(versionFile.id.split('.')[2])) jvm.push('-Dlog4j2.formatMsgNoLookups=true')
            if (parseInt(versionFile.id.split('.')[1]) === 17) jvm.push('-Dlog4j2.formatMsgNoLookups=true')
            if (parseInt(versionFile.id.split('.')[1]) < 17) {
                // Older versions need a patched log4j config file downloaded from Mojang.
                if (!jvm.find(arg => arg.includes('Dlog4j.configurationFile'))) {
                    const configPath = path.resolve(this.options.overrides.cwd || this.options.root)
                    const intVersion = parseInt(versionFile.id.split('.')[1])
                    if (intVersion >= 12) {
                        await this.handler.downloadAsync('https://launcher.mojang.com/v1/objects/02937d122c86ce73319ef9975b58896fc1b491d1/log4j2_112-116.xml',
                            configPath, 'log4j2_112-116.xml', true, 'log4j')
                        jvm.push('-Dlog4j.configurationFile=log4j2_112-116.xml')
                    } else if (intVersion >= 7) {
                        await this.handler.downloadAsync('https://launcher.mojang.com/v1/objects/dd2b723346a8dcd48e7f4d245f6bf09e98db9696/log4j2_17-111.xml',
                            configPath, 'log4j2_17-111.xml', true, 'log4j')
                        jvm.push('-Dlog4j.configurationFile=log4j2_17-111.xml')
                    }
                }
            }
            const classes = this.options.overrides.classes || this.handler.cleanUp(await this.handler.getClasses(modifyJson))
            const classPaths = ['-cp']
            const separator = this.handler.getOS() === 'windows' ? ';' : ':'
            this.emit('debug', `[MCLC]: Using ${separator} to separate class paths`)
            // Handling launch arguments.
            const file = modifyJson || versionFile
            // So mods like fabric work.
            const jar = fs.existsSync(mcPath)
                ? `${separator}${mcPath}`
                : `${separator}${path.join(directory, `${this.options.version.number}.jar`)}`
            classPaths.push(`${this.options.forge ? this.options.forge + separator : ''}${classes.join(separator)}${jar}`)
            classPaths.push(file.mainClass)
            this.emit('debug', '[MCLC]: Attempting to download assets')
            // Notify the embedding app that asset initialization is starting.
            if (typeof callbacks.init_assets === "function") {
                callbacks.init_assets()
            }
            await this.handler.getAssets()
            // Forge -> Custom -> Vanilla
            const launchOptions = await this.handler.getLaunchOptions(modifyJson)
            const launchArguments = args.concat(jvm, classPaths, launchOptions)
            this.emit('arguments', launchArguments)
            this.emit('debug', `[MCLC]: Launching with arguments ${launchArguments.join(' ')}`)
            return this.startMinecraft(launchArguments)
        } catch (e) {
            // Any failure in the pipeline is reported via debug and swallowed.
            this.emit('debug', `[MCLC]: Failed to start due to ${e}, closing...`)
            return null
        }
    }

    // Emits the MCLC package version (when package.json is locatable) for debugging.
    printVersion() {
        if (fs.existsSync(path.join(__dirname, '..', 'package.json'))) {
            const { version } = require('../package.json')
            this.emit('debug', `[MCLC]: MCLC version ${version}`)
        } else { this.emit('debug', '[MCLC]: Package JSON not found, skipping MCLC version check.') }
    }

    // Ensures the game root directory exists.
    createRootDirectory() {
        if (!fs.existsSync(this.options.root)) {
            this.emit('debug', '[MCLC]: Attempting to create root folder')
            fs.mkdirSync(this.options.root)
        }
    }

    // Resolves and (if needed) creates the optional separate game directory override.
    createGameDirectory() {
        if (this.options.overrides.gameDirectory) {
            this.options.overrides.gameDirectory = path.resolve(this.options.overrides.gameDirectory)
            if (!fs.existsSync(this.options.overrides.gameDirectory)) {
                fs.mkdirSync(this.options.overrides.gameDirectory, { recursive: true })
            }
        }
    }

    // Extracts the client package (when configured) via the handler.
    async extractPackage() {
        if (this.options.clientPackage) {
            this.emit('debug', `[MCLC]: Extracting client package to ${this.options.root}`)
            await this.handler.extractPackage()
        }
    }

    /**
     * Resolves the version JSON modification to apply on top of vanilla:
     * Forge wrapping when `options.forge` is set, otherwise a custom
     * version JSON read from disk, otherwise null.
     *
     * @returns {Promise<Object|null>} The modification JSON, or null.
     */
    async getModifyJson() {
        let modifyJson = null
        if (this.options.forge) {
            this.options.forge = path.resolve(this.options.forge)
            this.emit('debug', '[MCLC]: Detected Forge in options, getting dependencies')
            modifyJson = await this.handler.getForgedWrapped()
        } else if (this.options.version.custom) {
            this.emit('debug', '[MCLC]: Detected custom in options, setting custom version file')
            modifyJson = modifyJson || JSON.parse(fs.readFileSync(path.join(this.options.root, 'versions', this.options.version.custom, `${this.options.version.custom}.json`), { encoding: 'utf8' }))
        }
        return modifyJson
    }

    /**
     * Spawns the Java process with the assembled arguments and forwards
     * its stdout/stderr as `data` events and exit code as `close`.
     *
     * @param {Array} launchArguments - Full JVM + game argument list.
     * @returns {Object} The spawned child process.
     */
    startMinecraft(launchArguments) {
        const minecraft = child.spawn(this.options.javaPath ? this.options.javaPath : 'java', launchArguments,
            { cwd: this.options.overrides.cwd || this.options.root, detached: this.options.overrides.detached })
        minecraft.stdout.on('data', (data) => this.emit('data', data.toString('utf-8')))
        minecraft.stderr.on('data', (data) => this.emit('data', data.toString('utf-8')))
        minecraft.on('close', (code) => this.emit('close', code))
        return minecraft
    }
}

View File

@ -1,15 +0,0 @@
import Logger from "../../../logger"
import open, { apps } from "open"
const Log = Logger.child({ service: "OPEN-LIB" })
// Thin logging wrapper around the `open` package.
export default {
    /**
     * Logs the launch request, then delegates to `open` unchanged.
     *
     * @param {...any} spawnArgs - Arguments forwarded verbatim to `open`.
     * @returns {Promise<any>} Whatever `open` resolves with.
     */
    spawn: async (...spawnArgs) => {
        Log.info("Open spawned with args >")
        console.log(...spawnArgs)
        return await open(...spawnArgs)
    },
    apps,
}

View File

@ -1,3 +0,0 @@
// Thin shim that re-exports Node's built-in `path` module so internal
// code depends on a single project-local import location.
import path from "node:path"
export default path

View File

@ -1,51 +0,0 @@
import fs from "node:fs"
import path from "node:path"
import axios from "axios"
import checksum from "checksum"
import Vars from "../vars"
/**
 * Resolves a manifest reference into its local file and source code.
 *
 * Accepts either a filesystem path or a URL (also as `{ remote_url }`).
 * Remote manifests are downloaded and cached under `Vars.cache_path`,
 * keyed by the MD5 checksum of their contents.
 *
 * Fix: removed the `g` flag from the URL regex — `RegExp.test` with a
 * global flag is stateful (advances `lastIndex`), a latent footgun.
 *
 * @param {string|{remote_url: string}} manifest - Path, URL, or object carrying `remote_url`.
 * @returns {Promise<{remote_manifest: (string|Object|undefined), local_manifest: string, is_catched: boolean, code: string}>}
 *   `is_catched` (sic — key kept for compatibility, presumably "cached")
 *   is true when the manifest was fetched remotely and cached.
 * @throws {Error} When a local manifest path is missing or not a file.
 */
export async function readManifest(manifest) {
    // check if manifest is a directory or a url
    const urlRegex = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)/i

    const target = manifest?.remote_url ?? manifest

    if (urlRegex.test(target)) {
        if (!fs.existsSync(Vars.cache_path)) {
            fs.mkdirSync(Vars.cache_path, { recursive: true })
        }

        const { data: code } = await axios.get(target)
        const manifestChecksum = checksum(code, { algorithm: "md5" })
        const cachedManifest = path.join(Vars.cache_path, `${manifestChecksum}.rmanifest`)

        await fs.promises.writeFile(cachedManifest, code)

        return {
            remote_manifest: manifest,
            local_manifest: cachedManifest,
            is_catched: true,
            code: code,
        }
    } else {
        if (!fs.existsSync(target)) {
            throw new Error(`Manifest not found: ${target}`)
        }

        if (!fs.statSync(target).isFile()) {
            throw new Error(`Manifest is not a file: ${target}`)
        }

        return {
            remote_manifest: undefined,
            local_manifest: target,
            is_catched: false,
            code: fs.readFileSync(target, "utf8"),
        }
    }
}

export default readManifest

View File

@ -1,83 +0,0 @@
import Logger from "../logger"
import os from "node:os"
import vm from "node:vm"
import path from "node:path"
import ManifestConfigManager from "../classes/ManifestConfig"
import resolveOs from "../utils/resolveOs"
import FetchLibraries from "./libraries"
import Vars from "../vars"
/**
 * Finalises a manifest class evaluated inside the VM: injects the
 * package install path into both the VM context and the class, and
 * (unless `soft`) initialises its config manager, resolves declared
 * library dependencies, then constructs and returns an instance.
 *
 * NOTE(review): when `soft` is true the uninstantiated class itself is
 * returned — callers appear to handle both shapes; confirm.
 *
 * @param {Function} baseClass - Manifest class produced by the evaluated code.
 * @param {Object} context - VM context object (mutated: install_path, Log, Lib, Config).
 * @param {Object} [opts] - `soft`: skip config/library setup and instantiation.
 * @returns {Promise<Object|Function>} Manifest instance, or the class when soft.
 */
async function BuildManifest(baseClass, context, { soft = false } = {}) {
    // inject install_path
    context.install_path = path.resolve(Vars.packages_path, baseClass.id)
    baseClass.install_path = context.install_path
    if (soft === true) {
        return baseClass
    }
    const configManager = new ManifestConfigManager(baseClass.id)
    await configManager.initialize()
    // Libraries declared via the static `useLib` array on the manifest class.
    let dependencies = []
    if (Array.isArray(baseClass.useLib)) {
        dependencies = [
            ...dependencies,
            ...baseClass.useLib
        ]
    }
    // modify context
    context.Log = Logger.child({ service: `VM|${baseClass.id}` })
    context.Lib = await FetchLibraries(dependencies, {
        manifest: baseClass,
        install_path: context.install_path,
    })
    context.Config = configManager
    // Construct the instance
    const instance = new baseClass()
    instance.install_path = context.install_path
    return instance
}
/**
 * Appends the `use(Manifest)` trailer that hands the evaluated manifest
 * class back to the VM context's injected `use()` callback.
 *
 * @param {string} code - Raw manifest source code.
 * @returns {string} Source code with the `use(...)` call appended.
 */
function injectUseManifest(code) {
    return `${code}\n\nuse(Manifest);`
}
/**
 * Evaluates manifest source code inside a fresh `vm` context and
 * resolves with the built manifest (see BuildManifest).
 *
 * The code is suffixed with `use(Manifest)`, so the evaluated script's
 * `Manifest` class is handed to the injected `use()` callback, which
 * builds it and resolves the promise.
 *
 * NOTE(review): async promise executor — errors thrown synchronously by
 * the evaluated code are caught and rejected, but a script that never
 * calls `use()` leaves this promise pending forever; confirm callers
 * guard against that.
 *
 * @param {string} code - Manifest source code to evaluate.
 * @param {Object} [opts] - `soft` is forwarded to BuildManifest.
 * @returns {Promise<Object|Function>} The built manifest instance (or class when soft).
 */
export default async (code, { soft = false } = {}) => {
    return await new Promise(async (resolve, reject) => {
        try {
            code = injectUseManifest(code)
            // Globals exposed to the evaluated manifest script.
            const context = {
                Vars: Vars,
                Log: Logger.child({ service: "MANIFEST_VM" }),
                use: (baseClass) => {
                    return BuildManifest(
                        baseClass,
                        context,
                        {
                            soft: soft,
                        }
                    ).then(resolve)
                },
                os_string: resolveOs(),
                arch: os.arch(),
            }
            vm.createContext(context)
            await vm.runInContext(code, context)
        } catch (error) {
            reject(error)
        }
    })
}

View File

@ -1,70 +0,0 @@
import resolveRemoteBinPath from "./utils/resolveRemoteBinPath"
import Vars from "./vars"
import path from "node:path"
import axios from "axios"
const baseURL = "https://storage.ragestudio.net/rstudio/binaries"
// Registry of helper binaries the runtime downloads on demand. Each entry
// describes where to fetch the artifact, where to place/extract it, which
// platforms need it (`requireOs`), and post-install fixups (permission
// rewrites, directory moves, pre-extract cleanup).
export default [
    // Standalone 7-Zip executable, used by the extractor for .7z/.gz archives.
    {
        id: "7z-bin",
        finalBin: Vars.sevenzip_bin,
        url: resolveRemoteBinPath(`${baseURL}/7zip-bin`, process.platform === "win32" ? "7za.exe" : "7za"),
        destination: Vars.sevenzip_bin,
        rewriteExecutionPermission: true,
    },
    // Portable git bundle — Windows only.
    {
        id: "git-bin",
        finalBin: Vars.git_bin,
        url: resolveRemoteBinPath(`${baseURL}/git`, "git-bundle-2.4.0.zip"),
        destination: path.resolve(Vars.binaries_path, "git-bundle.zip"),
        extract: path.resolve(Vars.binaries_path, "git-bin"),
        requireOs: ["win32"],
        rewriteExecutionPermission: true,
        deleteBeforeExtract: true,
    },
    // rclone sync tool — Windows only.
    {
        id: "rclone-bin",
        finalBin: Vars.rclone_bin,
        url: resolveRemoteBinPath(`${baseURL}/rclone`, "rclone-bin.zip"),
        destination: path.resolve(Vars.binaries_path, "rclone-bin.zip"),
        extract: path.resolve(Vars.binaries_path, "rclone-bin"),
        requireOs: ["win32"],
        rewriteExecutionPermission: true,
        deleteBeforeExtract: true,
    },
    // Zulu JRE (Java 22), resolved dynamically from the Azul metadata API
    // for the current os/arch; `url` may be an async resolver function.
    {
        id: "java_jre_bin",
        finalBin: Vars.java_jre_bin,
        url: async (os, arch) => {
            const { data } = await axios({
                method: "GET",
                url: "https://api.azul.com/metadata/v1/zulu/packages",
                params: {
                    arch: arch,
                    java_version: "JAVA_22",
                    os: os,
                    archive_type: "zip",
                    javafx_bundled: "false",
                    java_package_type: "jre",
                    page_size: "1",
                }
            })
            return data[0].download_url
        },
        destination: path.resolve(Vars.binaries_path, "java-jre.zip"),
        extract: path.resolve(Vars.binaries_path, "java_jre_bin"),
        extractTargetFromName: true,
        // macOS zips nest the JRE under zulu-22.jre/Contents; hoist it up.
        moveDirs: [
            {
                requireOs: ["macos"],
                from: path.resolve(Vars.binaries_path, "java_jre_bin", "zulu-22.jre", "Contents"),
                to: path.resolve(Vars.binaries_path, "java_jre_bin", "Contents"),
                deleteParentBefore: true
            }
        ],
        rewriteExecutionPermission: path.resolve(Vars.binaries_path, "java_jre_bin"),
        deleteBeforeExtract: true,
    },
]

View File

@ -1,16 +0,0 @@
import fs from "node:fs"
import path from "node:path"
/**
 * Recursively applies `mode` to every regular file beneath `target`.
 * `lstat` is used, so symlinks are not followed into.
 *
 * NOTE(review): directories themselves are left untouched (only files
 * are chmod'ed) — confirm this matches callers' expectations.
 *
 * Fix: replaced the blocking `fs.lstatSync` with `fs.promises.lstat` so
 * the async walker no longer blocks the event loop.
 *
 * @param {string} target - File or directory to process.
 * @param {number} mode - Permission bits to apply (e.g. 0o755).
 * @returns {Promise<void>}
 */
async function chmodRecursive(target, mode) {
    const stat = await fs.promises.lstat(target)

    if (stat.isDirectory()) {
        const files = await fs.promises.readdir(target, { withFileTypes: true })

        for (const file of files) {
            await chmodRecursive(path.join(target, file.name), mode)
        }
    } else {
        await fs.promises.chmod(target, mode)
    }
}

export default chmodRecursive

View File

@ -1,48 +0,0 @@
import Logger from "../logger"
import fs from "node:fs"
import path from "node:path"
import { pipeline as streamPipeline } from "node:stream/promises"
import { extractFull } from "node-7z"
import unzipper from "unzipper"
import Vars from "../vars"
const Log = Logger.child({ service: "EXTRACTOR" })
/**
 * Extracts an archive to `dest`, choosing the extractor by extension:
 * `.zip` streams through unzipper, while `.7z` and `.gz` are handed to
 * the bundled 7-Zip binary. Any other extension throws.
 *
 * @param {string} file - Path to the archive.
 * @param {string} dest - Destination directory.
 * @returns {Promise<string>} The destination directory.
 * @throws {Error} For unsupported archive extensions.
 */
export async function extractFile(file, dest) {
    const ext = path.extname(file)

    Log.info(`Extracting ${file} to ${dest}`)

    if (ext === ".zip") {
        await streamPipeline(
            fs.createReadStream(file),
            unzipper.Extract({ path: dest }),
        )
    } else if (ext === ".7z" || ext === ".gz") {
        await extractFull(file, dest, {
            $bin: Vars.sevenzip_bin,
        })
    } else {
        throw new Error(`Unsupported file extension: ${ext}`)
    }

    return dest
}

export default extractFile

View File

@ -1,21 +0,0 @@
/**
 * Expands `%var%` placeholders in `str` using fields from a package
 * manifest. Recognised variables: id, name, version, install_path, remote.
 *
 * Fix: unknown placeholders (and unset fields) are now left intact
 * instead of being replaced with the literal string "undefined".
 *
 * @param {string} str - Template string containing `%var%` tokens.
 * @param {Object} pkg - Package manifest providing substitution values.
 * @returns {string} The expanded string (returned unchanged when `pkg` is falsy).
 */
export default function parseStringVars(str, pkg) {
    if (!pkg) {
        return str
    }

    const vars = {
        id: pkg.id,
        name: pkg.name,
        version: pkg.version,
        install_path: pkg.install_path,
        remote: pkg.remote,
    }

    // Keep the original token when the variable is unknown or unset.
    return str.replace(/%([^%]+)%/g, (match, varName) => vars[varName] ?? match)
}

View File

@ -1,25 +0,0 @@
import fs from "node:fs"
import path from "node:path"
/**
 * Recursively lists file paths under `dir`, descending at most
 * `maxDepth` levels; directories beyond the limit contribute nothing.
 *
 * @param {string} dir - Directory to walk.
 * @param {number} [maxDepth=3] - Maximum recursion depth.
 * @param {number} [current=0] - Internal depth counter.
 * @returns {Promise<string[]>} Flat list of discovered file paths.
 */
async function readDirRecurse(dir, maxDepth = 3, current = 0) {
    if (current > maxDepth) {
        return []
    }

    const entries = await fs.promises.readdir(dir)

    const nested = await Promise.all(
        entries.map(async (entry) => {
            const entryPath = path.join(dir, entry)
            const stat = await fs.promises.stat(entryPath)

            return stat.isDirectory()
                ? readDirRecurse(entryPath, maxDepth, current + 1)
                : entryPath
        }),
    )

    return nested.flat()
}

export default readDirRecurse

View File

@ -1,17 +0,0 @@
import os from "node:os"
/**
 * Maps Node's `os.platform()` to the platform labels used by the
 * binaries registry ('windows' | 'macos' | 'linux'); any other platform
 * string is passed through unchanged.
 *
 * @returns {string} Normalised platform label.
 */
export default () => {
    const platform = os.platform()

    switch (platform) {
        case "win32":
            return "windows"
        case "darwin":
            return "macos"
        case "linux":
            return "linux"
        default:
            return platform
    }
}

View File

@ -1,15 +0,0 @@
/**
 * Builds a platform-scoped download URL of the form
 * `<pre>/<platform>/<arch>/<post>`, where platform is 'mac', 'win' or 'linux'.
 *
 * @param {string} pre - Base URL prefix.
 * @param {string} post - File name suffix.
 * @returns {string} The assembled URL.
 */
export default (pre, post) => {
    let platformSegment = "linux"

    if (process.platform === "darwin") {
        platformSegment = "mac"
    } else if (process.platform === "win32") {
        platformSegment = "win"
    }

    return `${pre}/${platformSegment}/${process.arch}/${post}`
}

View File

@ -1,35 +0,0 @@
import path from "node:path"
import upath from "upath"
// True only on Windows. Fix: the previous check used
// `process.platform.includes("win")`, which also matches "darwin" and
// therefore pointed macOS at the Windows binary names (e.g. "7za.exe",
// "git.exe"), diverging from what the binaries registry downloads.
const isWin = process.platform === "win32"
const isMac = process.platform === "darwin"

// Name of the per-user runtime directory that holds all Relic state.
const runtimeName = "rs-relic"

// Base user-data directory: %APPDATA% on Windows, ~/Library/Preferences
// on macOS, ~/.local/share elsewhere.
const userdata_path = upath.normalizeSafe(path.resolve(
    process.env.APPDATA ||
    (process.platform == "darwin" ? process.env.HOME + "/Library/Preferences" : process.env.HOME + "/.local/share"),
))

const runtime_path = upath.normalizeSafe(path.join(userdata_path, runtimeName))
const cache_path = upath.normalizeSafe(path.join(runtime_path, "cache"))
const packages_path = upath.normalizeSafe(path.join(runtime_path, "packages"))
const binaries_path = upath.normalizeSafe(path.resolve(runtime_path, "binaries"))
const db_path = upath.normalizeSafe(path.resolve(runtime_path, "db.json"))

// Absolute paths to the bundled helper binaries, per platform.
const binaries = {
    sevenzip_bin: upath.normalizeSafe(path.resolve(binaries_path, "7z-bin", isWin ? "7za.exe" : "7za")),
    git_bin: upath.normalizeSafe(path.resolve(binaries_path, "git-bin", "bin", isWin ? "git.exe" : "git")),
    rclone_bin: upath.normalizeSafe(path.resolve(binaries_path, "rclone-bin", isWin ? "rclone.exe" : "rclone")),
    java_jre_bin: upath.normalizeSafe(path.resolve(binaries_path, "java_jre_bin", (isMac ? "Contents/Home/bin/java" : (isWin ? "bin/java.exe" : "bin/java")))),
}

export default {
    runtimeName,
    db_path,
    userdata_path,
    runtime_path,
    cache_path,
    packages_path,
    binaries_path,
    ...binaries,
}

View File

@ -1,41 +0,0 @@
# Secrets
/**/**/.env
/**/**/origin.server
/**/**/server.manifest
/**/**/server.registry
/**/**/*.secret.*
/**/**/_shared
# Trash
/**/**/*.log
/**/**/dumps.log
/**/**/.crash.log
/**/**/.tmp
/**/**/.cache
/**/**/cache
/**/**/out
/**/**/.out
/**/**/dist
/**/**/node_modules
/**/**/corenode_modules
/**/**/.DS_Store
/**/**/package-lock.json
/**/**/yarn.lock
/**/**/.evite
/**/**/build
/**/**/uploads
/**/**/d_data
/**/**/*.tar
/**/**/*.7z
/**/**/*.zip
/**/**/*.env
# Logs
/**/**/npm-debug.log*
/**/**/yarn-error.log
/**/**/dumps.log
/**/**/corenode.log
# Temporal configurations
/**/**/.aliaser

View File

@ -1,54 +0,0 @@
{
"name": "@ragestudio/relic-gui",
"version": "0.17.0",
"description": "RageStudio Relic, yet another package manager.",
"main": "./out/main/index.js",
"author": "RageStudio",
"license": "MIT",
"scripts": {
"start": "electron-vite preview",
"dev": "electron-vite dev",
"build": "electron-vite build",
"postinstall": "electron-builder install-app-deps",
"pack:win": "electron-builder --win --config",
"pack:mac": "electron-builder --mac --config",
"pack:linux": "electron-builder --linux --config",
"build:win": "npm run build && npm run pack:win",
"build:mac": "npm run build && npm run pack:mac",
"build:linux": "npm run build && npm run pack:linux"
},
"dependencies": {
"@electron-toolkit/preload": "^2.0.0",
"@electron-toolkit/utils": "^2.0.0",
"@getstation/electron-google-oauth2": "^14.0.0",
"@imjs/electron-differential-updater": "^5.1.7",
"@loadable/component": "^5.16.3",
"antd": "^5.13.2",
"classnames": "^2.3.2",
"electron-build": "^0.0.3",
"electron-differential-updater": "^4.3.2",
"electron-is-dev": "^2.0.0",
"electron-store": "^8.1.0",
"electron-updater": "^6.1.1",
"got": "11.8.3",
"human-format": "^1.2.0",
"less": "^4.2.0",
"lodash": "^4.17.21",
"react-icons": "^4.11.0",
"react-motion": "0.5.2",
"react-router-dom": "6.6.2",
"react-spinners": "^0.13.8",
"react-spring": "^9.7.3"
},
"devDependencies": {
"@ragestudio/hermes": "^0.1.1",
"protocol-registry": "^1.4.1",
"@vitejs/plugin-react": "^4.0.4",
"electron": "29.1.6",
"electron-builder": "24.6.3",
"electron-vite": "^2.1.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"vite": "^4.4.9"
}
}

View File

Before

Width:  |  Height:  |  Size: 11 KiB

After

Width:  |  Height:  |  Size: 11 KiB

View File

Before

Width:  |  Height:  |  Size: 28 KiB

After

Width:  |  Height:  |  Size: 28 KiB

View File

Before

Width:  |  Height:  |  Size: 1.4 KiB

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -1,35 +0,0 @@
// Links the local @core package into the cli and gui workspaces via
// `yarn link`, so they resolve it from source during development.
const path = require("path")
const child_process = require("child_process")

const packagesPath = path.resolve(__dirname, "..", "packages")
const linkRoot = path.resolve(packagesPath, "core")
const linkPackages = [
    path.resolve(packagesPath, "cli"),
    path.resolve(packagesPath, "gui"),
]

/**
 * Registers the core package with `yarn link`, then links it into every
 * dependent package.
 *
 * Fixes: `execSync` is synchronous, so the previous `await`s were no-ops
 * (and `main` no longer needs to be async); the invalid `stdout` option
 * (not part of execSync's API — `stdio: "inherit"` already covers it)
 * has been removed.
 */
function main() {
    console.log(`Linking @core to other packages...`)

    const rootPkg = require(path.resolve(linkRoot, "package.json"))

    child_process.execSync("yarn link", {
        cwd: linkRoot,
        stdio: "inherit",
    })

    for (const linkPackage of linkPackages) {
        child_process.execSync(`yarn link "${rootPkg.name}"`, {
            cwd: linkPackage,
            stdio: "inherit",
        })
    }

    console.log(`Done!`)
}

main()

Some files were not shown because too many files have changed in this diff Show More