Mirror of https://github.com/ragestudio/relic.git (synced 2025-06-09 18:44:17 +00:00)

Compare commits
113 Commits
0e3889220d
c996b215a3
87223efa21
6287f5fa50
98b6740839
e1d833a5f5
1e74f49e4e
21152ac248
f863c27411
8b3610c518
bd719202c1
93201a5c19
9d61fc2e05
fda084d861
2789b30051
d9e9772d24
f1257ec7f3
67db27cf28
765999eb8a
881d018bfc
feaee17391
4ca2410f0a
4cb93e9878
aa1f2f0913
64303dac7e
1c6f871de0
3e7f9963e9
f1fba82cdf
26753e6471
98d4676d8e
9af9d38fe1
83a3720361
a5f53ac5f3
d682e3fc65
edff94294a
29bb3bdbfe
6e2f82b52c
b205f935b3
079d644c83
ed2dbb732d
f313a7340f
faed0d8754
e187a49947
86a6effeb1
eef02a7bec
e283796395
3e4a6e0ca9
5abcd2640c
74bb53ada4
01d6031473
fd6358ef11
363ad1cd97
bcc889c6fa
4342339aae
93329a7328
f48c1e6524
7a4a760d6f
6be4fdcd12
f31371a5ca
87b3e3aed3
361d07d3f5
a84fab1882
3ae13ccc61
8bb6b1daec
7fec0f1f7d
6dae178bc5
25df0cf352
a95f566518
4cd09d8f44
42f3944e20
84030da722
c949eea9ac
d3a752c820
e8bb844df0
3b7d37ba7f
26179d09cc
37daf2fc16
b660c4e458
5047ddc539
98401fbf8d
b68c49118a
70056e82a1
f371ffce6a
50e9c2d5a3
406a10e3c5
be51e638e5
a30f875639
b43cb2073c
4ab0bc5975
fbb87e46d1
2fa3f10d3e
d916e85d87
0538e85fc6
d1f03113f1
d6170aafc3
d2f0cf9d10
f3db738c7f
4b76589c05
7c410f0a45
bdc0f6fbbd
fd64d24470
e7cd36a5b3
2088ee7ee5
6a85669d5a
7f17468781
48505fa80d
4ab392ba8b
b772dd12f7
e528a854bb
38b7f4022c
affe6a4bc3
9c57a002f3
46c77a15b0
@@ -1,9 +0,0 @@
root = true

[*]
charset = utf-8
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
56  .github/workflows/release.yml  vendored  Executable file
@@ -0,0 +1,56 @@
name: Build & Release GUI

on:
  push:
    tags:
      - v*.*.*

jobs:
  release:
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    steps:
      - name: Check out Git repository
        uses: actions/checkout@v3

      - name: Install Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18

      - name: Install Dependencies
        run: npm install -f

      - name: build-linux
        if: matrix.os == 'ubuntu-latest'
        run: cd ./packages/gui && npm run build:linux

      - name: build-mac
        if: matrix.os == 'macos-latest'
        run: cd ./packages/gui && npm run build:mac

      - name: build-win
        if: matrix.os == 'windows-latest'
        run: cd ./packages/gui && npm run build:win

      - name: release
        uses: softprops/action-gh-release@v1
        with:
          draft: true
          files: |
            dist/*.exe
            dist/*.zip
            dist/*.dmg
            dist/*.AppImage
            dist/*.snap
            dist/*.deb
            dist/*.rpm
            dist/*.tar.gz
            dist/*.yml
            dist/*.blockmap
        env:
          GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }}
0  .gitignore  vendored  Normal file → Executable file

@@ -1,6 +0,0 @@
out
dist
pnpm-lock.yaml
LICENSE.md
tsconfig.json
tsconfig.*.json
@@ -1,4 +0,0 @@
singleQuote: false
semi: false
printWidth: 100
trailingComma: none
3  .vscode/extensions.json  vendored
@@ -1,3 +0,0 @@
{
    "recommendations": ["dbaeumer.vscode-eslint"]
}
39  .vscode/launch.json  vendored
@@ -1,39 +0,0 @@
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Debug Main Process",
            "type": "node",
            "request": "launch",
            "cwd": "${workspaceRoot}",
            "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite",
            "windows": {
                "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite.cmd"
            },
            "runtimeArgs": ["--sourcemap"],
            "env": {
                "REMOTE_DEBUGGING_PORT": "9222"
            }
        },
        {
            "name": "Debug Renderer Process",
            "port": 9222,
            "request": "attach",
            "type": "chrome",
            "webRoot": "${workspaceFolder}/src/renderer",
            "timeout": 60000,
            "presentation": {
                "hidden": true
            }
        }
    ],
    "compounds": [
        {
            "name": "Debug All",
            "configurations": ["Debug Main Process", "Debug Renderer Process"],
            "presentation": {
                "order": 1
            }
        }
    ]
}
11  .vscode/settings.json  vendored
@@ -1,11 +0,0 @@
{
    "[typescript]": {
        "editor.defaultFormatter": "esbenp.prettier-vscode"
    },
    "[javascript]": {
        "editor.defaultFormatter": "vscode.typescript-language-features"
    },
    "[json]": {
        "editor.defaultFormatter": "esbenp.prettier-vscode"
    }
}
@@ -1,3 +0,0 @@
provider: generic
url: https://example.com/auto-updates
updaterCacheDirName: rs-bundler-updater
@@ -1,43 +0,0 @@
appId: com.ragestudio.bundler
productName: rs-bundler
directories:
  buildResources: build
files:
  - '!**/.vscode/*'
  - '!src/*'
  - '!electron.vite.config.{js,ts,mjs,cjs}'
  - '!{.eslintignore,.eslintrc.cjs,.prettierignore,.prettierrc.yaml,dev-app-update.yml,CHANGELOG.md,README.md}'
  - '!{.env,.env.*,.npmrc,pnpm-lock.yaml}'
asarUnpack:
  - resources/**
win:
  executableName: rs-bundler
  icon: resources/icon.ico
nsis:
  artifactName: ${name}-${version}-setup.${ext}
  shortcutName: ${productName}
  uninstallDisplayName: ${productName}
  createDesktopShortcut: always
mac:
  entitlementsInherit: build/entitlements.mac.plist
  extendInfo:
    - NSCameraUsageDescription: Application requests access to the device's camera.
    - NSMicrophoneUsageDescription: Application requests access to the device's microphone.
    - NSDocumentsFolderUsageDescription: Application requests access to the user's Documents folder.
    - NSDownloadsFolderUsageDescription: Application requests access to the user's Downloads folder.
  notarize: false
dmg:
  artifactName: ${name}-${version}.${ext}
linux:
  target:
    - AppImage
    - snap
    - deb
  maintainer: electronjs.org
  category: Utility
appImage:
  artifactName: ${name}-${version}.${ext}
npmRebuild: false
publish:
  provider: generic
  url: https://storage.ragestudio.net/rs-bundler/release
68  package.json  Normal file → Executable file
@@ -1,69 +1,9 @@
-{
-    "name": "rs-bundler",
-    "version": "0.10.2",
-    "description": "RageStudio Bundler Utility GUI",
-    "main": "./out/main/index.js",
-    "author": "RageStudio",
-    "license": "MIT",
-    "scripts": {
-        "format": "prettier --write .",
-        "lint": "eslint . --ext .js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix",
-        "start": "electron-vite preview",
-        "dev": "electron-vite dev",
-        "build": "electron-vite build",
-        "postinstall": "electron-builder install-app-deps",
-        "pack:win": "electron-builder --win --config",
-        "pack:mac": "electron-builder --mac --config",
-        "pack:linux": "electron-builder --linux --config",
-        "build:win": "npm run build && npm run pack:win",
-        "build:mac": "npm run build && npm run pack:mac",
-        "build:linux": "npm run build && npm run pack:linux"
-    },
-    "dependencies": {
-        "@electron-toolkit/preload": "^2.0.0",
-        "@electron-toolkit/utils": "^2.0.0",
-        "@getstation/electron-google-oauth2": "^14.0.0",
-        "@imjs/electron-differential-updater": "^5.1.7",
-        "@ragestudio/hermes": "^0.1.1",
-        "adm-zip": "^0.5.10",
-        "antd": "^5.13.2",
-        "checksum": "^1.0.0",
-        "classnames": "^2.3.2",
-        "electron-differential-updater": "^4.3.2",
-        "electron-is-dev": "^2.0.0",
-        "electron-store": "^8.1.0",
-        "electron-updater": "^6.1.1",
-        "googleapis": "^105.0.0",
-        "got": "11.8.3",
-        "human-format": "^1.2.0",
-        "less": "^4.2.0",
-        "lodash": "^4.17.21",
-        "node-7z": "^3.0.0",
-        "open": "8.4.2",
-        "progress-stream": "^2.0.0",
-        "protocol-registry": "^1.4.1",
-        "react-icons": "^4.11.0",
-        "react-router-dom": "^6.19.0",
-        "react-spinners": "^0.13.8",
-        "request": "^2.88.2",
-        "rimraf": "^5.0.5",
-        "unzipper": "^0.10.14",
-        "uuid": "^9.0.1",
-        "which": "^4.0.0",
-        "winreg": "^1.2.5"
-    },
-    "devDependencies": {
-        "@electron-toolkit/eslint-config": "^1.0.1",
-        "@electron-toolkit/eslint-config-prettier": "^1.0.1",
-        "@vitejs/plugin-react": "^4.0.4",
-        "electron": "^25.6.0",
-        "electron-builder": "^24.6.3",
-        "electron-vite": "^1.0.27",
-        "eslint": "^8.47.0",
-        "eslint-plugin-react": "^7.33.2",
-        "prettier": "^3.0.2",
-        "react": "^17.0.2",
-        "react-dom": "^17.0.2",
-        "vite": "^4.4.9"
-    }
-}
+{
+    "name": "@ragestudio/relic-core",
+    "repository": "https://github.com/srgooglo/rs_bundler",
+    "author": "SrGooglo <srgooglo@ragestudio.net>",
+    "license": "MIT",
+    "scripts": {
+        "postinstall": "node scripts/postinstall.js"
+    }
+}
2  packages/cli/bin  Executable file
@@ -0,0 +1,2 @@
#!/usr/bin/env node
require("./dist/index.js")
21  packages/cli/package.json  Executable file
@@ -0,0 +1,21 @@
{
    "name": "@ragestudio/relic-cli",
    "version": "0.17.0",
    "license": "MIT",
    "author": "RageStudio",
    "description": "RageStudio Relic, yet another package manager.",
    "main": "./dist/index.js",
    "bin": {
        "relic": "./bin.js"
    },
    "scripts": {
        "dev": "hermes-node ./src/index.js",
        "build": "hermes build"
    },
    "dependencies": {
        "commander": "^12.0.0"
    },
    "devDependencies": {
        "@ragestudio/hermes": "^0.1.1"
    }
}
169  packages/cli/src/index.js  Executable file
@@ -0,0 +1,169 @@
import RelicCore from "@ragestudio/relic-core"
import { program, Command, Argument } from "commander"

import pkg from "../package.json"

const commands = [
    {
        cmd: "install",
        description: "Install a package manifest from a path or URL",
        arguments: [
            {
                name: "package_manifest",
                description: "Path or URL to a package manifest",
            }
        ],
        fn: async (package_manifest, options) => {
            await core.initialize()
            await core.setup()

            return await core.package.install(package_manifest, options)
        }
    },
    {
        cmd: "run",
        description: "Execute a package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to execute",
            }
        ],
        fn: async (pkg_id, options) => {
            await core.initialize()
            await core.setup()

            return await core.package.execute(pkg_id, options)
        }
    },
    {
        cmd: "update",
        description: "Update a package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to update",
            }
        ],
        fn: async (pkg_id, options) => {
            await core.initialize()
            await core.setup()

            return await core.package.update(pkg_id, options)
        }
    },
    {
        cmd: "uninstall",
        description: "Uninstall a package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to uninstall",
            }
        ],
        fn: async (pkg_id, options) => {
            await core.initialize()

            return await core.package.uninstall(pkg_id, options)
        }
    },
    {
        cmd: "apply",
        description: "Apply changes to a installed package",
        arguments: [
            {
                name: "id",
                description: "The id of the package to apply changes to",
            },
        ],
        options: [
            {
                name: "add_patches",
                description: "Add patches to the package",
            },
            {
                name: "remove_patches",
                description: "Remove patches from the package",
            },
        ],
        fn: async (pkg_id, options) => {
            await core.initialize()

            return await core.package.apply(pkg_id, options)
        }
    },
    {
        cmd: "list",
        description: "List installed package manifests",
        fn: async () => {
            await core.initialize()

            return console.log(await core.package.list())
        }
    },
    {
        cmd: "open-path",
        description: "Open the base path or a package path",
        options: [
            {
                name: "pkg_id",
                description: "Path to open",
            }
        ],
        fn: async (options) => {
            await core.initialize()

            await core.openPath(options.pkg_id)
        }
    }
]

async function main() {
    global.core = new RelicCore()

    program
        .name(pkg.name)
        .description(pkg.description)
        .version(pkg.version)

    for await (const command of commands) {
        const cmd = new Command(command.cmd).action(command.fn)

        if (command.description) {
            cmd.description(command.description)
        }

        if (Array.isArray(command.arguments)) {
            for await (const argument of command.arguments) {
                if (typeof argument === "string") {
                    cmd.addArgument(new Argument(argument))
                } else {
                    const arg = new Argument(argument.name, argument.description)

                    if (argument.default) {
                        arg.default(argument.default)
                    }

                    cmd.addArgument(arg)
                }
            }
        }

        if (Array.isArray(command.options)) {
            for await (const option of command.options) {
                if (typeof option === "string") {
                    cmd.option(option)
                } else {
                    cmd.option(option.name, option.description, option.default)
                }
            }
        }

        program.addCommand(cmd)
    }

    program.parse()
}

main()
11  packages/core/.swcrc  Executable file
@@ -0,0 +1,11 @@
{
    "$schema": "http://json.schemastore.org/swcrc",
    "module": {
        "type": "commonjs",
        // These are defaults.
        "strict": false,
        "strictMode": true,
        "lazy": false,
        "noInterop": false
    }
}
45  packages/core/package.json  Executable file
@@ -0,0 +1,45 @@
{
    "name": "@ragestudio/relic-core",
    "version": "0.20.3",
    "license": "MIT",
    "author": "RageStudio",
    "description": "RageStudio Relic, yet another package manager.",
    "main": "./dist/index.js",
    "files": [
        "dist",
        "src"
    ],
    "scripts": {
        "build": "hermes build",
        "build:swc": "npx swc ./src --out-dir ./dist --strip-leading-paths"
    },
    "dependencies": {
        "@foxify/events": "^2.1.0",
        "adm-zip": "^0.5.12",
        "aria2": "^4.1.2",
        "axios": "^1.6.8",
        "checksum": "^1.0.0",
        "cli-color": "^2.0.4",
        "cli-progress": "^3.12.0",
        "deep-object-diff": "^1.1.9",
        "extends-classes": "^1.0.5",
        "googleapis": "^134.0.0",
        "human-format": "^1.2.0",
        "merge-stream": "^2.0.0",
        "module-alias": "^2.2.3",
        "node-7z": "^3.0.0",
        "open": "8.4.2",
        "request": "^2.88.2",
        "rimraf": "^5.0.5",
        "signal-exit": "^4.1.0",
        "unzipper": "^0.10.14",
        "upath": "^2.0.1",
        "uuid": "^9.0.1",
        "webtorrent": "^2.4.1",
        "winston": "^3.13.0"
    },
    "devDependencies": {
        "@swc/cli": "^0.3.12",
        "@swc/core": "^1.4.11"
    }
}
44  packages/core/src/classes/ManifestAuthDB.js  Executable file
@@ -0,0 +1,44 @@
import path from "path"
import { JSONFilePreset } from "../libraries/lowdb/presets/node"

import Vars from "../vars"

//! WARNING: Please DO NOT store any passwords or sensitive data here,
// since no encryption is used and values are stored in plain text.
// This is intended to store session tokens among other vars.

export default class ManifestAuthService {
    static vaultPath = path.resolve(Vars.runtime_path, "auth.json")

    static async withDB() {
        return await JSONFilePreset(ManifestAuthService.vaultPath, {})
    }

    static has = async (pkg_id) => {
        const db = await this.withDB()

        return !!db.data[pkg_id]
    }

    static set = async (pkg_id, value) => {
        const db = await this.withDB()

        return await db.update((data) => {
            data[pkg_id] = value
        })
    }

    static get = async (pkg_id) => {
        const db = await this.withDB()

        return await db.data[pkg_id]
    }

    static delete = async (pkg_id) => {
        const db = await this.withDB()

        return await db.update((data) => {
            delete data[pkg_id]
        })
    }
}
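Note: a minimal usage sketch (not part of the diff) of the ManifestAuthDB class defined above; the import path and the package id are hypothetical.

    import ManifestAuthDB from "./classes/ManifestAuthDB" // hypothetical path

    // store and read a session token keyed by package id
    await ManifestAuthDB.set("example-pkg", { token: "session-token" })
    const auth = await ManifestAuthDB.get("example-pkg") // -> { token: "session-token" }
    await ManifestAuthDB.delete("example-pkg")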
34  packages/core/src/classes/ManifestConfig.js  Executable file
@@ -0,0 +1,34 @@
import DB from "../db"

export default class ManifestConfigManager {
    constructor(pkg_id) {
        this.pkg_id = pkg_id
        this.config = null
    }

    async initialize() {
        const pkg = await DB.getPackages(this.pkg_id) ?? {}

        this.config = pkg.config
    }

    set(key, value) {
        this.config[key] = value

        DB.updatePackageById(this.pkg_id, { config: this.config })

        return this.config
    }

    get(key) {
        return this.config[key]
    }

    delete(key) {
        delete this.config[key]

        DB.updatePackageById(this.pkg_id, { config: this.config })

        return this.config
    }
}
149  packages/core/src/classes/PatchManager.js  Executable file
@@ -0,0 +1,149 @@
import Logger from "../logger"

import DB from "../db"
import fs from "node:fs"

import GenericSteps from "../generic_steps"
import parseStringVars from "../utils/parseStringVars"

export default class PatchManager {
    constructor(pkg, manifest) {
        this.pkg = pkg
        this.manifest = manifest

        this.log = Logger.child({ service: `PATCH-MANAGER|${pkg.id}` })
    }

    async get(select) {
        if (!this.manifest.patches) {
            return []
        }

        let list = []

        if (typeof select === "undefined") {
            list = this.manifest.patches
        }

        if (Array.isArray(select)) {
            for await (let id of select) {
                const patch = this.manifest.patches.find((patch) => patch.id === id)

                if (patch) {
                    list.push(patch)
                }
            }
        }

        return list
    }

    async reapply() {
        if (Array.isArray(this.pkg.applied_patches)) {
            return await this.patch(this.pkg.applied_patches)
        }

        return true
    }

    async patch(select) {
        const list = await this.get(select)

        for await (let patch of list) {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: this.pkg.id,
                status_text: `Applying patch [${patch.id}]...`,
            })

            this.log.info(`Applying patch [${patch.id}]...`)

            if (Array.isArray(patch.additions)) {
                this.log.info(`Applying ${patch.additions.length} Additions...`)

                for await (let addition of patch.additions) {
                    // resolve patch file
                    addition.file = await parseStringVars(addition.file, this.pkg)

                    if (fs.existsSync(addition.file)) {
                        this.log.info(`Addition [${addition.file}] already exists. Skipping...`)
                        continue
                    }

                    this.log.info(`Applying addition [${addition.file}]`)

                    global._relic_eventBus.emit(`pkg:update:state`, {
                        id: this.pkg.id,
                        status_text: `Applying addition [${addition.file}]`,
                    })

                    await GenericSteps(this.pkg, addition.steps, this.log)
                }
            }

            if (!this.pkg.applied_patches.includes(patch.id)) {
                this.pkg.applied_patches.push(patch.id)
            }
        }

        await DB.updatePackageById(this.pkg.id, { applied_patches: this.pkg.applied_patches })

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: this.pkg.id,
            status_text: `${list.length} Patches applied`,
        })

        this.log.info(`${list.length} Patches applied`)

        return this.pkg
    }

    async remove(select) {
        const list = await this.get(select)

        for await (let patch of list) {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: this.pkg.id,
                status_text: `Removing patch [${patch.id}]...`,
            })

            this.log.info(`Removing patch [${patch.id}]...`)

            if (Array.isArray(patch.additions)) {
                this.log.info(`Removing ${patch.additions.length} Additions...`)

                for await (let addition of patch.additions) {
                    addition.file = await parseStringVars(addition.file, this.pkg)

                    if (!fs.existsSync(addition.file)) {
                        this.log.info(`Addition [${addition.file}] does not exist. Skipping...`)
                        continue
                    }

                    this.log.info(`Removing addition [${addition.file}]`)

                    global._relic_eventBus.emit(`pkg:update:state`, {
                        id: this.pkg.id,
                        status_text: `Removing addition [${addition.file}]`,
                    })

                    await fs.promises.unlink(addition.file)
                }
            }

            this.pkg.applied_patches = this.pkg.applied_patches.filter((p) => {
                return p !== patch.id
            })
        }

        await DB.updatePackageById(this.pkg.id, { applied_patches: this.pkg.applied_patches })

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: this.pkg.id,
            status_text: `${list.length} Patches removed`,
        })

        this.log.info(`${list.length} Patches removed`)

        return this.pkg
    }
}
57  packages/core/src/classes/Settings.js  Executable file
@@ -0,0 +1,57 @@
import fs from "node:fs"
import path from "node:path"
import Vars from "../vars"

const settingsPath = path.resolve(Vars.runtime_path, "settings.json")

export default class Settings {
    static filePath = settingsPath

    static async initialize() {
        if (!fs.existsSync(settingsPath)) {
            await fs.promises.writeFile(settingsPath, "{}")
        }
    }

    static async read() {
        return JSON.parse(await fs.promises.readFile(settingsPath, "utf8"))
    }

    static async write(data) {
        await fs.promises.writeFile(settingsPath, JSON.stringify(data, null, 2))
    }

    static async get(key) {
        const data = await this.read()

        if (key) {
            return data[key]
        }

        return data
    }

    static async has(key) {
        const data = await this.read()

        return key in data
    }

    static async set(key, value) {
        const data = await this.read()

        data[key] = value

        await this.write(data)
    }

    static async delete(key) {
        const data = await this.read()

        delete data[key]

        await this.write(data)

        return data
    }
}
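Note: a minimal usage sketch (not part of the diff) of the Settings class above; the import path and the key/value are hypothetical.

    import Settings from "./classes/Settings" // hypothetical path

    await Settings.initialize()                      // creates settings.json if missing
    await Settings.set("install_dir", "/opt/relic")  // hypothetical key and value
    console.log(await Settings.get("install_dir"))
    console.log(await Settings.has("install_dir"))   // -> true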
118  packages/core/src/db.js  Executable file
@@ -0,0 +1,118 @@
import { JSONFilePreset } from "./libraries/lowdb/presets/node"
import Vars from "./vars"
import pkg from "../package.json"
import fs from "node:fs"

export default class DB {
    static get defaultRoot() {
        return {
            created_at_version: pkg.version,
            packages: [],
        }
    }

    static defaultPackageState({
        id,
        name,
        icon,
        version,
        author,
        install_path,
        description,
        license,
        last_status,
        remote_manifest,
        local_manifest,
        config,
        executable,
    }) {
        return {
            id: id,
            name: name,
            version: version,
            icon: icon,
            install_path: install_path,
            description: description,
            author: author,
            license: license ?? "unlicensed",
            local_manifest: local_manifest ?? null,
            remote_manifest: remote_manifest ?? null,
            applied_patches: [],
            config: typeof config === "object" ? config : {},
            last_status: last_status ?? "installing",
            last_update: null,
            installed_at: null,
            executable: executable ?? false,
        }
    }

    static async withDB() {
        return await JSONFilePreset(Vars.db_path, DB.defaultRoot)
    }

    static async initialize() {
        await this.cleanOrphans()
    }

    static async cleanOrphans() {
        const list = await this.getPackages()

        for (const pkg of list) {
            if (!fs.existsSync(pkg.install_path)) {
                await this.deletePackage(pkg.id)
            }
        }
    }

    static async getPackages(pkg_id) {
        const db = await this.withDB()

        if (pkg_id) {
            return db.data["packages"].find((i) => i.id === pkg_id)
        }

        return db.data["packages"]
    }

    static async writePackage(pkg) {
        const db = await this.withDB()

        const prevIndex = db.data["packages"].findIndex((i) => i.id === pkg.id)

        if (prevIndex !== -1) {
            db.data["packages"][prevIndex] = pkg
        } else {
            db.data["packages"].push(pkg)
        }

        await db.write()

        return db.data
    }

    static async updatePackageById(pkg_id, obj) {
        let pkg = await this.getPackages(pkg_id)

        if (!pkg) {
            throw new Error("Package not found")
        }

        return await this.writePackage({
            ...pkg,
            ...obj,
        })
    }

    static async deletePackage(pkg_id) {
        const db = await this.withDB()

        await db.update((data) => {
            data["packages"] = data["packages"].filter((i) => i.id !== pkg_id)

            return data
        })

        return pkg_id
    }
}
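Note: a minimal usage sketch (not part of the diff) of the DB class above; the import path and all package fields are hypothetical.

    import DB from "./db" // hypothetical path

    // create a package record with the defaults defined above, then persist it
    const pkg = DB.defaultPackageState({
        id: "example-pkg",
        name: "Example Package",
        version: "1.0.0",
        install_path: "/tmp/example-pkg",
    })

    await DB.writePackage(pkg)
    await DB.updatePackageById("example-pkg", { last_status: "installed" })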
50  packages/core/src/generic_steps/git_clone.js  Executable file
@@ -0,0 +1,50 @@
import Logger from "../logger"

import path from "node:path"
import fs from "node:fs"
import upath from "upath"
import { execa } from "../libraries/execa"

import Vars from "../vars"

export default async (pkg, step) => {
    if (!step.path) {
        step.path = `.`
    }

    const Log = Logger.child({ service: `GIT|${pkg.id}` })

    const gitCMD = fs.existsSync(Vars.git_bin) ? `${Vars.git_bin}` : "git"
    const final_path = upath.normalizeSafe(path.resolve(pkg.install_path, step.path))

    if (!fs.existsSync(final_path)) {
        fs.mkdirSync(final_path, { recursive: true })
    }

    Log.info(`Cloning from [${step.url}]`)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Cloning from [${step.url}]`,
    })

    const args = [
        "clone",
        //`--depth ${step.depth ?? 1}`,
        //"--filter=blob:none",
        //"--filter=tree:0",
        "--progress",
        "--recurse-submodules",
        "--remote-submodules",
        step.url,
        final_path,
    ]

    await execa(gitCMD, args, {
        cwd: final_path,
        stdout: "inherit",
        stderr: "inherit",
    })

    return pkg
}
33  packages/core/src/generic_steps/git_pull.js  Executable file
@@ -0,0 +1,33 @@
import Logger from "../logger"

import path from "node:path"
import fs from "node:fs"
import { execa } from "../libraries/execa"

import Vars from "../vars"

export default async (pkg, step) => {
    if (!step.path) {
        step.path = `.`
    }

    const Log = Logger.child({ service: `GIT|${pkg.id}` })

    const gitCMD = fs.existsSync(Vars.git_bin) ? `${Vars.git_bin}` : "git"
    const _path = path.resolve(pkg.install_path, step.path)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Pulling...`,
    })

    Log.info(`Pulling from HEAD...`)

    await execa(gitCMD, ["pull", "--rebase"], {
        cwd: _path,
        stdout: "inherit",
        stderr: "inherit",
    })

    return pkg
}
83  packages/core/src/generic_steps/git_reset.js  Executable file
@@ -0,0 +1,83 @@
import Logger from "../logger"

import path from "node:path"
import fs from "node:fs"
import { execa } from "../libraries/execa"

import git_pull from "./git_pull"
import Vars from "../vars"

export default async (pkg, step) => {
    if (!step.path) {
        step.path = `.`
    }

    const Log = Logger.child({ service: `GIT|${pkg.id}` })

    const gitCMD = fs.existsSync(Vars.git_bin) ? `${Vars.git_bin}` : "git"

    const _path = path.resolve(pkg.install_path, step.path)
    const from = step.from ?? "HEAD"

    if (!fs.existsSync(_path)) {
        fs.mkdirSync(_path, { recursive: true })
    }

    Log.info(`Fetching from origin`)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Fetching from origin...`,
    })

    // fetch from origin
    await execa(gitCMD, ["fetch", "origin"], {
        cwd: _path,
        stdout: "inherit",
        stderr: "inherit",
    })

    Log.info(`Cleaning untracked files...`)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Cleaning untracked files...`,
    })

    await execa(gitCMD, ["clean", "-df"], {
        cwd: _path,
        stdout: "inherit",
        stderr: "inherit",
    })

    Log.info(`Resetting to ${from}`)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Resetting to ${from}`,
    })

    await execa(gitCMD, ["reset", "--hard", from], {
        cwd: _path,
        stdout: "inherit",
        stderr: "inherit",
    })

    // pull the latest
    await git_pull(pkg, step)

    Log.info(`Checkout to HEAD`)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Checkout to HEAD`,
    })

    await execa(gitCMD, ["checkout", "HEAD"], {
        cwd: _path,
        stdout: "inherit",
        stderr: "inherit",
    })

    return pkg
}
71  packages/core/src/generic_steps/http.js  Executable file
@@ -0,0 +1,71 @@
import path from "node:path"
import fs from "node:fs"
import os from "node:os"

import downloadHttpFile from "../helpers/downloadHttpFile"
import parseStringVars from "../utils/parseStringVars"
import extractFile from "../utils/extractFile"

export default async (pkg, step, logger, abortController) => {
    if (!step.path) {
        step.path = `./${path.basename(step.url)}`
    }

    step.path = await parseStringVars(step.path, pkg)

    let _path = path.resolve(pkg.install_path, step.path)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Downloading [${step.url}]`,
    })

    logger.info(`Downloading [${step.url} to ${_path}]`)

    if (step.tmp) {
        _path = path.resolve(os.tmpdir(), String(new Date().getTime()), path.basename(step.url))
    }

    fs.mkdirSync(path.resolve(_path, ".."), { recursive: true })

    await downloadHttpFile(
        step.url,
        _path,
        (progress) => {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                use_id_only: true,
                status_text: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
            })
        },
        abortController
    )

    logger.info(`Downloaded finished.`)

    if (step.extract) {
        if (typeof step.extract === "string") {
            step.extract = path.resolve(pkg.install_path, step.extract)
        } else {
            step.extract = path.resolve(pkg.install_path, ".")
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Extracting bundle...`,
        })

        await extractFile(_path, step.extract)

        if (step.deleteAfterExtract !== false) {
            logger.info(`Deleting temporal file [${_path}]...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Deleting temporal files...`,
            })

            await fs.promises.rm(_path, { recursive: true })
        }
    }
}
55  packages/core/src/generic_steps/index.js  Executable file
@@ -0,0 +1,55 @@
import Logger from "../logger"

import ISM_GIT_CLONE from "./git_clone"
import ISM_GIT_PULL from "./git_pull"
import ISM_GIT_RESET from "./git_reset"
import ISM_HTTP from "./http"
import ISM_TORRENT from "./torrent"

const InstallationStepsMethods = {
    git_clone: ISM_GIT_CLONE,
    git_pull: ISM_GIT_PULL,
    git_reset: ISM_GIT_RESET,
    http_file: ISM_HTTP,
    torrent: ISM_TORRENT,
}

const StepsOrders = [
    "git_clones",
    "git_pull",
    "git_reset",
    "torrent",
    "http_file",
]

export default async function processGenericSteps(pkg, steps, logger = Logger, abortController) {
    logger.info(`Processing generic steps...`)

    if (!Array.isArray(steps)) {
        throw new Error(`Steps must be an array`)
    }

    if (steps.length === 0) {
        return pkg
    }

    steps = steps.sort((a, b) => {
        return StepsOrders.indexOf(a.type) - StepsOrders.indexOf(b.type)
    })

    for await (let step of steps) {
        step.type = step.type.toLowerCase()

        if (abortController?.signal?.aborted) {
            return false
        }

        if (!InstallationStepsMethods[step.type]) {
            throw new Error(`Unknown step: ${step.type}`)
        }

        await InstallationStepsMethods[step.type](pkg, step, logger, abortController)
    }

    return pkg
}
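Note: a sketch (not part of the diff) of a steps array that processGenericSteps above would accept; the URLs and paths are hypothetical, and the step types map to the handlers registered in InstallationStepsMethods.

    const steps = [
        { type: "git_clone", url: "https://github.com/example/repo.git", path: "." },
        { type: "http_file", url: "https://example.com/assets.zip", extract: ".", tmp: true },
    ]

    // pkg and Log come from the caller (see handlers/install.js below)
    await processGenericSteps(pkg, steps, Log, new AbortController())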
48  packages/core/src/generic_steps/torrent.js  Executable file
@@ -0,0 +1,48 @@
import path from "node:path"
import fs from "node:fs"
import os from "node:os"

import parseStringVars from "../utils/parseStringVars"
import downloadTorrent from "../helpers/downloadTorrent"

export default async (pkg, step, logger, abortController) => {
    if (!step.magnet) {
        throw new Error(`Magnet is required for torrent step`)
    }

    if (typeof step.path === "undefined") {
        step.path = `.`
    }

    step.path = await parseStringVars(step.path, pkg)

    let _path = path.resolve(pkg.install_path, step.path)

    global._relic_eventBus.emit(`pkg:update:state`, {
        id: pkg.id,
        status_text: `Preparing torrent...`,
    })

    logger.info(`Preparing torrent with magnet => [${step.magnet}]`)

    if (step.tmp) {
        _path = path.resolve(os.tmpdir(), String(new Date().getTime()))
    }

    const parentDir = path.resolve(_path, "..")

    if (!fs.existsSync(parentDir)) {
        fs.mkdirSync(parentDir, { recursive: true })
    }

    await downloadTorrent(step.magnet, _path, {
        onProgress: (progress) => {
            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                use_id_only: true,
                status_text: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
            })
        },
        taskId: pkg.id
    })
}
98  packages/core/src/handlers/apply.js  Executable file
@@ -0,0 +1,98 @@
import Logger from "../logger"

import PatchManager from "../classes/PatchManager"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import DB from "../db"

const BaseLog = Logger.child({ service: "APPLIER" })

function findPatch(patches, applied_patches, changes, mustBeInstalled) {
    return patches.filter((patch) => {
        const patchID = patch.id

        if (typeof changes.patches[patchID] === "undefined") {
            return false
        }

        if (mustBeInstalled === true && !applied_patches.includes(patch.id) && changes.patches[patchID] === true) {
            return true
        }

        if (mustBeInstalled === false && applied_patches.includes(patch.id) && changes.patches[patchID] === false) {
            return true
        }

        return false
    }).map((patch) => patch.id)
}

export default async function apply(pkg_id, changes = {}) {
    try {
        let pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.error(`Package not found [${pkg_id}]`)
            return null
        }

        let manifest = await ManifestReader(pkg.local_manifest)
        manifest = await ManifestVM(manifest.code)

        const Log = Logger.child({ service: `APPLIER|${pkg.id}` })

        Log.info(`Applying changes to package...`)
        Log.info(`Changes: ${JSON.stringify(changes)}`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Applying changes to package...`,
            last_status: "loading",
        })

        if (changes.patches) {
            if (!Array.isArray(pkg.applied_patches)) {
                pkg.applied_patches = []
            }

            const patches = new PatchManager(pkg, manifest)

            await patches.remove(findPatch(manifest.patches, pkg.applied_patches, changes, false))
            await patches.patch(findPatch(manifest.patches, pkg.applied_patches, changes, true))

            pkg = await DB.getPackages(pkg_id)
        }

        if (changes.config) {
            Log.info(`Applying config to package...`)

            if (Object.keys(changes.config).length !== 0) {
                Object.entries(changes.config).forEach(([key, value]) => {
                    pkg.config[key] = value
                })
            }
        }

        await DB.writePackage(pkg)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: "All changes applied",
        })

        Log.info(`All changes applied to package.`)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            event: "apply",
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to apply changes to package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}
33  packages/core/src/handlers/authorize.js  Executable file
@@ -0,0 +1,33 @@
import ManifestAuthDB from "../classes/ManifestAuthDB"
import DB from "../db"

import Logger from "../logger"

const Log = Logger.child({ service: "AUTH" })

export default async (pkg_id, value) => {
    if (!pkg_id) {
        Log.error("pkg_id is required")
        return false
    }

    if (!value) {
        Log.error("value is required")
        return false
    }

    const pkg = await DB.getPackages(pkg_id)

    if (!pkg) {
        Log.error("Package not found")
        return false
    }

    Log.info(`Setting auth for [${pkg_id}]`)

    await ManifestAuthDB.set(pkg_id, value)

    global._relic_eventBus.emit("pkg:authorized", pkg)

    return true
}
44  packages/core/src/handlers/cancelInstall.js  Executable file
@@ -0,0 +1,44 @@
import Logger from "../logger"

import DB from "../db"

import UninstallHandler from "./uninstall"

const BaseLog = Logger.child({ service: "CANCEL_INSTALL" })

export default async function reinstall(pkg_id) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.info(`Package not found [${pkg_id}]`)
            return null
        }

        global._relic_eventBus.emit(`pkg:install:cancel`, pkg_id)
        global._relic_eventBus.emit(`pkg:install:cancel:${pkg_id}`, pkg_id)
        global._relic_eventBus.emit(`task:cancel:${pkg_id}`, pkg_id)

        const task = globalThis.relic_core.tasks.find((task) => task.id === pkg_id)

        if (task) {
            BaseLog.warn(`Task not found [${pkg_id}]`)
            await task.abortController.abort()
        }

        await UninstallHandler(pkg_id)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            event: "cancel_install",
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to cancel installation package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}
43  packages/core/src/handlers/checkUpdate.js  Executable file
@@ -0,0 +1,43 @@
import Logger from "../logger"
import DB from "../db"

import softRead from "./read"

const Log = Logger.child({ service: "CHECK_UPDATE" })

export default async function checkUpdate(pkg_id) {
    const pkg = await DB.getPackages(pkg_id)

    if (!pkg) {
        Log.error("Package not found")
        return false
    }

    Log.info(`Checking update for [${pkg_id}]`)

    const remoteSoftManifest = await softRead(pkg.remote_manifest, {
        soft: true
    })

    if (!remoteSoftManifest) {
        Log.error("Cannot read remote manifest")
        return false
    }

    if (pkg.version === remoteSoftManifest.version) {
        Log.info("No update available")
        return false
    }

    Log.info("Update available")
    Log.info("Local:", pkg.version)
    Log.info("Remote:", remoteSoftManifest.version)
    Log.info("Changelog:", remoteSoftManifest.changelog_url)

    return {
        id: pkg.id,
        local: pkg.version,
        remote: remoteSoftManifest.version,
        changelog: remoteSoftManifest.changelog_url,
    }
}
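Note: a minimal usage sketch (not part of the diff) of the checkUpdate handler above; the package id is hypothetical.

    const update = await checkUpdate("example-pkg")

    if (update) {
        console.log(`Update available: ${update.local} -> ${update.remote}`)
    }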
28  packages/core/src/handlers/deauthorize.js  Executable file
@@ -0,0 +1,28 @@
import ManifestAuthDB from "../classes/ManifestAuthDB"
import DB from "../db"

import Logger from "../logger"

const Log = Logger.child({ service: "AUTH" })

export default async (pkg_id) => {
    if (!pkg_id) {
        Log.error("pkg_id is required")
        return false
    }

    const pkg = await DB.getPackages(pkg_id)

    if (!pkg) {
        Log.error("Package not found")
        return false
    }

    Log.info(`Deleting auth for [${pkg_id}]`)

    await ManifestAuthDB.delete(pkg_id)

    global._relic_eventBus.emit("pkg:deauthorized", pkg)

    return true
}
96  packages/core/src/handlers/execute.js  Executable file
@@ -0,0 +1,96 @@
import Logger from "../logger"

import fs from "node:fs"

import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import parseStringVars from "../utils/parseStringVars"
import { execa } from "../libraries/execa"

const BaseLog = Logger.child({ service: "EXECUTER" })

export default async function execute(pkg_id, { useRemote = false, force = false } = {}) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.info(`Package not found [${pkg_id}]`)
            return false
        }

        if (pkg.last_status !== "installed") {
            if (!force) {
                BaseLog.info(`Package not installed [${pkg_id}], aborting execution`)

                global._relic_eventBus.emit(`pkg:error`, {
                    id: pkg_id,
                    event: "execute",
                    error: new Error("Package not valid or not installed"),
                })

                return false
            }
        }

        const manifestPath = useRemote ? pkg.remote_manifest : pkg.local_manifest

        if (!fs.existsSync(manifestPath)) {
            BaseLog.error(`Manifest not found in expected path [${manifestPath}]
            \nMaybe the package installation has not been completed yet or corrupted.
            `)

            return false
        }

        BaseLog.info(`Executing manifest > [${manifestPath}]`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "loading",
            status_text: null,
        })

        const ManifestRead = await ManifestReader(manifestPath)

        const manifest = await ManifestVM(ManifestRead.code)

        if (typeof manifest.execute === "function") {
            await manifest.execute(pkg)
        }

        if (typeof manifest.execute === "string") {
            manifest.execute = parseStringVars(manifest.execute, pkg)

            BaseLog.info(`Executing binary > [${manifest.execute}]`)

            const args = Array.isArray(manifest.execute_args) ? manifest.execute_args : []

            await execa(manifest.execute, args, {
                cwd: pkg.install_path,
                stdout: "inherit",
                stderr: "inherit",
            })
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "installed",
            status_text: null,
        })

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            id: pkg_id,
            event: "execute",
            last_status: "installed",
            error,
        })

        BaseLog.error(`Failed to execute package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}
234
packages/core/src/handlers/install.js
Executable file
@ -0,0 +1,234 @@
import Logger from "../logger"

import fs from "node:fs"

import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"
import GenericSteps from "../generic_steps"
import Apply from "../handlers/apply"

const BaseLog = Logger.child({ service: "INSTALLER" })

export default async function install(manifest, options = {}) {
    let id = null
    let abortController = new AbortController()

    try {
        BaseLog.info(`Invoking new installation...`)
        BaseLog.info(`Fetching manifest [${manifest}]`)

        const ManifestRead = await ManifestReader(manifest)

        manifest = await ManifestVM(ManifestRead.code)

        id = manifest.constructor.id

        globalThis.relic_core.tasks.push({
            type: "install",
            id: id,
            abortController: abortController,
        })

        const Log = BaseLog.child({ service: `INSTALLER|${id}` })

        Log.info(`Creating install path [${manifest.install_path}]`)

        if (abortController.signal.aborted) {
            return false
        }

        if (fs.existsSync(manifest.install_path)) {
            Log.info(`Package already exists, removing...`)
            await fs.rmSync(manifest.install_path, { recursive: true })
        }

        await fs.mkdirSync(manifest.install_path, { recursive: true })

        Log.info(`Initializing manifest...`)

        if (abortController.signal.aborted) {
            return false
        }

        if (typeof manifest.initialize === "function") {
            await manifest.initialize()
        }

        Log.info(`Appending to db...`)

        if (abortController.signal.aborted) {
            return false
        }

        let pkg = DB.defaultPackageState({
            ...manifest.constructor,
            id: id,
            name: manifest.constructor.pkg_name,
            version: manifest.constructor.version,
            install_path: manifest.install_path,
            description: manifest.constructor.description,
            license: manifest.constructor.license,
            last_status: "installing",
            remote_manifest: ManifestRead.remote_manifest,
            local_manifest: ManifestRead.local_manifest,
            executable: !!manifest.execute
        })

        await DB.writePackage(pkg)

        global._relic_eventBus.emit("pkg:new", pkg)

        if (abortController.signal.aborted) {
            return false
        }

        if (manifest.configuration) {
            Log.info(`Applying default config to package...`)

            pkg.config = Object.entries(manifest.configuration).reduce((acc, [key, value]) => {
                acc[key] = value.default

                return acc
            }, {})
        }

        if (abortController.signal.aborted) {
            return false
        }

        if (typeof manifest.beforeInstall === "function") {
            Log.info(`Executing beforeInstall hook...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing beforeInstall hook...`,
            })

            await manifest.beforeInstall(pkg)
        }

        if (abortController.signal.aborted) {
            return false
        }

        if (Array.isArray(manifest.installSteps) && !options.noInstallSteps) {
            Log.info(`Executing generic install steps...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing generic install steps...`,
            })

            await GenericSteps(pkg, manifest.installSteps, Log, abortController)
        }

        if (abortController.signal.aborted) {
            return false
        }

        if (typeof manifest.afterInstall === "function") {
            Log.info(`Executing afterInstall hook...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing afterInstall hook...`,
            })

            await manifest.afterInstall(pkg)
        }

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Finishing up...`,
        })

        Log.info(`Copying manifest to the final location...`)

        const finalPath = `${manifest.install_path}/.rmanifest`

        if (abortController.signal.aborted) {
            return false
        }

        if (fs.existsSync(finalPath)) {
            await fs.promises.unlink(finalPath)
        }

        await fs.promises.copyFile(ManifestRead.local_manifest, finalPath)

        if (ManifestRead.is_catched) {
            Log.info(`Removing cache manifest...`)
            await fs.promises.unlink(ManifestRead.local_manifest)
        }

        pkg.local_manifest = finalPath
        pkg.last_status = "loading"
        pkg.installed_at = Date.now()

        await DB.writePackage(pkg)

        if (abortController.signal.aborted) {
            return false
        }

        if (manifest.patches) {
            const defaultPatches = manifest.patches.filter((patch) => patch.default)

            if (defaultPatches.length > 0) {
                Log.info(`Applying default patches...`)

                global._relic_eventBus.emit(`pkg:update:state`, {
                    id: pkg.id,
                    status_text: `Applying default patches...`,
                })

                pkg = await Apply(id, {
                    patches: Object.fromEntries(defaultPatches.map((patch) => [patch.id, true])),
                })
            }
        }

        pkg.last_status = "installed"

        if (abortController.signal.aborted) {
            return false
        }

        await DB.writePackage(pkg)

        globalThis.relic_core.tasks.filter((task) => task.id !== id)

        global._relic_eventBus.emit(`pkg:update:state`, {
            ...pkg,
            id: pkg.id,
            last_status: "installed",
            status_text: `Installation completed successfully`,
        })

        global._relic_eventBus.emit(`pkg:new:done`, pkg)

        Log.info(`Package installed successfully!`)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            id: id ?? manifest.constructor.id,
            event: "install",
            error,
        })

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: id ?? manifest.constructor.id,
            last_status: "failed",
            status_text: `Installation failed`,
        })

        globalThis.relic_core.tasks.filter((task) => task.id !== id)

        BaseLog.error(`Error during installation of package [${id}] >`, error)
        BaseLog.error(error.stack)

        return null
    }
}
80
packages/core/src/handlers/lastOperationRetry.js
Executable file
@ -0,0 +1,80 @@
import fs from "node:fs"
import path from "node:path"

import Logger from "../logger"
import DB from "../db"

import PackageInstall from "./install"
import PackageUpdate from "./update"
import PackageUninstall from "./uninstall"

import Vars from "../vars"

export default async function lastOperationRetry(pkg_id) {
    try {
        const Log = Logger.child({ service: `OPERATION_RETRY|${pkg_id}` })
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            Log.error(`This package doesn't exist`)
            return null
        }

        Log.info(`Try performing last operation retry...`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Performing last operation retry...`,
        })

        switch (pkg.last_status) {
            case "installing":
                await PackageInstall(pkg.local_manifest)
                break
            case "updating":
                await PackageUpdate(pkg_id)
                break
            case "uninstalling":
                await PackageUninstall(pkg_id)
                break
            case "failed": {
                // copy pkg.local_manifest to cache after uninstall
                const cachedManifest = path.join(Vars.cache_path, `${Date.now()}${path.basename(pkg.local_manifest)}`)

                if (!fs.existsSync(Vars.cache_path)) {
                    await fs.promises.mkdir(Vars.cache_path, { recursive: true })
                }

                await fs.promises.copyFile(pkg.local_manifest, cachedManifest)

                await PackageUninstall(pkg_id)
                await PackageInstall(cachedManifest)
                break
            }
            default: {
                Log.error(`Invalid last status: ${pkg.last_status}`)

                global._relic_eventBus.emit(`pkg:error`, {
                    id: pkg.id,
                    event: "retrying last operation",
                    status_text: `Performing last operation retry...`,
                })

                return null
            }
        }

        return pkg
    } catch (error) {
        Logger.error(`Failed to perform last operation retry of [${pkg_id}]`)
        Logger.error(error)

        global._relic_eventBus.emit(`pkg:error`, {
            event: "retrying last operation",
            id: pkg_id,
            error: error,
        })

        return null
    }
}
5
packages/core/src/handlers/list.js
Executable file
@ -0,0 +1,5 @@
import DB from "../db"

export default async function list() {
    return await DB.getPackages()
}
9
packages/core/src/handlers/read.js
Executable file
@ -0,0 +1,9 @@
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"

export default async function softRead(manifest, options = {}) {
    const Reader = await ManifestReader(manifest)
    const VM = await ManifestVM(Reader.code, options)

    return VM
}
35
packages/core/src/handlers/reinstall.js
Executable file
@ -0,0 +1,35 @@
import Logger from "../logger"

import DB from "../db"

import UninstallHandler from "./uninstall"
import InstallHandler from "./install"

const BaseLog = Logger.child({ service: "REINSTALL" })

export default async function reinstall(pkg_id) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.info(`Package not found [${pkg_id}]`)
            return null
        }

        await UninstallHandler(pkg_id)
        await InstallHandler(pkg.remote_manifest)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            event: "reinstall",
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to reinstall package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}
87
packages/core/src/handlers/uninstall.js
Executable file
@ -0,0 +1,87 @@
import Logger from "../logger"

import DB from "../db"
import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"

import { rimraf } from "rimraf"

const BaseLog = Logger.child({ service: "UNINSTALLER" })

export default async function uninstall(pkg_id) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.info(`Package not found [${pkg_id}]`)
            return null
        }

        const Log = Logger.child({ service: `UNINSTALLER|${pkg.id}` })

        Log.info(`Uninstalling package...`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Uninstalling package...`,
        })

        try {
            const ManifestRead = await ManifestReader(pkg.local_manifest)
            const manifest = await ManifestVM(ManifestRead.code)

            if (typeof manifest.uninstall === "function") {
                Log.info(`Performing uninstall hook...`)

                global._relic_eventBus.emit(`pkg:update:state`, {
                    id: pkg.id,
                    status_text: `Performing uninstall hook...`,
                })

                await manifest.uninstall(pkg)
            }
        } catch (error) {
            Log.error(`Failed to perform uninstall hook`, error)
            global._relic_eventBus.emit(`pkg:error`, {
                event: "uninstall",
                id: pkg.id,
                error
            })
        }

        Log.info(`Deleting package directory...`)
        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Deleting package directory...`,
        })
        await rimraf(pkg.install_path)

        Log.info(`Removing package from database...`)
        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            status_text: `Removing package from database...`,
        })
        await DB.deletePackage(pkg.id)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "deleted",
            status_text: `Uninstalling package...`,
        })
        global._relic_eventBus.emit(`pkg:remove`, pkg)
        Log.info(`Package uninstalled successfully!`)

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            event: "uninstall",
            id: pkg_id,
            error
        })

        BaseLog.error(`Failed to uninstall package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}
139
packages/core/src/handlers/update.js
Executable file
@ -0,0 +1,139 @@
import Logger from "../logger"

import DB from "../db"

import ManifestReader from "../manifest/reader"
import ManifestVM from "../manifest/vm"

import GenericSteps from "../generic_steps"
import PatchManager from "../classes/PatchManager"

const BaseLog = Logger.child({ service: "UPDATER" })

const AllowedPkgChanges = [
    "id",
    "name",
    "version",
    "description",
    "author",
    "license",
    "icon",
    "core_minimum_version",
    "remote_manifest",
]

const ManifestKeysMap = {
    "name": "pkg_name",
}

export default async function update(pkg_id) {
    try {
        const pkg = await DB.getPackages(pkg_id)

        if (!pkg) {
            BaseLog.error(`Package not found [${pkg_id}]`)

            return null
        }

        const Log = BaseLog.child({ service: `UPDATER|${pkg.id}` })

        let ManifestRead = await ManifestReader(pkg.local_manifest)
        let manifest = await ManifestVM(ManifestRead.code)

        global._relic_eventBus.emit(`pkg:update:state`, {
            id: pkg.id,
            last_status: "updating",
            status_text: `Updating package...`,
        })

        pkg.last_status = "updating"

        await DB.writePackage(pkg)

        if (typeof manifest.update === "function") {
            Log.info(`Performing update hook...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing update hook...`,
            })

            await manifest.update(pkg)
        }

        if (manifest.updateSteps) {
            Log.info(`Performing update steps...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing update steps...`,
            })

            await GenericSteps(pkg, manifest.updateSteps, Log)
        }

        if (Array.isArray(pkg.applied_patches)) {
            const patchManager = new PatchManager(pkg, manifest)

            await patchManager.reapply()
        }

        if (typeof manifest.afterUpdate === "function") {
            Log.info(`Performing after update hook...`)

            global._relic_eventBus.emit(`pkg:update:state`, {
                id: pkg.id,
                status_text: `Performing after update hook...`,
            })

            await manifest.afterUpdate(pkg)
        }

        ManifestRead = await ManifestReader(pkg.local_manifest)
        manifest = await ManifestVM(ManifestRead.code)

        // override public static values
        for await (const key of AllowedPkgChanges) {
            if (key in manifest.constructor) {
                const mapKey = ManifestKeysMap[key] || key
                pkg[key] = manifest.constructor[mapKey]
            }
        }

        pkg.last_status = "installed"
        pkg.last_update = Date.now()

        await DB.writePackage(pkg)

        Log.info(`Package updated successfully`)

        global._relic_eventBus.emit(`pkg:update:state`, {
            ...pkg,
            id: pkg.id,
        })

        return pkg
    } catch (error) {
        global._relic_eventBus.emit(`pkg:error`, {
            event: "update",
            id: pkg_id,
            error,
            last_status: "failed"
        })

        try {
            await DB.updatePackageById(pkg_id, {
                last_status: "failed",
            })
        } catch (error) {
            BaseLog.error(`Failed to update status of pkg [${pkg_id}]`)
            BaseLog.error(error.stack)
        }

        BaseLog.error(`Failed to update package [${pkg_id}]`, error)
        BaseLog.error(error.stack)

        return null
    }
}
74
packages/core/src/helpers/downloadHttpFile.js
Executable file
@ -0,0 +1,74 @@
import fs from "node:fs"
import axios from "axios"
import humanFormat from "human-format"
import cliProgress from "cli-progress"

function convertSize(size) {
    return `${humanFormat(size, {
        decimals: 2,
    })}B`
}

export default async (url, destination, progressCallback, abortController) => {
    const progressBar = new cliProgress.SingleBar({
        format: "[{bar}] {percentage}% | {total_formatted} | {speed}/s | {eta_formatted}",
        barCompleteChar: "\u2588",
        barIncompleteChar: "\u2591",
        hideCursor: true
    }, cliProgress.Presets.shades_classic)

    const { data: remoteStream, headers } = await axios.get(url, {
        responseType: "stream",
        signal: abortController?.signal,
    })

    const localStream = fs.createWriteStream(destination)

    let progress = {
        total: Number(headers["content-length"] ?? 0),
        transferred: 0,
        speed: 0,
    }

    let lastTickTransferred = 0

    progressBar.start(progress.total, 0, {
        speed: "0B/s",
        total_formatted: convertSize(progress.total),
    })

    remoteStream.pipe(localStream)

    remoteStream.on("data", (data) => {
        progress.transferred = progress.transferred + Buffer.byteLength(data)
    })

    const progressInterval = setInterval(() => {
        progress.speed = ((progress.transferred ?? 0) - lastTickTransferred) / 1

        lastTickTransferred = progress.transferred ?? 0

        progress.transferredString = convertSize(progress.transferred ?? 0)
        progress.totalString = convertSize(progress.total)
        progress.speedString = convertSize(progress.speed)

        progressBar.update(progress.transferred, {
            speed: progress.speedString,
        })

        if (typeof progressCallback === "function") {
            progressCallback(progress)
        }
    }, 1000)

    await new Promise((resolve, reject) => {
        localStream.on("finish", resolve)
        localStream.on("error", reject)
    })

    progressBar.stop()

    clearInterval(progressInterval)

    return destination
}
140
packages/core/src/helpers/downloadTorrent.js
Executable file
@ -0,0 +1,140 @@
import humanFormat from "human-format"
import aria2 from "aria2"

function convertSize(size) {
    return `${humanFormat(size, {
        decimals: 2,
    })}B`
}

export default async function downloadTorrent(
    magnet,
    destination,
    {
        onStart,
        onProgress,
        onDone,
        onError,
        taskId,
    } = {}
) {
    let progressInterval = null
    let tickProgress = {
        total: 0,
        transferred: 0,
        speed: 0,

        totalString: "0B",
        transferredString: "0B",
        speedString: "0B/s",
    }

    const client = new aria2({
        host: "localhost",
        port: 6800,
        secure: false,
        secret: "",
        path: "/jsonrpc"
    })

    await client.open()

    let downloadId = await client.call(
        "addUri",
        [magnet],
        {
            dir: destination,
        },
    )

    async function stopDownload() {
        await client.call("remove", downloadId)
        clearInterval(progressInterval)
    }

    await new Promise(async (resolve, reject) => {
        if (typeof onStart === "function") {
            onStart()
        }

        if (taskId) {
            global._relic_eventBus.once(`task:cancel:${taskId}`, stopDownload)
        }

        progressInterval = setInterval(async () => {
            const data = await client.call("tellStatus", downloadId)

            console.log(data)

            if (data.status === "complete") {
                if (Array.isArray(data.followedBy) && data.followedBy[0]) {
                    // replace downloadId
                    downloadId = data.followedBy[0]
                }
            }

            tickProgress.total = parseInt(data.totalLength)
            tickProgress.speed = parseInt(data.downloadSpeed)
            tickProgress.transferred = parseInt(data.completedLength)
            tickProgress.connections = data.connections

            tickProgress.transferredString = convertSize(tickProgress.transferred)
            tickProgress.totalString = convertSize(tickProgress.total)
            tickProgress.speedString = convertSize(tickProgress.speed)

            if (typeof onProgress === "function") {
                onProgress(tickProgress)
            }
        }, 1000)

        client.on("onDownloadStart", async ([{ gid }]) => {
            const data = await client.call("tellStatus", gid)

            console.log(data)

            if (typeof data.following !== "undefined") {
                if (data.following === downloadId) {
                    downloadId = data.gid
                }
            }
        })

        client.on("onBtDownloadComplete", ([{ gid }]) => {
            if (gid !== downloadId) {
                return false
            }

            if (typeof onDone === "function") {
                onDone()
            }

            stopDownload()

            return resolve({
                downloadId,
            })
        })

        client.on("onDownloadError", ([{ gid }]) => {
            if (gid !== downloadId) {
                return false
            }

            stopDownload()

            if (typeof onError === "function") {
                onError()
            }

            return reject()
        })
    })

    await client.call("remove", downloadId)

    if (taskId) {
        global._relic_eventBus.off(`task:cancel:${taskId}`, stopDownload)
    }

    return downloadId
}
43
packages/core/src/helpers/sendToRender.js
Executable file
@ -0,0 +1,43 @@
import lodash from "lodash"

const forbidden = [
    "libraries"
]

export default (event, data) => {
    if (!global.win) {
        return false
    }

    try {
        function serializeIpc(data) {
            if (!data) {
                return undefined
            }

            data = JSON.stringify(data)

            data = JSON.parse(data)

            const copy = lodash.cloneDeep(data)

            if (!Array.isArray(copy)) {
                Object.keys(copy).forEach((key) => {
                    if (forbidden.includes(key)) {
                        delete copy[key]
                    }

                    if (typeof copy[key] === "function") {
                        delete copy[key]
                    }
                })
            }

            return copy
        }

        global.win.webContents.send(event, serializeIpc(data))
    } catch (error) {
        console.error(error)
    }
}
201
packages/core/src/helpers/setup.js
Executable file
@ -0,0 +1,201 @@
import Logger from "../logger"

const Log = Logger.child({ service: "SETUP" })

import path from "node:path"
import fs from "node:fs"
import os from "node:os"
import admzip from "adm-zip"
import resolveOs from "../utils/resolveOs"
import chmodRecursive from "../utils/chmodRecursive"

import downloadFile from "../helpers/downloadHttpFile"

import Vars from "../vars"
import Prerequisites from "../prerequisites"

export default async () => {
    if (!fs.existsSync(Vars.binaries_path)) {
        Log.info(`Creating binaries directory: ${Vars.binaries_path}...`)
        await fs.promises.mkdir(Vars.binaries_path, { recursive: true })
    }

    for await (let prerequisite of Prerequisites) {
        try {
            Log.info(`Checking prerequisite: ${prerequisite.id}...`)

            if (Array.isArray(prerequisite.requireOs) && !prerequisite.requireOs.includes(os.platform())) {
                Log.info(`Prerequisite: ${prerequisite.id} is not required for this os.`)
                continue
            }

            if (!fs.existsSync(prerequisite.finalBin)) {
                Log.info(`Missing prerequisite: ${prerequisite.id}, installing...`)

                global._relic_eventBus.emit("app:setup", {
                    installed: false,
                    message: `Installing ${prerequisite.id}`,
                })

                if (fs.existsSync(prerequisite.destination)) {
                    Log.info(`Deleting temporal file [${prerequisite.destination}]`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Deleting temporal file [${prerequisite.destination}]`,
                    })

                    await fs.promises.rm(prerequisite.destination)
                }

                if (fs.existsSync(prerequisite.extract)) {
                    Log.info(`Deleting temporal directory [${prerequisite.extract}]`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Deleting temporal directory [${prerequisite.extract}]`,
                    })

                    await fs.promises.rm(prerequisite.extract, { recursive: true })
                }

                Log.info(`Creating base directory: ${Vars.binaries_path}/${prerequisite.id}...`)

                global._relic_eventBus.emit("app:setup", {
                    installed: false,
                    message: `Creating base directory: ${Vars.binaries_path}/${prerequisite.id}`,
                })

                await fs.promises.mkdir(path.resolve(Vars.binaries_path, prerequisite.id), { recursive: true })

                if (typeof prerequisite.url === "function") {
                    prerequisite.url = await prerequisite.url(os.platform(), os.arch())
                    Log.info(`Resolved url: ${prerequisite.url}`)
                }

                Log.info(`Downloading ${prerequisite.id} from [${prerequisite.url}] to destination [${prerequisite.destination}]...`)

                global._relic_eventBus.emit("app:setup", {
                    installed: false,
                    message: `Starting download ${prerequisite.id} from [${prerequisite.url}] to destination [${prerequisite.destination}]`,
                })

                try {
                    await downloadFile(
                        prerequisite.url,
                        prerequisite.destination,
                        (progress) => {
                            global._relic_eventBus.emit("app:setup", {
                                installed: false,
                                message: `Downloaded ${progress.transferredString} / ${progress.totalString} | ${progress.speedString}/s`,
                            })
                        }
                    )
                } catch (error) {
                    if (fs.existsSync(prerequisite.destination)) {
                        await fs.promises.rm(prerequisite.destination)
                    }

                    throw error
                }

                if (typeof prerequisite.extract === "string") {
                    Log.info(`Extracting ${prerequisite.id} to destination [${prerequisite.extract}]...`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Extracting ${prerequisite.id} to destination [${prerequisite.extract}]`,
                    })

                    const zip = new admzip(prerequisite.destination)

                    await zip.extractAllTo(prerequisite.extract, true)

                    Log.info(`Extraction ok...`)
                }

                if (prerequisite.extractTargetFromName === true) {
                    let name = path.basename(prerequisite.url)
                    const ext = path.extname(name)

                    name = name.replace(ext, "")

                    if (fs.existsSync(path.resolve(prerequisite.extract, name))) {
                        await fs.promises.rename(path.resolve(prerequisite.extract, name), `${prerequisite.extract}_old`)
                        await fs.promises.rm(prerequisite.extract, { recursive: true })
                        await fs.promises.rename(`${prerequisite.extract}_old`, prerequisite.extract)
                    }
                }

                if (prerequisite.deleteBeforeExtract === true) {
                    Log.info(`Deleting temporal file [${prerequisite.destination}]`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Deleting temporal file [${prerequisite.destination}]`,
                    })

                    await fs.promises.unlink(prerequisite.destination)
                }

                if (typeof prerequisite.rewriteExecutionPermission !== "undefined") {
                    const to = typeof prerequisite.rewriteExecutionPermission === "string" ?
                        prerequisite.rewriteExecutionPermission :
                        prerequisite.finalBin

                    Log.info(`Rewriting permissions to ${to}...`)

                    global._relic_eventBus.emit("app:setup", {
                        installed: false,
                        message: `Rewriting permissions to ${to}`,
                    })

                    await chmodRecursive(to, 0o755)
                }

                if (Array.isArray(prerequisite.moveDirs)) {
                    for (const dir of prerequisite.moveDirs) {
                        if (Array.isArray(dir.requireOs)) {
                            if (!dir.requireOs.includes(resolveOs())) {
                                continue
                            }
                        }

                        Log.info(`Moving ${dir.from} to ${dir.to}...`)

                        global._relic_eventBus.emit("app:setup", {
                            installed: false,
                            message: `Moving ${dir.from} to ${dir.to}`,
                        })

                        await fs.promises.rename(dir.from, dir.to)

                        if (dir.deleteParentBefore === true) {
                            await fs.promises.rm(path.dirname(dir.from), { recursive: true })
                        }
                    }
                }
            }

            global._relic_eventBus.emit("app:setup", {
                installed: true,
                message: null,
            })

            Log.info(`Prerequisite: ${prerequisite.id} is ready!`)
        } catch (error) {
            global._relic_eventBus.emit("app:setup", {
                installed: false,
                error: error,
                message: error.message,
            })

            Log.error("Aborting setup due to an error...")
            Log.error(error)

            throw error
        }

        Log.info(`All prerequisites are ready!`)
    }
}
116
packages/core/src/index.js
Executable file
@ -0,0 +1,116 @@
import fs from "node:fs"
import { EventEmitter } from "@foxify/events"
import { onExit } from "signal-exit"
import open from "open"

import SetupHelper from "./helpers/setup"
import { execa } from "./libraries/execa"
import Logger from "./logger"

import Settings from "./classes/Settings"
import Vars from "./vars"
import DB from "./db"

import PackageInstall from "./handlers/install"
import PackageExecute from "./handlers/execute"
import PackageUninstall from "./handlers/uninstall"
import PackageReinstall from "./handlers/reinstall"
import PackageCancelInstall from "./handlers/cancelInstall"
import PackageUpdate from "./handlers/update"
import PackageApply from "./handlers/apply"
import PackageList from "./handlers/list"
import PackageRead from "./handlers/read"
import PackageAuthorize from "./handlers/authorize"
import PackageDeauthorize from "./handlers/deauthorize"
import PackageCheckUpdate from "./handlers/checkUpdate"
import PackageLastOperationRetry from "./handlers/lastOperationRetry"

export default class RelicCore {
    constructor(params) {
        this.params = params
    }

    eventBus = (global._relic_eventBus = new EventEmitter())

    logger = Logger

    db = DB

    async initialize() {
        globalThis.relic_core = {
            tasks: [],
            vars: Vars,
        }

        console.log(`Checking runtime_path >`, Vars.runtime_path)

        if (!fs.existsSync(Vars.runtime_path)) {
            fs.mkdirSync(Vars.runtime_path, { recursive: true })
        }

        await DB.initialize()

        await Settings.initialize()

        if (!(await Settings.get("packages_path"))) {
            await Settings.set("packages_path", Vars.packages_path)
        }

        this.aria2c_instance = execa(
            Vars.aria2_bin,
            [
                "--enable-rpc",
                "--rpc-listen-all=true",
                "--rpc-allow-origin-all",
                "--file-allocation=none",
            ],
            {
                stdout: "inherit",
                stderr: "inherit",
            },
        )

        onExit(this.onExit)
    }

    onExit = () => {
        if (fs.existsSync(Vars.cache_path)) {
            fs.rmSync(Vars.cache_path, { recursive: true, force: true })
        }

        if (this.aria2c_instance) {
            this.aria2c_instance.kill("SIGINT")
        }
    }

    async setup() {
        return await SetupHelper()
    }

    package = {
        install: PackageInstall,
        execute: PackageExecute,
        uninstall: PackageUninstall,
        reinstall: PackageReinstall,
        cancelInstall: PackageCancelInstall,
        update: PackageUpdate,
        apply: PackageApply,
        list: PackageList,
        read: PackageRead,
        authorize: PackageAuthorize,
        deauthorize: PackageDeauthorize,
        checkUpdate: PackageCheckUpdate,
        lastOperationRetry: PackageLastOperationRetry,
    }

    async openPath(pkg_id) {
        if (!pkg_id) {
            return open(Vars.runtime_path)
        }

        const packagesPath =
            (await Settings.get("packages_path")) ?? Vars.packages_path

        return open(packagesPath + "/" + pkg_id)
    }
}
309
packages/core/src/libraries/execa/index.js
Executable file
@ -0,0 +1,309 @@
|
|||||||
|
import {Buffer} from 'node:buffer';
|
||||||
|
import path from 'node:path';
|
||||||
|
import childProcess from 'node:child_process';
|
||||||
|
import process from 'node:process';
|
||||||
|
import crossSpawn from 'cross-spawn';
|
||||||
|
import stripFinalNewline from '../strip-final-newline';
|
||||||
|
import {npmRunPathEnv} from '../npm-run-path';
|
||||||
|
import onetime from '../onetime';
|
||||||
|
import {makeError} from './lib/error.js';
|
||||||
|
import {normalizeStdio, normalizeStdioNode} from './lib/stdio.js';
|
||||||
|
import {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} from './lib/kill.js';
|
||||||
|
import {addPipeMethods} from './lib/pipe.js';
|
||||||
|
import {handleInput, getSpawnedResult, makeAllStream, handleInputSync} from './lib/stream.js';
|
||||||
|
import {mergePromise, getSpawnedPromise} from './lib/promise.js';
|
||||||
|
import {joinCommand, parseCommand, parseTemplates, getEscapedCommand} from './lib/command.js';
|
||||||
|
import {logCommand, verboseDefault} from './lib/verbose.js';
|
||||||
|
|
||||||
|
const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100;
|
||||||
|
|
||||||
|
const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => {
|
||||||
|
const env = extendEnv ? {...process.env, ...envOption} : envOption;
|
||||||
|
|
||||||
|
if (preferLocal) {
|
||||||
|
return npmRunPathEnv({env, cwd: localDir, execPath});
|
||||||
|
}
|
||||||
|
|
||||||
|
return env;
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleArguments = (file, args, options = {}) => {
|
||||||
|
const parsed = crossSpawn._parse(file, args, options);
|
||||||
|
file = parsed.command;
|
||||||
|
args = parsed.args;
|
||||||
|
options = parsed.options;
|
||||||
|
|
||||||
|
options = {
|
||||||
|
maxBuffer: DEFAULT_MAX_BUFFER,
|
||||||
|
buffer: true,
|
||||||
|
stripFinalNewline: true,
|
||||||
|
extendEnv: true,
|
||||||
|
preferLocal: false,
|
||||||
|
localDir: options.cwd || process.cwd(),
|
||||||
|
execPath: process.execPath,
|
||||||
|
encoding: 'utf8',
|
||||||
|
reject: true,
|
||||||
|
cleanup: true,
|
||||||
|
all: false,
|
||||||
|
windowsHide: true,
|
||||||
|
verbose: verboseDefault,
|
||||||
|
...options,
|
||||||
|
};
|
||||||
|
|
||||||
|
options.env = getEnv(options);
|
||||||
|
|
||||||
|
options.stdio = normalizeStdio(options);
|
||||||
|
|
||||||
|
if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') {
|
||||||
|
// #116
|
||||||
|
args.unshift('/q');
|
||||||
|
}
|
||||||
|
|
||||||
|
return {file, args, options, parsed};
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleOutput = (options, value, error) => {
|
||||||
|
if (typeof value !== 'string' && !Buffer.isBuffer(value)) {
|
||||||
|
// When `execaSync()` errors, we normalize it to '' to mimic `execa()`
|
||||||
|
return error === undefined ? undefined : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.stripFinalNewline) {
|
||||||
|
return stripFinalNewline(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
};
|
||||||
|
|
||||||
|
export function execa(file, args, options) {
|
||||||
|
const parsed = handleArguments(file, args, options);
|
||||||
|
const command = joinCommand(file, args);
|
||||||
|
const escapedCommand = getEscapedCommand(file, args);
|
||||||
|
logCommand(escapedCommand, parsed.options);
|
||||||
|
|
||||||
|
validateTimeout(parsed.options);
|
||||||
|
|
||||||
|
let spawned;
|
||||||
|
try {
|
||||||
|
spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options);
|
||||||
|
} catch (error) {
|
||||||
|
// Ensure the returned error is always both a promise and a child process
|
||||||
|
const dummySpawned = new childProcess.ChildProcess();
|
||||||
|
const errorPromise = Promise.reject(makeError({
|
||||||
|
error,
|
||||||
|
stdout: '',
|
||||||
|
stderr: '',
|
||||||
|
all: '',
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
parsed,
|
||||||
|
timedOut: false,
|
||||||
|
isCanceled: false,
|
||||||
|
killed: false,
|
||||||
|
}));
|
||||||
|
mergePromise(dummySpawned, errorPromise);
|
||||||
|
return dummySpawned;
|
||||||
|
}
|
||||||
|
|
||||||
|
const spawnedPromise = getSpawnedPromise(spawned);
|
||||||
|
const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise);
|
||||||
|
const processDone = setExitHandler(spawned, parsed.options, timedPromise);
|
||||||
|
|
||||||
|
const context = {isCanceled: false};
|
||||||
|
|
||||||
|
spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned));
|
||||||
|
spawned.cancel = spawnedCancel.bind(null, spawned, context);
|
||||||
|
|
||||||
|
const handlePromise = async () => {
|
||||||
|
const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone);
|
||||||
|
const stdout = handleOutput(parsed.options, stdoutResult);
|
||||||
|
const stderr = handleOutput(parsed.options, stderrResult);
|
||||||
|
const all = handleOutput(parsed.options, allResult);
|
||||||
|
|
||||||
|
if (error || exitCode !== 0 || signal !== null) {
|
||||||
|
const returnedError = makeError({
|
||||||
|
error,
|
||||||
|
exitCode,
|
||||||
|
signal,
|
||||||
|
stdout,
|
||||||
|
stderr,
|
||||||
|
all,
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
parsed,
|
||||||
|
timedOut,
|
||||||
|
isCanceled: context.isCanceled || (parsed.options.signal ? parsed.options.signal.aborted : false),
|
||||||
|
killed: spawned.killed,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!parsed.options.reject) {
|
||||||
|
return returnedError;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw returnedError;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
exitCode: 0,
|
||||||
|
stdout,
|
||||||
|
stderr,
|
||||||
|
all,
|
||||||
|
failed: false,
|
||||||
|
timedOut: false,
|
||||||
|
isCanceled: false,
|
||||||
|
killed: false,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
const handlePromiseOnce = onetime(handlePromise);
|
||||||
|
|
||||||
|
handleInput(spawned, parsed.options);
|
||||||
|
|
||||||
|
spawned.all = makeAllStream(spawned, parsed.options);
|
||||||
|
|
||||||
|
addPipeMethods(spawned);
|
||||||
|
mergePromise(spawned, handlePromiseOnce);
|
||||||
|
return spawned;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function execaSync(file, args, options) {
|
||||||
|
const parsed = handleArguments(file, args, options);
|
||||||
|
const command = joinCommand(file, args);
|
||||||
|
const escapedCommand = getEscapedCommand(file, args);
|
||||||
|
logCommand(escapedCommand, parsed.options);
|
||||||
|
|
||||||
|
const input = handleInputSync(parsed.options);
|
||||||
|
|
||||||
|
let result;
|
||||||
|
try {
|
||||||
|
result = childProcess.spawnSync(parsed.file, parsed.args, {...parsed.options, input});
|
||||||
|
} catch (error) {
|
||||||
|
throw makeError({
|
||||||
|
error,
|
||||||
|
stdout: '',
|
||||||
|
stderr: '',
|
||||||
|
all: '',
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
parsed,
|
||||||
|
timedOut: false,
|
||||||
|
isCanceled: false,
|
||||||
|
killed: false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const stdout = handleOutput(parsed.options, result.stdout, result.error);
|
||||||
|
const stderr = handleOutput(parsed.options, result.stderr, result.error);
|
||||||
|
|
||||||
|
if (result.error || result.status !== 0 || result.signal !== null) {
|
||||||
|
const error = makeError({
|
||||||
|
stdout,
|
||||||
|
stderr,
|
||||||
|
error: result.error,
|
||||||
|
signal: result.signal,
|
||||||
|
exitCode: result.status,
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
parsed,
|
||||||
|
timedOut: result.error && result.error.code === 'ETIMEDOUT',
|
||||||
|
isCanceled: false,
|
||||||
|
killed: result.signal !== null,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!parsed.options.reject) {
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
exitCode: 0,
|
||||||
|
stdout,
|
||||||
|
stderr,
|
||||||
|
failed: false,
|
||||||
|
timedOut: false,
|
||||||
|
isCanceled: false,
|
||||||
|
killed: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizeScriptStdin = ({input, inputFile, stdio}) => input === undefined && inputFile === undefined && stdio === undefined
|
||||||
|
? {stdin: 'inherit'}
|
||||||
|
: {};
|
||||||
|
|
||||||
|
const normalizeScriptOptions = (options = {}) => ({
|
||||||
|
preferLocal: true,
|
||||||
|
...normalizeScriptStdin(options),
|
||||||
|
...options,
|
||||||
|
});
|
||||||
|
|
||||||
|
function create$(options) {
|
||||||
|
function $(templatesOrOptions, ...expressions) {
|
||||||
|
if (!Array.isArray(templatesOrOptions)) {
|
||||||
|
return create$({...options, ...templatesOrOptions});
|
||||||
|
}
|
||||||
|
|
||||||
|
const [file, ...args] = parseTemplates(templatesOrOptions, expressions);
|
||||||
|
return execa(file, args, normalizeScriptOptions(options));
|
||||||
|
}
|
||||||
|
|
||||||
|
$.sync = (templates, ...expressions) => {
|
||||||
|
if (!Array.isArray(templates)) {
|
||||||
|
throw new TypeError('Please use $(options).sync`command` instead of $.sync(options)`command`.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const [file, ...args] = parseTemplates(templates, expressions);
|
||||||
|
return execaSync(file, args, normalizeScriptOptions(options));
|
||||||
|
};
|
||||||
|
|
||||||
|
return $;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const $ = create$();
|
||||||
|
|
||||||
|
export function execaCommand(command, options) {
|
||||||
|
const [file, ...args] = parseCommand(command);
|
||||||
|
return execa(file, args, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function execaCommandSync(command, options) {
|
||||||
|
const [file, ...args] = parseCommand(command);
|
||||||
|
return execaSync(file, args, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function execaNode(scriptPath, args, options = {}) {
|
||||||
|
if (args && !Array.isArray(args) && typeof args === 'object') {
|
||||||
|
options = args;
|
||||||
|
args = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const stdio = normalizeStdioNode(options);
|
||||||
|
const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect'));
|
||||||
|
|
||||||
|
const {
|
||||||
|
nodePath = process.execPath,
|
||||||
|
nodeOptions = defaultExecArgv,
|
||||||
|
} = options;
|
||||||
|
|
||||||
|
return execa(
|
||||||
|
nodePath,
|
||||||
|
[
|
||||||
|
...nodeOptions,
|
||||||
|
scriptPath,
|
||||||
|
...(Array.isArray(args) ? args : []),
|
||||||
|
],
|
||||||
|
{
|
||||||
|
...options,
|
||||||
|
stdin: undefined,
|
||||||
|
stdout: undefined,
|
||||||
|
stderr: undefined,
|
||||||
|
stdio,
|
||||||
|
shell: false,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
119
packages/core/src/libraries/execa/lib/command.js
Executable file
@ -0,0 +1,119 @@
|
|||||||
|
import {Buffer} from 'node:buffer';
|
||||||
|
import {ChildProcess} from 'node:child_process';
|
||||||
|
|
||||||
|
const normalizeArgs = (file, args = []) => {
|
||||||
|
if (!Array.isArray(args)) {
|
||||||
|
return [file];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [file, ...args];
|
||||||
|
};
|
||||||
|
|
||||||
|
const NO_ESCAPE_REGEXP = /^[\w.-]+$/;
|
||||||
|
|
||||||
|
const escapeArg = arg => {
|
||||||
|
if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) {
|
||||||
|
return arg;
|
||||||
|
}
|
||||||
|
|
||||||
|
return `"${arg.replaceAll('"', '\\"')}"`;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const joinCommand = (file, args) => normalizeArgs(file, args).join(' ');
|
||||||
|
|
||||||
|
export const getEscapedCommand = (file, args) => normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' ');
|
||||||
|
|
||||||
|
const SPACES_REGEXP = / +/g;
|
||||||
|
|
||||||
|
// Handle `execaCommand()`
|
||||||
|
export const parseCommand = command => {
|
||||||
|
const tokens = [];
|
||||||
|
for (const token of command.trim().split(SPACES_REGEXP)) {
|
||||||
|
// Allow spaces to be escaped by a backslash if not meant as a delimiter
|
||||||
|
const previousToken = tokens.at(-1);
|
||||||
|
if (previousToken && previousToken.endsWith('\\')) {
|
||||||
|
// Merge previous token with current one
|
||||||
|
tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`;
|
||||||
|
} else {
|
||||||
|
tokens.push(token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return tokens;
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseExpression = expression => {
|
||||||
|
const typeOfExpression = typeof expression;
|
||||||
|
|
||||||
|
if (typeOfExpression === 'string') {
|
||||||
|
return expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeOfExpression === 'number') {
|
||||||
|
return String(expression);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
typeOfExpression === 'object'
|
||||||
|
&& expression !== null
|
||||||
|
&& !(expression instanceof ChildProcess)
|
||||||
|
&& 'stdout' in expression
|
||||||
|
) {
|
||||||
|
const typeOfStdout = typeof expression.stdout;
|
||||||
|
|
||||||
|
if (typeOfStdout === 'string') {
|
||||||
|
return expression.stdout;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Buffer.isBuffer(expression.stdout)) {
|
||||||
|
return expression.stdout.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError(`Unexpected "${typeOfStdout}" stdout in template expression`);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError(`Unexpected "${typeOfExpression}" in template expression`);
|
||||||
|
};
|
||||||
|
|
||||||
|
const concatTokens = (tokens, nextTokens, isNew) => isNew || tokens.length === 0 || nextTokens.length === 0
|
||||||
|
? [...tokens, ...nextTokens]
|
||||||
|
: [
|
||||||
|
...tokens.slice(0, -1),
|
||||||
|
`${tokens.at(-1)}${nextTokens[0]}`,
|
||||||
|
...nextTokens.slice(1),
|
||||||
|
];
|
||||||
|
|
||||||
|
const parseTemplate = ({templates, expressions, tokens, index, template}) => {
|
||||||
|
const templateString = template ?? templates.raw[index];
|
||||||
|
const templateTokens = templateString.split(SPACES_REGEXP).filter(Boolean);
|
||||||
|
const newTokens = concatTokens(
|
||||||
|
tokens,
|
||||||
|
templateTokens,
|
||||||
|
templateString.startsWith(' '),
|
||||||
|
);
|
||||||
|
|
||||||
|
if (index === expressions.length) {
|
||||||
|
return newTokens;
|
||||||
|
}
|
||||||
|
|
||||||
|
const expression = expressions[index];
|
||||||
|
const expressionTokens = Array.isArray(expression)
|
||||||
|
? expression.map(expression => parseExpression(expression))
|
||||||
|
: [parseExpression(expression)];
|
||||||
|
return concatTokens(
|
||||||
|
newTokens,
|
||||||
|
expressionTokens,
|
||||||
|
templateString.endsWith(' '),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const parseTemplates = (templates, expressions) => {
|
||||||
|
let tokens = [];
|
||||||
|
|
||||||
|
for (const [index, template] of templates.entries()) {
|
||||||
|
tokens = parseTemplate({templates, expressions, tokens, index, template});
|
||||||
|
}
|
||||||
|
|
||||||
|
return tokens;
|
||||||
|
};
|
||||||
|
|
87
packages/core/src/libraries/execa/lib/error.js
Executable file
@ -0,0 +1,87 @@
|
|||||||
|
import process from 'node:process';
|
||||||
|
import {signalsByName} from '../../human-signals';
|
||||||
|
|
||||||
|
const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => {
|
||||||
|
if (timedOut) {
|
||||||
|
return `timed out after ${timeout} milliseconds`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isCanceled) {
|
||||||
|
return 'was canceled';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errorCode !== undefined) {
|
||||||
|
return `failed with ${errorCode}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (signal !== undefined) {
|
||||||
|
return `was killed with ${signal} (${signalDescription})`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (exitCode !== undefined) {
|
||||||
|
return `failed with exit code ${exitCode}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 'failed';
|
||||||
|
};
|
||||||
|
|
||||||
|
export const makeError = ({
|
||||||
|
stdout,
|
||||||
|
stderr,
|
||||||
|
all,
|
||||||
|
error,
|
||||||
|
signal,
|
||||||
|
exitCode,
|
||||||
|
command,
|
||||||
|
escapedCommand,
|
||||||
|
timedOut,
|
||||||
|
isCanceled,
|
||||||
|
killed,
|
||||||
|
parsed: {options: {timeout, cwd = process.cwd()}},
|
||||||
|
}) => {
|
||||||
|
// `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`.
|
||||||
|
// We normalize them to `undefined`
|
||||||
|
exitCode = exitCode === null ? undefined : exitCode;
|
||||||
|
signal = signal === null ? undefined : signal;
|
||||||
|
const signalDescription = signal === undefined ? undefined : signalsByName[signal].description;
|
||||||
|
|
||||||
|
const errorCode = error && error.code;
|
||||||
|
|
||||||
|
const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled});
|
||||||
|
const execaMessage = `Command ${prefix}: ${command}`;
|
||||||
|
const isError = Object.prototype.toString.call(error) === '[object Error]';
|
||||||
|
const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage;
|
||||||
|
const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n');
|
||||||
|
|
||||||
|
if (isError) {
|
||||||
|
error.originalMessage = error.message;
|
||||||
|
error.message = message;
|
||||||
|
} else {
|
||||||
|
error = new Error(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
error.shortMessage = shortMessage;
|
||||||
|
error.command = command;
|
||||||
|
error.escapedCommand = escapedCommand;
|
||||||
|
error.exitCode = exitCode;
|
||||||
|
error.signal = signal;
|
||||||
|
error.signalDescription = signalDescription;
|
||||||
|
error.stdout = stdout;
|
||||||
|
error.stderr = stderr;
|
||||||
|
error.cwd = cwd;
|
||||||
|
|
||||||
|
if (all !== undefined) {
|
||||||
|
error.all = all;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ('bufferedData' in error) {
|
||||||
|
delete error.bufferedData;
|
||||||
|
}
|
||||||
|
|
||||||
|
error.failed = true;
|
||||||
|
error.timedOut = Boolean(timedOut);
|
||||||
|
error.isCanceled = isCanceled;
|
||||||
|
error.killed = killed && !timedOut;
|
||||||
|
|
||||||
|
return error;
|
||||||
|
};
|
102
packages/core/src/libraries/execa/lib/kill.js
Executable file
@ -0,0 +1,102 @@
|
|||||||
|
import os from 'node:os';
|
||||||
|
import {onExit} from 'signal-exit';
|
||||||
|
|
||||||
|
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;
|
||||||
|
|
||||||
|
// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior
|
||||||
|
export const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => {
|
||||||
|
const killResult = kill(signal);
|
||||||
|
setKillTimeout(kill, signal, options, killResult);
|
||||||
|
return killResult;
|
||||||
|
};
|
||||||
|
|
||||||
|
const setKillTimeout = (kill, signal, options, killResult) => {
|
||||||
|
if (!shouldForceKill(signal, options, killResult)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const timeout = getForceKillAfterTimeout(options);
|
||||||
|
const t = setTimeout(() => {
|
||||||
|
kill('SIGKILL');
|
||||||
|
}, timeout);
|
||||||
|
|
||||||
|
// Guarded because there's no `.unref()` when `execa` is used in the renderer
|
||||||
|
// process in Electron. This cannot be tested since we don't run tests in
|
||||||
|
// Electron.
|
||||||
|
// istanbul ignore else
|
||||||
|
if (t.unref) {
|
||||||
|
t.unref();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => isSigterm(signal) && forceKillAfterTimeout !== false && killResult;
|
||||||
|
|
||||||
|
const isSigterm = signal => signal === os.constants.signals.SIGTERM
|
||||||
|
|| (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM');
|
||||||
|
|
||||||
|
const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
|
||||||
|
if (forceKillAfterTimeout === true) {
|
||||||
|
return DEFAULT_FORCE_KILL_TIMEOUT;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) {
|
||||||
|
throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return forceKillAfterTimeout;
|
||||||
|
};
|
||||||
|
|
||||||
|
// `childProcess.cancel()`
|
||||||
|
export const spawnedCancel = (spawned, context) => {
|
||||||
|
const killResult = spawned.kill();
|
||||||
|
|
||||||
|
if (killResult) {
|
||||||
|
context.isCanceled = true;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const timeoutKill = (spawned, signal, reject) => {
|
||||||
|
spawned.kill(signal);
|
||||||
|
reject(Object.assign(new Error('Timed out'), {timedOut: true, signal}));
|
||||||
|
};
|
||||||
|
|
||||||
|
// `timeout` option handling
|
||||||
|
export const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => {
|
||||||
|
if (timeout === 0 || timeout === undefined) {
|
||||||
|
return spawnedPromise;
|
||||||
|
}
|
||||||
|
|
||||||
|
let timeoutId;
|
||||||
|
const timeoutPromise = new Promise((resolve, reject) => {
|
||||||
|
timeoutId = setTimeout(() => {
|
||||||
|
timeoutKill(spawned, killSignal, reject);
|
||||||
|
}, timeout);
|
||||||
|
});
|
||||||
|
|
||||||
|
const safeSpawnedPromise = spawnedPromise.finally(() => {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
});
|
||||||
|
|
||||||
|
return Promise.race([timeoutPromise, safeSpawnedPromise]);
|
||||||
|
};
|
||||||
|
|
||||||
|
export const validateTimeout = ({timeout}) => {
|
||||||
|
if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) {
|
||||||
|
throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// `cleanup` option handling
|
||||||
|
export const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => {
|
||||||
|
if (!cleanup || detached) {
|
||||||
|
return timedPromise;
|
||||||
|
}
|
||||||
|
|
||||||
|
const removeExitHandler = onExit(() => {
|
||||||
|
spawned.kill();
|
||||||
|
});
|
||||||
|
|
||||||
|
return timedPromise.finally(() => {
|
||||||
|
removeExitHandler();
|
||||||
|
});
|
||||||
|
};
|
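A minimal usage sketch of the timeout helpers above (not part of the diff). The relative import paths, the use of the Unix `sleep` command, and running this from an ESM module inside the same directory are assumptions for illustration only.

// Hypothetical illustration: kill a long-running child after 500 ms via the `timeout` helpers.
import {spawn} from 'node:child_process';
import {setupTimeout, validateTimeout} from './kill.js';
import {getSpawnedPromise} from './promise.js';

const options = {timeout: 500, killSignal: 'SIGTERM'};
validateTimeout(options); // throws a TypeError for negative or non-finite values

const spawned = spawn('sleep', ['10']);
const timedPromise = setupTimeout(spawned, options, getSpawnedPromise(spawned));
timedPromise.catch(error => console.log(error.timedOut)); // logs `true` after ~500 ms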
packages/core/src/libraries/execa/lib/pipe.js (Executable file, 42 lines)
@@ -0,0 +1,42 @@
import {createWriteStream} from 'node:fs';
import {ChildProcess} from 'node:child_process';
import {isWritableStream} from '../../is-stream';

const isExecaChildProcess = target => target instanceof ChildProcess && typeof target.then === 'function';

const pipeToTarget = (spawned, streamName, target) => {
    if (typeof target === 'string') {
        spawned[streamName].pipe(createWriteStream(target));
        return spawned;
    }

    if (isWritableStream(target)) {
        spawned[streamName].pipe(target);
        return spawned;
    }

    if (!isExecaChildProcess(target)) {
        throw new TypeError('The second argument must be a string, a stream or an Execa child process.');
    }

    if (!isWritableStream(target.stdin)) {
        throw new TypeError('The target child process\'s stdin must be available.');
    }

    spawned[streamName].pipe(target.stdin);
    return target;
};

export const addPipeMethods = spawned => {
    if (spawned.stdout !== null) {
        spawned.pipeStdout = pipeToTarget.bind(undefined, spawned, 'stdout');
    }

    if (spawned.stderr !== null) {
        spawned.pipeStderr = pipeToTarget.bind(undefined, spawned, 'stderr');
    }

    if (spawned.all !== undefined) {
        spawned.pipeAll = pipeToTarget.bind(undefined, spawned, 'all');
    }
};
packages/core/src/libraries/execa/lib/promise.js (Executable file, 36 lines)
@@ -0,0 +1,36 @@
// eslint-disable-next-line unicorn/prefer-top-level-await
const nativePromisePrototype = (async () => {})().constructor.prototype;

const descriptors = ['then', 'catch', 'finally'].map(property => [
    property,
    Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property),
]);

// The return value is a mixin of `childProcess` and `Promise`
export const mergePromise = (spawned, promise) => {
    for (const [property, descriptor] of descriptors) {
        // Starting the main `promise` is deferred to avoid consuming streams
        const value = typeof promise === 'function'
            ? (...args) => Reflect.apply(descriptor.value, promise(), args)
            : descriptor.value.bind(promise);

        Reflect.defineProperty(spawned, property, {...descriptor, value});
    }
};

// Use promises instead of `child_process` events
export const getSpawnedPromise = spawned => new Promise((resolve, reject) => {
    spawned.on('exit', (exitCode, signal) => {
        resolve({exitCode, signal});
    });

    spawned.on('error', error => {
        reject(error);
    });

    if (spawned.stdin) {
        spawned.stdin.on('error', error => {
            reject(error);
        });
    }
});
packages/core/src/libraries/execa/lib/stdio.js (Executable file, 49 lines)
@@ -0,0 +1,49 @@
const aliases = ['stdin', 'stdout', 'stderr'];

const hasAlias = options => aliases.some(alias => options[alias] !== undefined);

export const normalizeStdio = options => {
    if (!options) {
        return;
    }

    const {stdio} = options;

    if (stdio === undefined) {
        return aliases.map(alias => options[alias]);
    }

    if (hasAlias(options)) {
        throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`);
    }

    if (typeof stdio === 'string') {
        return stdio;
    }

    if (!Array.isArray(stdio)) {
        throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
    }

    const length = Math.max(stdio.length, aliases.length);
    return Array.from({length}, (value, index) => stdio[index]);
};

// `ipc` is pushed unless it is already present
export const normalizeStdioNode = options => {
    const stdio = normalizeStdio(options);

    if (stdio === 'ipc') {
        return 'ipc';
    }

    if (stdio === undefined || typeof stdio === 'string') {
        return [stdio, stdio, stdio, 'ipc'];
    }

    if (stdio.includes('ipc')) {
        return stdio;
    }

    return [...stdio, 'ipc'];
};
packages/core/src/libraries/execa/lib/stream.js (Executable file, 133 lines)
@@ -0,0 +1,133 @@
import {createReadStream, readFileSync} from 'node:fs';
import {setTimeout} from 'node:timers/promises';
import {isStream} from '../../is-stream';
import getStream, {getStreamAsBuffer} from '../../get-stream';
import mergeStream from 'merge-stream';

const validateInputOptions = input => {
    if (input !== undefined) {
        throw new TypeError('The `input` and `inputFile` options cannot be both set.');
    }
};

const getInputSync = ({input, inputFile}) => {
    if (typeof inputFile !== 'string') {
        return input;
    }

    validateInputOptions(input);
    return readFileSync(inputFile);
};

// `input` and `inputFile` option in sync mode
export const handleInputSync = options => {
    const input = getInputSync(options);

    if (isStream(input)) {
        throw new TypeError('The `input` option cannot be a stream in sync mode');
    }

    return input;
};

const getInput = ({input, inputFile}) => {
    if (typeof inputFile !== 'string') {
        return input;
    }

    validateInputOptions(input);
    return createReadStream(inputFile);
};

// `input` and `inputFile` option in async mode
export const handleInput = (spawned, options) => {
    const input = getInput(options);

    if (input === undefined) {
        return;
    }

    if (isStream(input)) {
        input.pipe(spawned.stdin);
    } else {
        spawned.stdin.end(input);
    }
};

// `all` interleaves `stdout` and `stderr`
export const makeAllStream = (spawned, {all}) => {
    if (!all || (!spawned.stdout && !spawned.stderr)) {
        return;
    }

    const mixed = mergeStream();

    if (spawned.stdout) {
        mixed.add(spawned.stdout);
    }

    if (spawned.stderr) {
        mixed.add(spawned.stderr);
    }

    return mixed;
};

// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
const getBufferedData = async (stream, streamPromise) => {
    // When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve
    if (!stream || streamPromise === undefined) {
        return;
    }

    // Wait for the `all` stream to receive the last chunk before destroying the stream
    await setTimeout(0);

    stream.destroy();

    try {
        return await streamPromise;
    } catch (error) {
        return error.bufferedData;
    }
};

const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {
    if (!stream || !buffer) {
        return;
    }

    // eslint-disable-next-line unicorn/text-encoding-identifier-case
    if (encoding === 'utf8' || encoding === 'utf-8') {
        return getStream(stream, {maxBuffer});
    }

    if (encoding === null || encoding === 'buffer') {
        return getStreamAsBuffer(stream, {maxBuffer});
    }

    return applyEncoding(stream, maxBuffer, encoding);
};

const applyEncoding = async (stream, maxBuffer, encoding) => {
    const buffer = await getStreamAsBuffer(stream, {maxBuffer});
    return buffer.toString(encoding);
};

// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
export const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {
    const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});
    const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});
    const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});

    try {
        return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]);
    } catch (error) {
        return Promise.all([
            {error, signal: error.signal, timedOut: error.timedOut},
            getBufferedData(stdout, stdoutPromise),
            getBufferedData(stderr, stderrPromise),
            getBufferedData(all, allPromise),
        ]);
    }
};
packages/core/src/libraries/execa/lib/verbose.js (Executable file, 19 lines)
@@ -0,0 +1,19 @@
import {debuglog} from 'node:util';
import process from 'node:process';

export const verboseDefault = debuglog('execa').enabled;

const padField = (field, padding) => String(field).padStart(padding, '0');

const getTimestamp = () => {
    const date = new Date();
    return `${padField(date.getHours(), 2)}:${padField(date.getMinutes(), 2)}:${padField(date.getSeconds(), 2)}.${padField(date.getMilliseconds(), 3)}`;
};

export const logCommand = (escapedCommand, {verbose}) => {
    if (!verbose) {
        return;
    }

    process.stderr.write(`[${getTimestamp()}] ${escapedCommand}\n`);
};
packages/core/src/libraries/get-stream/array-buffer.js (Executable file, 84 lines)
@@ -0,0 +1,84 @@
import {getStreamContents} from './contents.js';
import {noop, throwObjectStream, getLengthProp} from './utils.js';

export async function getStreamAsArrayBuffer(stream, options) {
    return getStreamContents(stream, arrayBufferMethods, options);
}

const initArrayBuffer = () => ({contents: new ArrayBuffer(0)});

const useTextEncoder = chunk => textEncoder.encode(chunk);
const textEncoder = new TextEncoder();

const useUint8Array = chunk => new Uint8Array(chunk);

const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);

const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);

// `contents` is an increasingly growing `Uint8Array`.
const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => {
    const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
    new Uint8Array(newContents).set(convertedChunk, previousLength);
    return newContents;
};

// Without `ArrayBuffer.resize()`, `contents` size is always a power of 2.
// This means its last bytes are zeroes (not stream data), which need to be
// trimmed at the end with `ArrayBuffer.slice()`.
const resizeArrayBufferSlow = (contents, length) => {
    if (length <= contents.byteLength) {
        return contents;
    }

    const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
    new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
    return arrayBuffer;
};

// With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
// the stream data. It does not include extraneous zeroes to trim at the end.
// The underlying `ArrayBuffer` does allocate a number of bytes that is a power
// of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
const resizeArrayBuffer = (contents, length) => {
    if (length <= contents.maxByteLength) {
        contents.resize(length);
        return contents;
    }

    const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
    new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
    return arrayBuffer;
};

// Retrieve the closest `length` that is both >= and a power of 2
const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR));

const SCALE_FACTOR = 2;

const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length);

// `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
// (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
// eslint-disable-next-line no-warning-comments
// TODO: remove after dropping support for Node 20.
// eslint-disable-next-line no-warning-comments
// TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available
const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype;

const arrayBufferMethods = {
    init: initArrayBuffer,
    convertChunk: {
        string: useTextEncoder,
        buffer: useUint8Array,
        arrayBuffer: useUint8Array,
        dataView: useUint8ArrayWithOffset,
        typedArray: useUint8ArrayWithOffset,
        others: throwObjectStream,
    },
    getSize: getLengthProp,
    truncateChunk: truncateArrayBufferChunk,
    addChunk: addArrayBufferChunk,
    getFinalChunk: noop,
    finalize: finalizeArrayBuffer,
};
packages/core/src/libraries/get-stream/array.js (Executable file, 32 lines)
@@ -0,0 +1,32 @@
import {getStreamContents} from './contents.js';
import {identity, noop, getContentsProp} from './utils.js';

export async function getStreamAsArray(stream, options) {
    return getStreamContents(stream, arrayMethods, options);
}

const initArray = () => ({contents: []});

const increment = () => 1;

const addArrayChunk = (convertedChunk, {contents}) => {
    contents.push(convertedChunk);
    return contents;
};

const arrayMethods = {
    init: initArray,
    convertChunk: {
        string: identity,
        buffer: identity,
        arrayBuffer: identity,
        dataView: identity,
        typedArray: identity,
        others: identity,
    },
    getSize: increment,
    truncateChunk: noop,
    addChunk: addArrayChunk,
    getFinalChunk: noop,
    finalize: getContentsProp,
};
packages/core/src/libraries/get-stream/buffer.js (Executable file, 20 lines)
@@ -0,0 +1,20 @@
import {getStreamAsArrayBuffer} from './array-buffer.js';

export async function getStreamAsBuffer(stream, options) {
    if (!('Buffer' in globalThis)) {
        throw new Error('getStreamAsBuffer() is only supported in Node.js');
    }

    try {
        return arrayBufferToNodeBuffer(await getStreamAsArrayBuffer(stream, options));
    } catch (error) {
        if (error.bufferedData !== undefined) {
            error.bufferedData = arrayBufferToNodeBuffer(error.bufferedData);
        }

        throw error;
    }
}

// eslint-disable-next-line n/prefer-global/buffer
const arrayBufferToNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer);
packages/core/src/libraries/get-stream/contents.js (Executable file, 101 lines)
@@ -0,0 +1,101 @@
export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
    if (!isAsyncIterable(stream)) {
        throw new Error('The first argument must be a Readable, a ReadableStream, or an async iterable.');
    }

    const state = init();
    state.length = 0;

    try {
        for await (const chunk of stream) {
            const chunkType = getChunkType(chunk);
            const convertedChunk = convertChunk[chunkType](chunk, state);
            appendChunk({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer});
        }

        appendFinalChunk({state, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer});
        return finalize(state);
    } catch (error) {
        error.bufferedData = finalize(state);
        throw error;
    }
};

const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
    const convertedChunk = getFinalChunk(state);
    if (convertedChunk !== undefined) {
        appendChunk({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer});
    }
};

const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
    const chunkSize = getSize(convertedChunk);
    const newLength = state.length + chunkSize;

    if (newLength <= maxBuffer) {
        addNewChunk(convertedChunk, state, addChunk, newLength);
        return;
    }

    const truncatedChunk = truncateChunk(convertedChunk, maxBuffer - state.length);

    if (truncatedChunk !== undefined) {
        addNewChunk(truncatedChunk, state, addChunk, maxBuffer);
    }

    throw new MaxBufferError();
};

const addNewChunk = (convertedChunk, state, addChunk, newLength) => {
    state.contents = addChunk(convertedChunk, state, newLength);
    state.length = newLength;
};

const isAsyncIterable = stream => typeof stream === 'object' && stream !== null && typeof stream[Symbol.asyncIterator] === 'function';

const getChunkType = chunk => {
    const typeOfChunk = typeof chunk;

    if (typeOfChunk === 'string') {
        return 'string';
    }

    if (typeOfChunk !== 'object' || chunk === null) {
        return 'others';
    }

    // eslint-disable-next-line n/prefer-global/buffer
    if (globalThis.Buffer?.isBuffer(chunk)) {
        return 'buffer';
    }

    const prototypeName = objectToString.call(chunk);

    if (prototypeName === '[object ArrayBuffer]') {
        return 'arrayBuffer';
    }

    if (prototypeName === '[object DataView]') {
        return 'dataView';
    }

    if (
        Number.isInteger(chunk.byteLength)
        && Number.isInteger(chunk.byteOffset)
        && objectToString.call(chunk.buffer) === '[object ArrayBuffer]'
    ) {
        return 'typedArray';
    }

    return 'others';
};

const {toString: objectToString} = Object.prototype;

export class MaxBufferError extends Error {
    name = 'MaxBufferError';

    constructor() {
        super('maxBuffer exceeded');
    }
}
packages/core/src/libraries/get-stream/index.js (Executable file, 5 lines)
@@ -0,0 +1,5 @@
export {getStreamAsArray} from './array.js';
export {getStreamAsArrayBuffer} from './array-buffer.js';
export {getStreamAsBuffer} from './buffer.js';
export {getStreamAsString as default} from './string.js';
export {MaxBufferError} from './contents.js';
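A hypothetical usage sketch of the exports listed above (not part of the diff); the file name `package.json`, the `maxBuffer` value, and running from an ESM module in this directory are assumptions.

import {createReadStream} from 'node:fs';
import getStream, {getStreamAsBuffer, MaxBufferError} from './index.js';

try {
    // Default export collects the stream as a UTF-8 string; getStreamAsBuffer returns raw bytes.
    const text = await getStream(createReadStream('package.json'), {maxBuffer: 1e6});
    const bytes = await getStreamAsBuffer(createReadStream('package.json'));
    console.log(text.length, bytes.length);
} catch (error) {
    if (error instanceof MaxBufferError) {
        console.error('stream exceeded maxBuffer; partial data is on error.bufferedData');
    }
}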
packages/core/src/libraries/get-stream/string.js (Executable file, 36 lines)
@@ -0,0 +1,36 @@
import {getStreamContents} from './contents.js';
import {identity, getContentsProp, throwObjectStream, getLengthProp} from './utils.js';

export async function getStreamAsString(stream, options) {
    return getStreamContents(stream, stringMethods, options);
}

const initString = () => ({contents: '', textDecoder: new TextDecoder()});

const useTextDecoder = (chunk, {textDecoder}) => textDecoder.decode(chunk, {stream: true});

const addStringChunk = (convertedChunk, {contents}) => contents + convertedChunk;

const truncateStringChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);

const getFinalStringChunk = ({textDecoder}) => {
    const finalChunk = textDecoder.decode();
    return finalChunk === '' ? undefined : finalChunk;
};

const stringMethods = {
    init: initString,
    convertChunk: {
        string: identity,
        buffer: useTextDecoder,
        arrayBuffer: useTextDecoder,
        dataView: useTextDecoder,
        typedArray: useTextDecoder,
        others: throwObjectStream,
    },
    getSize: getLengthProp,
    truncateChunk: truncateStringChunk,
    addChunk: addStringChunk,
    getFinalChunk: getFinalStringChunk,
    finalize: getContentsProp,
};
packages/core/src/libraries/get-stream/utils.js (Executable file, 11 lines)
@@ -0,0 +1,11 @@
export const identity = value => value;

export const noop = () => undefined;

export const getContentsProp = ({contents}) => contents;

export const throwObjectStream = chunk => {
    throw new Error(`Streams in object mode are not supported: ${String(chunk)}`);
};

export const getLengthProp = convertedChunk => convertedChunk.length;
packages/core/src/libraries/human-signals/core.js (Executable file, 275 lines)
@@ -0,0 +1,275 @@
/* eslint-disable max-lines */
// List of known process signals with information about them
export const SIGNALS = [
  { name: 'SIGHUP', number: 1, action: 'terminate', description: 'Terminal closed', standard: 'posix' },
  { name: 'SIGINT', number: 2, action: 'terminate', description: 'User interruption with CTRL-C', standard: 'ansi' },
  { name: 'SIGQUIT', number: 3, action: 'core', description: 'User interruption with CTRL-\\', standard: 'posix' },
  { name: 'SIGILL', number: 4, action: 'core', description: 'Invalid machine instruction', standard: 'ansi' },
  { name: 'SIGTRAP', number: 5, action: 'core', description: 'Debugger breakpoint', standard: 'posix' },
  { name: 'SIGABRT', number: 6, action: 'core', description: 'Aborted', standard: 'ansi' },
  { name: 'SIGIOT', number: 6, action: 'core', description: 'Aborted', standard: 'bsd' },
  { name: 'SIGBUS', number: 7, action: 'core', description: 'Bus error due to misaligned, non-existing address or paging error', standard: 'bsd' },
  { name: 'SIGEMT', number: 7, action: 'terminate', description: 'Command should be emulated but is not implemented', standard: 'other' },
  { name: 'SIGFPE', number: 8, action: 'core', description: 'Floating point arithmetic error', standard: 'ansi' },
  { name: 'SIGKILL', number: 9, action: 'terminate', description: 'Forced termination', standard: 'posix', forced: true },
  { name: 'SIGUSR1', number: 10, action: 'terminate', description: 'Application-specific signal', standard: 'posix' },
  { name: 'SIGSEGV', number: 11, action: 'core', description: 'Segmentation fault', standard: 'ansi' },
  { name: 'SIGUSR2', number: 12, action: 'terminate', description: 'Application-specific signal', standard: 'posix' },
  { name: 'SIGPIPE', number: 13, action: 'terminate', description: 'Broken pipe or socket', standard: 'posix' },
  { name: 'SIGALRM', number: 14, action: 'terminate', description: 'Timeout or timer', standard: 'posix' },
  { name: 'SIGTERM', number: 15, action: 'terminate', description: 'Termination', standard: 'ansi' },
  { name: 'SIGSTKFLT', number: 16, action: 'terminate', description: 'Stack is empty or overflowed', standard: 'other' },
  { name: 'SIGCHLD', number: 17, action: 'ignore', description: 'Child process terminated, paused or unpaused', standard: 'posix' },
  { name: 'SIGCLD', number: 17, action: 'ignore', description: 'Child process terminated, paused or unpaused', standard: 'other' },
  { name: 'SIGCONT', number: 18, action: 'unpause', description: 'Unpaused', standard: 'posix', forced: true },
  { name: 'SIGSTOP', number: 19, action: 'pause', description: 'Paused', standard: 'posix', forced: true },
  { name: 'SIGTSTP', number: 20, action: 'pause', description: 'Paused using CTRL-Z or "suspend"', standard: 'posix' },
  { name: 'SIGTTIN', number: 21, action: 'pause', description: 'Background process cannot read terminal input', standard: 'posix' },
  { name: 'SIGBREAK', number: 21, action: 'terminate', description: 'User interruption with CTRL-BREAK', standard: 'other' },
  { name: 'SIGTTOU', number: 22, action: 'pause', description: 'Background process cannot write to terminal output', standard: 'posix' },
  { name: 'SIGURG', number: 23, action: 'ignore', description: 'Socket received out-of-band data', standard: 'bsd' },
  { name: 'SIGXCPU', number: 24, action: 'core', description: 'Process timed out', standard: 'bsd' },
  { name: 'SIGXFSZ', number: 25, action: 'core', description: 'File too big', standard: 'bsd' },
  { name: 'SIGVTALRM', number: 26, action: 'terminate', description: 'Timeout or timer', standard: 'bsd' },
  { name: 'SIGPROF', number: 27, action: 'terminate', description: 'Timeout or timer', standard: 'bsd' },
  { name: 'SIGWINCH', number: 28, action: 'ignore', description: 'Terminal window size changed', standard: 'bsd' },
  { name: 'SIGIO', number: 29, action: 'terminate', description: 'I/O is available', standard: 'other' },
  { name: 'SIGPOLL', number: 29, action: 'terminate', description: 'Watched event', standard: 'other' },
  { name: 'SIGINFO', number: 29, action: 'ignore', description: 'Request for process information', standard: 'other' },
  { name: 'SIGPWR', number: 30, action: 'terminate', description: 'Device running out of power', standard: 'systemv' },
  { name: 'SIGSYS', number: 31, action: 'core', description: 'Invalid system call', standard: 'other' },
  { name: 'SIGUNUSED', number: 31, action: 'terminate', description: 'Invalid system call', standard: 'other' },
]
/* eslint-enable max-lines */
packages/core/src/libraries/human-signals/index.js (Executable file, 70 lines)
@@ -0,0 +1,70 @@
import { constants } from 'node:os'

import { SIGRTMAX } from './realtime.js'
import { getSignals } from './signals.js'

// Retrieve `signalsByName`, an object mapping signal name to signal properties.
// We make sure the object is sorted by `number`.
const getSignalsByName = () => {
  const signals = getSignals()
  return Object.fromEntries(signals.map(getSignalByName))
}

const getSignalByName = ({
  name,
  number,
  description,
  supported,
  action,
  forced,
  standard,
}) => [name, { name, number, description, supported, action, forced, standard }]

export const signalsByName = getSignalsByName()

// Retrieve `signalsByNumber`, an object mapping signal number to signal
// properties.
// We make sure the object is sorted by `number`.
const getSignalsByNumber = () => {
  const signals = getSignals()
  const length = SIGRTMAX + 1
  const signalsA = Array.from({ length }, (value, number) =>
    getSignalByNumber(number, signals),
  )
  return Object.assign({}, ...signalsA)
}

const getSignalByNumber = (number, signals) => {
  const signal = findSignalByNumber(number, signals)

  if (signal === undefined) {
    return {}
  }

  const { name, description, supported, action, forced, standard } = signal
  return {
    [number]: {
      name,
      number,
      description,
      supported,
      action,
      forced,
      standard,
    },
  }
}

// Several signals might end up sharing the same number because of OS-specific
// numbers, in which case those prevail.
const findSignalByNumber = (number, signals) => {
  const signal = signals.find(({ name }) => constants.signals[name] === number)

  if (signal !== undefined) {
    return signal
  }

  return signals.find((signalA) => signalA.number === number)
}

export const signalsByNumber = getSignalsByNumber()
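A hypothetical usage sketch of the two maps exported above (not part of the diff); the exact numbers are platform dependent, as the normalization code notes.

import {signalsByName, signalsByNumber} from './index.js'

console.log(signalsByName.SIGTERM.description) // 'Termination'
console.log(signalsByName.SIGTERM.number)      // 15 on most platforms
console.log(signalsByNumber[9].name)           // 'SIGKILL'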
packages/core/src/libraries/human-signals/realtime.js (Executable file, 16 lines)
@@ -0,0 +1,16 @@
// List of realtime signals with information about them
export const getRealtimeSignals = () => {
  const length = SIGRTMAX - SIGRTMIN + 1
  return Array.from({ length }, getRealtimeSignal)
}

const getRealtimeSignal = (value, index) => ({
  name: `SIGRT${index + 1}`,
  number: SIGRTMIN + index,
  action: 'terminate',
  description: 'Application-specific signal (realtime)',
  standard: 'posix',
})

const SIGRTMIN = 34
export const SIGRTMAX = 64
packages/core/src/libraries/human-signals/signals.js (Executable file, 34 lines)
@@ -0,0 +1,34 @@
import { constants } from 'node:os'

import { SIGNALS } from './core.js'
import { getRealtimeSignals } from './realtime.js'

// Retrieve list of known signals (including realtime) with information about
// them
export const getSignals = () => {
  const realtimeSignals = getRealtimeSignals()
  const signals = [...SIGNALS, ...realtimeSignals].map(normalizeSignal)
  return signals
}

// Normalize signal:
//  - `number`: signal numbers are OS-specific. This is taken into account by
//    `os.constants.signals`. However we provide a default `number` since some
//    signals are not defined for some OS.
//  - `forced`: set default to `false`
//  - `supported`: set value
const normalizeSignal = ({
  name,
  number: defaultNumber,
  description,
  action,
  forced = false,
  standard,
}) => {
  const {
    signals: { [name]: constantSignal },
  } = constants
  const supported = constantSignal !== undefined
  const number = supported ? constantSignal : defaultNumber
  return { name, number, description, supported, action, forced, standard }
}
packages/core/src/libraries/is-stream/index.js (Executable file, 29 lines)
@@ -0,0 +1,29 @@
export function isStream(stream) {
    return stream !== null
        && typeof stream === 'object'
        && typeof stream.pipe === 'function';
}

export function isWritableStream(stream) {
    return isStream(stream)
        && stream.writable !== false
        && typeof stream._write === 'function'
        && typeof stream._writableState === 'object';
}

export function isReadableStream(stream) {
    return isStream(stream)
        && stream.readable !== false
        && typeof stream._read === 'function'
        && typeof stream._readableState === 'object';
}

export function isDuplexStream(stream) {
    return isWritableStream(stream)
        && isReadableStream(stream);
}

export function isTransformStream(stream) {
    return isDuplexStream(stream)
        && typeof stream._transform === 'function';
}
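A hypothetical usage sketch (not part of the diff): the checks above are duck-typed, so they apply to any object exposing the Node stream internals they probe. The file names used here are placeholders.

import {createReadStream, createWriteStream} from 'node:fs';
import {isStream, isReadableStream, isWritableStream} from './index.js';

console.log(isStream(createReadStream('package.json')));     // true
console.log(isWritableStream(createWriteStream('out.tmp'))); // true
console.log(isReadableStream(createWriteStream('out.tmp'))); // false
console.log(isStream({pipe: 'not a function'}));             // false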
packages/core/src/libraries/lowdb/adapters/Memory.js (Executable file, 24 lines)
@@ -0,0 +1,24 @@
export class Memory {
  #data = null

  read() {
    return Promise.resolve(this.#data)
  }

  write(obj) {
    this.#data = obj
    return Promise.resolve()
  }
}

export class MemorySync {
  #data = null

  read() {
    return this.#data || null
  }

  write(obj) {
    this.#data = obj
  }
}
packages/core/src/libraries/lowdb/adapters/node/DataFile.js (Executable file, 51 lines)
@@ -0,0 +1,51 @@
import { TextFile, TextFileSync } from "./TextFile.js"

export class DataFile {
  #adapter
  #parse
  #stringify

  constructor(filename, { parse, stringify }) {
    this.#adapter = new TextFile(filename)
    this.#parse = parse
    this.#stringify = stringify
  }

  async read() {
    const data = await this.#adapter.read()
    if (data === null) {
      return null
    } else {
      return this.#parse(data)
    }
  }

  write(obj) {
    return this.#adapter.write(this.#stringify(obj))
  }
}

export class DataFileSync {
  #adapter
  #parse
  #stringify

  constructor(filename, { parse, stringify }) {
    this.#adapter = new TextFileSync(filename)
    this.#parse = parse
    this.#stringify = stringify
  }

  read() {
    const data = this.#adapter.read()
    if (data === null) {
      return null
    } else {
      return this.#parse(data)
    }
  }

  write(obj) {
    this.#adapter.write(this.#stringify(obj))
  }
}
packages/core/src/libraries/lowdb/adapters/node/JSONFile.js (Executable file, 19 lines)
@@ -0,0 +1,19 @@
import { DataFile, DataFileSync } from "./DataFile.js";

export class JSONFile extends DataFile {
  constructor(filename) {
    super(filename, {
      parse: JSON.parse,
      stringify: (data) => JSON.stringify(data, null, 2),
    });
  }
}

export class JSONFileSync extends DataFileSync {
  constructor(filename) {
    super(filename, {
      parse: JSON.parse,
      stringify: (data) => JSON.stringify(data, null, 2),
    });
  }
}
packages/core/src/libraries/lowdb/adapters/node/TextFile.js (Executable file, 65 lines)
@@ -0,0 +1,65 @@
import { readFileSync, renameSync, writeFileSync } from "node:fs"
import { readFile } from "node:fs/promises"
import path from "node:path"

import { Writer } from "../../steno"

export class TextFile {
  #filename
  #writer

  constructor(filename) {
    this.#filename = filename
    this.#writer = new Writer(filename)
  }

  async read() {
    let data

    try {
      data = await readFile(this.#filename, "utf-8")
    } catch (e) {
      if (e.code === "ENOENT") {
        return null
      }
      throw e
    }

    return data
  }

  write(str) {
    return this.#writer.write(str)
  }
}

export class TextFileSync {
  #tempFilename
  #filename

  constructor(filename) {
    this.#filename = filename
    const f = filename.toString()
    this.#tempFilename = path.join(path.dirname(f), `.${path.basename(f)}.tmp`)
  }

  read() {
    let data

    try {
      data = readFileSync(this.#filename, "utf-8")
    } catch (e) {
      if (e.code === "ENOENT") {
        return null
      }
      throw e
    }

    return data
  }

  write(str) {
    writeFileSync(this.#tempFilename, str)
    renameSync(this.#tempFilename, this.#filename)
  }
}
packages/core/src/libraries/lowdb/core/Low.js (Executable file, 48 lines)
@@ -0,0 +1,48 @@
function checkArgs(adapter, defaultData) {
  if (adapter === undefined) throw new Error("lowdb: missing adapter")
  if (defaultData === undefined) throw new Error("lowdb: missing default data")
}

export class Low {
  constructor(adapter, defaultData) {
    checkArgs(adapter, defaultData)
    this.adapter = adapter
    this.data = defaultData
  }

  async read() {
    const data = await this.adapter.read()
    if (data) this.data = data
  }

  async write() {
    if (this.data) await this.adapter.write(this.data)
  }

  async update(fn) {
    fn(this.data)
    await this.write()
  }
}

export class LowSync {
  constructor(adapter, defaultData) {
    checkArgs(adapter, defaultData)
    this.adapter = adapter
    this.data = defaultData
  }

  read() {
    const data = this.adapter.read()
    if (data) this.data = data
  }

  write() {
    if (this.data) this.adapter.write(this.data)
  }

  update(fn) {
    fn(this.data)
    this.write()
  }
}
packages/core/src/libraries/lowdb/presets/node.js (Executable file, 23 lines)
@@ -0,0 +1,23 @@
import { Memory, MemorySync } from "../adapters/Memory.js"
import { JSONFile, JSONFileSync } from "../adapters/node/JSONFile.js"
import { Low, LowSync } from "../core/Low.js"

export async function JSONFilePreset(filename, defaultData) {
  const adapter = process.env.NODE_ENV === "test" ? new Memory() : new JSONFile(filename)

  const db = new Low(adapter, defaultData)

  await db.read()

  return db
}

export function JSONFileSyncPreset(filename, defaultData) {
  const adapter = process.env.NODE_ENV === "test" ? new MemorySync() : new JSONFileSync(filename)

  const db = new LowSync(adapter, defaultData)

  db.read()

  return db
}
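A hypothetical usage sketch of the preset above (not part of the diff); the file name `db.json` and the `packages` shape are made up for illustration.

import {JSONFilePreset} from './node.js'

// Reads db.json if it exists, otherwise starts from the default data.
const db = await JSONFilePreset('db.json', {packages: []})
db.data.packages.push({name: 'example', version: '1.0.0'})
await db.write() // persisted atomically through steno's Writer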
src/main/lib/steno/index.ts → packages/core/src/libraries/lowdb/steno/index.js (Normal file → Executable file, 47 lines)
@@ -1,27 +1,22 @@
-import { PathLike } from 'node:fs'
-import { rename, writeFile } from 'node:fs/promises'
-import { basename, dirname, join } from 'node:path'
-import { fileURLToPath } from 'node:url'
+import { rename, writeFile } from "node:fs/promises"
+import { basename, dirname, join } from "node:path"
+import { fileURLToPath } from "node:url"
 
 // Returns a temporary file
 // Example: for /some/file will return /some/.file.tmp
-function getTempFilename(file: PathLike): string {
+function getTempFilename(file) {
   const f = file instanceof URL ? fileURLToPath(file) : file.toString()
   return join(dirname(f), `.${basename(f)}.tmp`)
 }
 
 // Retries an asynchronous operation with a delay between retries and a maximum retry count
-async function retryAsyncOperation(
-  fn: () => Promise<void>,
-  maxRetries: number,
-  delayMs: number,
-): Promise<void> {
+async function retryAsyncOperation(fn, maxRetries, delayMs) {
   for (let i = 0; i < maxRetries; i++) {
     try {
       return await fn()
     } catch (error) {
       if (i < maxRetries - 1) {
-        await new Promise((resolve) => setTimeout(resolve, delayMs))
+        await new Promise(resolve => setTimeout(resolve, delayMs))
       } else {
         throw error // Rethrow the error if max retries reached
       }
@@ -29,21 +24,17 @@ async function retryAsyncOperation(
     }
   }
 }
 
-type Resolve = () => void
-type Reject = (error: Error) => void
-type Data = Parameters<typeof writeFile>[1]
-
 export class Writer {
-  #filename: PathLike
-  #tempFilename: PathLike
+  #filename
+  #tempFilename
   #locked = false
-  #prev: [Resolve, Reject] | null = null
-  #next: [Resolve, Reject] | null = null
-  #nextPromise: Promise<void> | null = null
-  #nextData: Data | null = null
+  #prev = null
+  #next = null
+  #nextPromise = null
+  #nextData = null
 
   // File is locked, add data for later
-  #add(data: Data): Promise<void> {
+  #add(data) {
     // Only keep most recent data
     this.#nextData = data
 
@@ -59,18 +50,18 @@ export class Writer {
   }
 
   // File isn't locked, write data
-  async #write(data: Data): Promise<void> {
+  async #write(data) {
     // Lock file
     this.#locked = true
     try {
       // Atomic write
-      await writeFile(this.#tempFilename, data, 'utf-8')
+      await writeFile(this.#tempFilename, data, "utf-8")
       await retryAsyncOperation(
         async () => {
           await rename(this.#tempFilename, this.#filename)
         },
         10,
-        100,
+        100
       )
 
       // Call resolve
@@ -96,12 +87,12 @@ export class Writer {
     }
   }
 
-  constructor(filename: PathLike) {
+  constructor(filename) {
     this.#filename = filename
     this.#tempFilename = getTempFilename(filename)
   }
 
-  async write(data: Data): Promise<void> {
+  async write(data) {
     return this.#locked ? this.#add(data) : this.#write(data)
   }
 }
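A hypothetical usage sketch of the Writer shown in the diff above (not part of the diff); the file name and payloads are placeholders. Writer serializes concurrent writes and only keeps the most recent pending payload.

import {Writer} from './index.js'

const writer = new Writer('settings.json')
await Promise.all([
  writer.write('{"theme":"light"}'),
  writer.write('{"theme":"dark"}'), // only the latest pending payload needs to reach disk
])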
packages/core/src/libraries/mimic-function/index.js (Executable file, 71 added lines)
@@ -0,0 +1,71 @@
const copyProperty = (to, from, property, ignoreNonConfigurable) => {
    // `Function#length` should reflect the parameters of `to` not `from` since we keep its body.
    // `Function#prototype` is non-writable and non-configurable so can never be modified.
    if (property === 'length' || property === 'prototype') {
        return;
    }

    // `Function#arguments` and `Function#caller` should not be copied. They were reported to be present in `Reflect.ownKeys` for some devices in React Native (#41), so we explicitly ignore them here.
    if (property === 'arguments' || property === 'caller') {
        return;
    }

    const toDescriptor = Object.getOwnPropertyDescriptor(to, property);
    const fromDescriptor = Object.getOwnPropertyDescriptor(from, property);

    if (!canCopyProperty(toDescriptor, fromDescriptor) && ignoreNonConfigurable) {
        return;
    }

    Object.defineProperty(to, property, fromDescriptor);
};

// `Object.defineProperty()` throws if the property exists, is not configurable and either:
// - one its descriptors is changed
// - it is non-writable and its value is changed
const canCopyProperty = function (toDescriptor, fromDescriptor) {
    return toDescriptor === undefined || toDescriptor.configurable || (
        toDescriptor.writable === fromDescriptor.writable
        && toDescriptor.enumerable === fromDescriptor.enumerable
        && toDescriptor.configurable === fromDescriptor.configurable
        && (toDescriptor.writable || toDescriptor.value === fromDescriptor.value)
    );
};

const changePrototype = (to, from) => {
    const fromPrototype = Object.getPrototypeOf(from);
    if (fromPrototype === Object.getPrototypeOf(to)) {
        return;
    }

    Object.setPrototypeOf(to, fromPrototype);
};

const wrappedToString = (withName, fromBody) => `/* Wrapped ${withName}*/\n${fromBody}`;

const toStringDescriptor = Object.getOwnPropertyDescriptor(Function.prototype, 'toString');
const toStringName = Object.getOwnPropertyDescriptor(Function.prototype.toString, 'name');

// We call `from.toString()` early (not lazily) to ensure `from` can be garbage collected.
// We use `bind()` instead of a closure for the same reason.
// Calling `from.toString()` early also allows caching it in case `to.toString()` is called several times.
const changeToString = (to, from, name) => {
    const withName = name === '' ? '' : `with ${name.trim()}() `;
    const newToString = wrappedToString.bind(null, withName, from.toString());
    // Ensure `to.toString.toString` is non-enumerable and has the same `same`
    Object.defineProperty(newToString, 'name', toStringName);
    Object.defineProperty(to, 'toString', { ...toStringDescriptor, value: newToString });
};

export default function mimicFunction(to, from, { ignoreNonConfigurable = false } = {}) {
    const { name } = to;

    for (const property of Reflect.ownKeys(from)) {
        copyProperty(to, from, property, ignoreNonConfigurable);
    }

    changePrototype(to, from);
    changeToString(to, from, name);

    return to;
}
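A minimal usage sketch of the module above (illustrative, not part of this changeset; the relative import path is assumed). mimicFunction copies the wrapped function's own properties (except length and prototype), aligns the prototype chain, and replaces toString, so a wrapper introspects like the function it wraps.

// Sketch only: the import path follows the file added above.
import mimicFunction from "./packages/core/src/libraries/mimic-function/index.js"

function add(a, b) {
    return a + b
}

// A wrapper that logs before delegating to the original function.
const logged = (...args) => {
    console.log("calling add with", args)
    return add(...args)
}

mimicFunction(logged, add)

console.log(logged.name)    // "add" (copied from the wrapped function)
console.log(`${logged}`)    // source of add behind a "/* Wrapped ... */" banner
console.log(logged(2, 3))   // logs the call, then prints 5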
packages/core/src/libraries/npm-run-path/index.js (Executable file, 51 added lines)
@@ -0,0 +1,51 @@
import process from 'node:process';
import path from 'node:path';
import url from 'node:url';

function pathKey(options = {}) {
    const {
        env = process.env,
        platform = process.platform
    } = options;

    if (platform !== 'win32') {
        return 'PATH';
    }

    return Object.keys(env).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path';
}

export function npmRunPath(options = {}) {
    const {
        cwd = process.cwd(),
        path: path_ = process.env[pathKey()],
        execPath = process.execPath,
    } = options;

    let previous;
    const execPathString = execPath instanceof URL ? url.fileURLToPath(execPath) : execPath;
    const cwdString = cwd instanceof URL ? url.fileURLToPath(cwd) : cwd;
    let cwdPath = path.resolve(cwdString);
    const result = [];

    while (previous !== cwdPath) {
        result.push(path.join(cwdPath, 'node_modules/.bin'));
        previous = cwdPath;
        cwdPath = path.resolve(cwdPath, '..');
    }

    // Ensure the running `node` binary is used.
    result.push(path.resolve(cwdString, execPathString, '..'));

    return [...result, path_].join(path.delimiter);
}

export function npmRunPathEnv({ env = process.env, ...options } = {}) {
    env = { ...env };

    const path = pathKey({ env });
    options.path = env[path];
    env[path] = npmRunPath(options);

    return env;
}
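A usage sketch (illustrative, not part of this changeset; the import path is assumed). npmRunPath builds a PATH string with every ancestor node_modules/.bin prepended plus the directory of the running node binary; npmRunPathEnv returns a copy of the environment with that PATH already applied, ready for a child process.

// Sketch only: the import path follows the file added above.
import { npmRunPath, npmRunPathEnv } from "./packages/core/src/libraries/npm-run-path/index.js"

// PATH with ./node_modules/.bin, ../node_modules/.bin, ... prepended.
console.log(npmRunPath({ cwd: process.cwd() }))

// Environment object ready to hand to child_process.spawn or an execa-style spawner,
// so locally installed CLIs resolve before globally installed ones.
const env = npmRunPathEnv()
console.log(env.PATH ?? env.Path)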
packages/core/src/libraries/onetime/index.js (Executable file, 41 added lines)
@@ -0,0 +1,41 @@
import mimicFunction from '../mimic-function';

const calledFunctions = new WeakMap();

const onetime = (function_, options = {}) => {
    if (typeof function_ !== 'function') {
        throw new TypeError('Expected a function');
    }

    let returnValue;
    let callCount = 0;
    const functionName = function_.displayName || function_.name || '<anonymous>';

    const onetime = function (...arguments_) {
        calledFunctions.set(onetime, ++callCount);

        if (callCount === 1) {
            returnValue = function_.apply(this, arguments_);
            function_ = undefined;
        } else if (options.throw === true) {
            throw new Error(`Function \`${functionName}\` can only be called once`);
        }

        return returnValue;
    };

    mimicFunction(onetime, function_);
    calledFunctions.set(onetime, callCount);

    return onetime;
};

onetime.callCount = function_ => {
    if (!calledFunctions.has(function_)) {
        throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`);
    }

    return calledFunctions.get(function_);
};

export default onetime;
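A usage sketch (illustrative, not part of this changeset; the import path is assumed). onetime wraps a function so only the first call runs; later calls return the cached result, or throw when { throw: true } is passed, and onetime.callCount reports how many times the wrapper was invoked.

// Sketch only: the import path follows the file added above.
import onetime from "./packages/core/src/libraries/onetime/index.js"

const init = onetime(() => {
    console.log("expensive setup runs exactly once")
    return Date.now()
})

const first = init()
const second = init()                  // no second log, same cached value
console.log(first === second)          // true
console.log(onetime.callCount(init))   // 2

const strict = onetime(() => "ok", { throw: true })
strict()
// strict()  // would throw: Function `<anonymous>` can only be called once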
packages/core/src/libraries/strip-final-newline/index.js (Executable file, 26 added lines)
@@ -0,0 +1,26 @@
export default function stripFinalNewline(input) {
    if (typeof input === 'string') {
        return stripFinalNewlineString(input);
    }

    if (!(ArrayBuffer.isView(input) && input.BYTES_PER_ELEMENT === 1)) {
        throw new Error('Input must be a string or a Uint8Array');
    }

    return stripFinalNewlineBinary(input);
}

const stripFinalNewlineString = input =>
    input.at(-1) === LF
        ? input.slice(0, input.at(-2) === CR ? -2 : -1)
        : input;

const stripFinalNewlineBinary = input =>
    input.at(-1) === LF_BINARY
        ? input.subarray(0, input.at(-2) === CR_BINARY ? -2 : -1)
        : input;

const LF = '\n';
const LF_BINARY = LF.codePointAt(0);
const CR = '\r';
const CR_BINARY = CR.codePointAt(0);
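A usage sketch (illustrative, not part of this changeset; the import path is assumed). Only the final LF or CRLF is removed; earlier newlines are preserved, and Uint8Array input is handled the same way.

// Sketch only: the import path follows the file added above.
import stripFinalNewline from "./packages/core/src/libraries/strip-final-newline/index.js"

console.log(JSON.stringify(stripFinalNewline("relic\n")))     // "relic"
console.log(JSON.stringify(stripFinalNewline("relic\r\n")))   // "relic"
console.log(JSON.stringify(stripFinalNewline("relic\n\n")))   // "relic\n" (only the last newline goes)

const bytes = new TextEncoder().encode("binary output\n")
console.log(new TextDecoder().decode(stripFinalNewline(bytes)))  // "binary output"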
packages/core/src/logger.js (Executable file, 45 added lines)
@@ -0,0 +1,45 @@
import winston from "winston"
import WinstonTransport from "winston-transport"
import colors from "cli-color"

const servicesToColor = {
    "CORE": {
        color: "whiteBright",
        background: "bgBlackBright",
    },
}

const paintText = (level, service, ...args) => {
    let { color, background } = servicesToColor[service ?? "CORE"] ?? servicesToColor["CORE"]

    if (level === "error") {
        color = "whiteBright"
        background = "bgRedBright"
    }

    return colors[background][color](...args)
}

const format = winston.format.printf(({ timestamp, service = "CORE", level, message, }) => {
    return `${paintText(level, service, `(${level}) [${service}]`)} > ${message}`
})

class EventBusTransport extends WinstonTransport {
    log(info, next) {
        global._relic_eventBus.emit(`logger:new`, info)

        next()
    }
}

export default winston.createLogger({
    format: winston.format.combine(
        winston.format.timestamp(),
        format
    ),
    transports: [
        new winston.transports.Console(),
        new EventBusTransport(),
        //new winston.transports.File({ filename: "error.log", level: "error" }),
        //new winston.transports.File({ filename: "combined.log" }),
    ],
})
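A usage sketch (illustrative, not part of this changeset). The logger prints colored "(level) [SERVICE] > message" lines and also re-emits every record on global._relic_eventBus; the core installs that bus elsewhere, so it is stubbed here to keep the sketch self-contained.

// Sketch only: the import path follows the file added above.
import { EventEmitter } from "node:events"
import Logger from "./packages/core/src/logger.js"

// Stub the event bus that EventBusTransport emits on.
global._relic_eventBus = new EventEmitter()
global._relic_eventBus.on("logger:new", (info) => {
    // e.g. forward log records to a GUI process
})

const Log = Logger.child({ service: "EXAMPLE" })
Log.info("hello from an example service")
Log.error("and this one is painted with the error colors")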
packages/core/src/manifest/libraries.js (Executable file, 23 added lines)
@@ -0,0 +1,23 @@
import PublicInternalLibraries from "./libs"

const isAClass = (x) => x && typeof x === "function" && x.prototype && typeof x.prototype.constructor === "function"

export default async (dependencies, bindCtx) => {
    const libraries = {}

    for await (const lib of dependencies) {
        if (PublicInternalLibraries[lib]) {
            if (typeof PublicInternalLibraries[lib] === "function" && isAClass(PublicInternalLibraries[lib])) {
                libraries[lib] = new PublicInternalLibraries[lib](bindCtx)

                if (libraries[lib].initialize) {
                    await libraries[lib].initialize()
                }
            } else {
                libraries[lib] = PublicInternalLibraries[lib]
            }
        }
    }

    return libraries
}
packages/core/src/manifest/libs/auth/index.js (Executable file, 61 added lines)
@@ -0,0 +1,61 @@
import open from "open"
import axios from "axios"
import ManifestAuthDB from "../../../classes/ManifestAuthDB"
import UnauthorizeMethod from "../../../handlers/deauthorize"

export default class Auth {
    constructor(ctx) {
        this.manifest = ctx.manifest
    }

    async get() {
        const storagedData = await ManifestAuthDB.get(this.manifest.id)

        if (storagedData && this.manifest.authService) {
            if (!this.manifest.authService.getter) {
                return storagedData
            }

            const result = await axios({
                method: "GET",
                url: this.manifest.authService.getter,
                headers: {
                    "Content-Type": "application/json",
                    "Authorization": `Bearer ${storagedData}`
                }
            }).catch((err) => {
                global._relic_eventBus.emit("auth:getter:error", err)

                try {
                    UnauthorizeMethod(this.manifest.id).then(() => {
                        this.request()
                    })
                } catch (error) {
                    console.error(error)
                }

                return err
            })

            if (result instanceof Error) {
                throw result
            }

            console.log(result.data)

            return result.data
        }

        return storagedData
    }

    request() {
        if (!this.manifest.authService || !this.manifest.authService.fetcher) {
            return false
        }

        const authURL = this.manifest.authService.fetcher

        open(authURL)
    }
}
packages/core/src/manifest/libs/extract/index.js (Executable file, 34 added lines)
@@ -0,0 +1,34 @@
import extractFile from "../../../utils/extractFile"
import { execa } from "../../../libraries/execa"
import Vars from "../../../vars"

export default class Extract {
    async extractFull(file, dest, { password } = {}) {
        const args = [
            "x",
            "-y",
        ]

        if (password) {
            args.push(`-p"${password}"`)
        }

        args.push(`-o"${dest}"`)

        args.push(`"${file}"`)

        const cmd = `${Vars.sevenzip_bin} ${args.join(" ")}`

        console.log(cmd)

        await execa(cmd, {
            shell: true,
            stdout: "inherit",
            stderr: "inherit",
        })
    }

    async autoExtract(file, dest) {
        return await extractFile(file, dest)
    }
}
packages/core/src/manifest/libs/fs/index.js (Executable file, 71 added lines)
@@ -0,0 +1,71 @@
import fs from "node:fs"
import path from "node:path"

// Protect from reading or write operations outside of the package directory
export default class SecureFileSystem {
    constructor(ctx) {
        this.jailPath = ctx.manifest.install_path
    }

    checkOutsideJail(target) {
        // if (!path.resolve(target).startsWith(this.jailPath)) {
        //     throw new Error("Cannot access resource outside of package directory")
        // }
    }

    readFileSync(destination, options) {
        this.checkOutsideJail(destination)

        return fs.readFileSync(destination, options)
    }

    copyFileSync(from, to) {
        this.checkOutsideJail(from)
        this.checkOutsideJail(to)

        return fs.copyFileSync(from, to)
    }

    writeFileSync(destination, data, options) {
        this.checkOutsideJail(destination)

        return fs.writeFileSync(destination, data, options)
    }

    // don't need to check finalPath
    existsSync(...args) {
        return fs.existsSync(...args)
    }

    async rename(from, to) {
        this.checkOutsideJail(from)
        this.checkOutsideJail(to)

        return await fs.promises.rename(from, to)
    }

    async writeFile(path, data, options) {
        this.checkOutsideJail(path)
        return await fs.promises.writeFile(path, data, options)
    }

    async readDir(path) {
        this.checkOutsideJail(path)
        return await fs.promises.readdir(path)
    }

    async rm(path, options) {
        this.checkOutsideJail(path)
        return await fs.promises.rm(path, options)
    }

    async mkdir(path, options) {
        this.checkOutsideJail(path)
        return await fs.promises.mkdir(path, options)
    }

    async stat(path) {
        this.checkOutsideJail(path)
        return await fs.promises.stat(path)
    }
}
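A usage sketch (illustrative, not part of this changeset; the import path and install_path value are placeholders). The ctx shape mirrors what the library loader passes to class libraries (a manifest carrying install_path). Note that checkOutsideJail() is commented out in this revision, so the jail is declared but not yet enforced.

// Sketch only.
import SecureFileSystem from "./packages/core/src/manifest/libs/fs/index.js"

const sfs = new SecureFileSystem({ manifest: { install_path: "/tmp/relic-example" } })

await sfs.mkdir("/tmp/relic-example", { recursive: true })
await sfs.writeFile("/tmp/relic-example/hello.txt", "hi\n")
console.log(await sfs.readDir("/tmp/relic-example"))   // [ "hello.txt" ]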
packages/core/src/manifest/libs/index.js (Executable file, 17 added lines)
@@ -0,0 +1,17 @@
import Open from "./open"
import Path from "./path"
import Fs from "./fs"
import Auth from "./auth"
import Extract from "./extract"

// Third party libraries
import Mcl from "./mcl"

export default {
    fs: Fs,
    path: Path,
    open: Open,
    auth: Auth,
    extract: Extract,
    mcl: Mcl,
}
src/main/lib/mcl/authenticator.js → packages/core/src/manifest/libs/mcl/authenticator.js (Normal file → Executable file, 4 changed lines)
@@ -1,5 +1,5 @@
-const request = require('request')
-const { v3 } = require('uuid')
+import request from "request"
+import {v3} from "uuid"

let uuid
let api_url = 'https://authserver.mojang.com'
src/main/lib/mcl/handler.js → packages/core/src/manifest/libs/mcl/handler.js (Normal file → Executable file, 12 changed lines)
@@ -1,9 +1,9 @@
-const fs = require('fs')
-const path = require('path')
-const request = require('request')
-const checksum = require('checksum')
-const Zip = require('adm-zip')
-const child = require('child_process')
+import fs from "node:fs"
+import path from "node:path"
+import child from "node:child_process"
+import request from "request"
+import checksum from "checksum"
+import Zip from "adm-zip"
let counter = 0

export default class Handler {
src/main/public_libraries/mcl/index.js → packages/core/src/manifest/libs/mcl/index.js (Normal file → Executable file, 23 changed lines)
@@ -1,5 +1,9 @@
-import Client from "../../lib/mcl/launcher"
-import Authenticator from "../../lib/mcl/authenticator"
+import Logger from "../../../logger"
+
+import Client from "./launcher"
+import Authenticator from "./authenticator"
+
+const Log = Logger.child({ service: "MCL" })

export default class MCL {
    /**
@@ -9,7 +13,7 @@ export default class MCL {
     * @param {string} password - the password of the user
     * @return {Promise<Object>} the authentication information
     */
-    static async auth(username, password) {
+    async auth(username, password) {
        return await Authenticator.getAuth(username, password)
    }

@@ -19,7 +23,7 @@ export default class MCL {
     * @param {Object} opts - The options to be passed for launching the client.
     * @return {Promise<Client>} A promise that resolves with the launched client.
     */
-    static async launch(opts, callbacks) {
+    async launch(opts, callbacks) {
        const launcher = new Client()

        launcher.on("debug", (e) => console.log(e))
@@ -27,6 +31,17 @@ export default class MCL {
        launcher.on("close", (e) => console.log(e))
        launcher.on("error", (e) => console.log(e))

+        if (typeof callbacks === "undefined") {
+            callbacks = {
+                install: () => {
+                    Log.info("Downloading Minecraft assets...")
+                },
+                init_assets: () => {
+                    Log.info("Initializing Minecraft assets...")
+                }
+            }
+        }
+
        await launcher.launch(opts, callbacks)

        return launcher
src/main/lib/mcl/launcher.js → packages/core/src/manifest/libs/mcl/launcher.js (Normal file → Executable file, no content changes)
packages/core/src/manifest/libs/open/index.js (Executable file, 14 added lines)
@@ -0,0 +1,14 @@
import Logger from "../../../logger"

import open, { apps } from "open"

const Log = Logger.child({ service: "OPEN-LIB" })

export default {
    spawn: async (...args) => {
        Log.info("Spawning with args >", args)

        return await open(...args)
    },
    apps: apps,
}
packages/core/src/manifest/libs/path/index.js (Executable file, 3 added lines)
@@ -0,0 +1,3 @@
import path from "node:path"

export default path
packages/core/src/manifest/reader.js (Executable file, 60 added lines)
@@ -0,0 +1,60 @@
import fs from "node:fs"
import path from "node:path"
import axios from "axios"
import checksum from "checksum"

import Vars from "../vars"

export async function readManifest(manifest) {
    // check if manifest is a directory or a url
    const urlRegex = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)/gi

    const target = manifest?.remote_url ?? manifest

    if (!fs.existsSync(Vars.cache_path)) {
        fs.mkdirSync(Vars.cache_path, { recursive: true })
    }

    if (urlRegex.test(target)) {
        const { data: code } = await axios.get(target)

        const manifestChecksum = checksum(code, { algorithm: "md5" })

        const cachedManifest = path.join(Vars.cache_path, `${manifestChecksum}.rmanifest`)

        await fs.promises.writeFile(cachedManifest, code)

        return {
            remote_manifest: manifest,
            local_manifest: cachedManifest,
            is_catched: true,
            code: code,
        }
    } else {
        if (!fs.existsSync(target)) {
            throw new Error(`Manifest not found: ${target}`)
        }

        if (!fs.statSync(target).isFile()) {
            throw new Error(`Manifest is not a file: ${target}`)
        }

        // copy to cache
        const cachedManifest = path.join(Vars.cache_path, path.basename(target))

        await fs.promises.copyFile(target, cachedManifest)

        if (!fs.existsSync(cachedManifest)) {
            throw new Error(`Manifest copy failed: ${target}`)
        }

        return {
            remote_manifest: undefined,
            local_manifest: target,
            is_catched: false,
            code: fs.readFileSync(target, "utf8"),
        }
    }
}

export default readManifest
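A usage sketch (illustrative, not part of this changeset; the URL and file name are placeholders). Remote manifests are downloaded and cached under Vars.cache_path before being returned; local paths are validated and copied there, and both branches hand back the manifest source in the code field.

// Sketch only.
import readManifest from "./packages/core/src/manifest/reader.js"

const remote = await readManifest("https://example.com/my-package.rmanifest")
console.log(remote.local_manifest, remote.is_catched)   // cached copy path, true

const local = await readManifest("./my-package.rmanifest")
console.log(local.code.slice(0, 80))                    // first characters of the manifest source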
packages/core/src/manifest/vm.js (Executable file, 99 added lines)
@@ -0,0 +1,99 @@
import Logger from "../logger"

import os from "node:os"
import vm from "node:vm"
import path from "node:path"

import DB from "../db"
import ManifestConfigManager from "../classes/ManifestConfig"

import resolveOs from "../utils/resolveOs"
import FetchLibraries from "./libraries"

import Settings from "../classes/Settings"

import Vars from "../vars"

async function BuildManifest(baseClass, context, { soft = false } = {}) {
    // try to find install_path on db
    const pkg = await DB.getPackages(baseClass.id)

    if (pkg) {
        if (pkg.install_path) {
            context.install_path = pkg.install_path
        }
    } else {
        const packagesPath = await Settings.get("packages_path") ?? Vars.packages_path

        // inject install_path
        context.install_path = path.resolve(packagesPath, baseClass.id)
    }

    baseClass.install_path = context.install_path

    if (soft === true) {
        return baseClass
    }

    const configManager = new ManifestConfigManager(baseClass.id)

    await configManager.initialize()

    let dependencies = []

    if (Array.isArray(baseClass.useLib)) {
        dependencies = [
            ...dependencies,
            ...baseClass.useLib
        ]
    }

    // modify context
    context.Log = Logger.child({ service: `VM|${baseClass.id}` })
    context.Lib = await FetchLibraries(dependencies, {
        manifest: baseClass,
        install_path: context.install_path,
    })
    context.Config = configManager

    // Construct the instance
    const instance = new baseClass()

    instance.install_path = context.install_path

    return instance
}

function injectUseManifest(code) {
    return code + "\n\nuse(Manifest);"
}

export default async (code, { soft = false } = {}) => {
    return await new Promise(async (resolve, reject) => {
        try {
            code = injectUseManifest(code)

            const context = {
                Vars: Vars,
                Log: Logger.child({ service: "MANIFEST_VM" }),
                use: (baseClass) => {
                    return BuildManifest(
                        baseClass,
                        context,
                        {
                            soft: soft,
                        }
                    ).then(resolve)
                },
                os_string: resolveOs(),
                arch: os.arch(),
            }

            vm.createContext(context)

            await vm.runInContext(code, context)
        } catch (error) {
            reject(error)
        }
    })
}
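A sketch of the manifest shape this VM appears to expect, inferred only from the file above (the method names an installer would actually call are not visible in this diff, so they are purely illustrative). The evaluated code defines a class named Manifest; injectUseManifest() appends use(Manifest), and BuildManifest reads the static id and useLib, resolves install_path, exposes Log, Lib and Config on the sandbox, and finally instantiates the class.

// example.rmanifest (sketch): evaluated inside vm.runInContext with Vars, Log, Lib,
// Config, os_string and arch available as globals of the sandbox context.
class Manifest {
    static id = "example-app"               // used to look up install_path in the DB
    static useLib = ["fs", "path", "open"]  // injected as Lib.fs, Lib.path, Lib.open

    // Illustrative method: any code running after BuildManifest can rely on
    // install_path being set on the instance.
    async listInstalledFiles() {
        Log.info(`working inside ${this.install_path}`)
        return await Lib.fs.readDir(this.install_path)
    }
}
// `use(Manifest);` is appended automatically by injectUseManifest()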
packages/core/src/prerequisites.js (Executable file, 105 added lines)
@@ -0,0 +1,105 @@
import resolveRemoteBinPath from "./utils/resolveRemoteBinPath"
import Vars from "./vars"
import path from "node:path"
import axios from "axios"

const baseURL = "https://storage.ragestudio.net/rstudio/binaries"

export default [
    {
        id: "7z-bin",
        finalBin: Vars.sevenzip_bin,
        url: resolveRemoteBinPath(`${baseURL}/7z-full`, "7z.zip"),
        destination: path.resolve(Vars.binaries_path, "7z.zip"),
        extract: path.resolve(Vars.binaries_path, "7z-bin"),
        rewriteExecutionPermission: true,
        deleteBeforeExtract: true,
    },
    {
        id: "git-bin",
        finalBin: Vars.git_bin,
        url: resolveRemoteBinPath(`${baseURL}/git`, "git-bundle-2.4.0.zip"),
        destination: path.resolve(Vars.binaries_path, "git-bundle.zip"),
        extract: path.resolve(Vars.binaries_path, "git-bin"),
        requireOs: ["win32"],
        rewriteExecutionPermission: true,
        deleteBeforeExtract: true,
    },
    {
        id: "aria2",
        finalBin: Vars.aria2_bin,
        url: async (os, arch) => {
            return `https://storage.ragestudio.net/rstudio/binaries/aria2/${os}/${arch}/${os === "win32" ? "aria2c.exe" : "aria2c"}`
        },
        destination: Vars.aria2_bin,
        rewriteExecutionPermission: Vars.aria2_bin,
    },
    {
        id: "java22_jre_bin",
        finalBin: Vars.java22_jre_bin,
        url: async (os, arch) => {
            const { data } = await axios({
                method: "GET",
                url: "https://api.azul.com/metadata/v1/zulu/packages",
                params: {
                    arch: arch,
                    java_version: "22",
                    os: os === "win32" ? "windows" : os,
                    archive_type: "zip",
                    javafx_bundled: "false",
                    java_package_type: "jre",
                    page_size: "1",
                }
            })

            return data[0].download_url
        },
        destination: path.resolve(Vars.binaries_path, "java22-jre.zip"),
        extract: path.resolve(Vars.binaries_path, "java22_jre_bin"),
        extractTargetFromName: true,
        moveDirs: [
            {
                requireOs: ["macos"],
                from: path.resolve(Vars.binaries_path, "java22_jre_bin", "zulu-22.jre", "Contents"),
                to: path.resolve(Vars.binaries_path, "java22_jre_bin", "Contents"),
                deleteParentBefore: true
            }
        ],
        rewriteExecutionPermission: path.resolve(Vars.binaries_path, "java22_jre_bin"),
        deleteBeforeExtract: true,
    },
    {
        id: "java17_jre_bin",
        finalBin: Vars.java17_jre_bin,
        url: async (os, arch) => {
            const { data } = await axios({
                method: "GET",
                url: "https://api.azul.com/metadata/v1/zulu/packages",
                params: {
                    arch: arch,
                    java_version: "17",
                    os: os === "win32" ? "windows" : os,
                    archive_type: "zip",
                    javafx_bundled: "false",
                    java_package_type: "jre",
                    page_size: "1",
                }
            })

            return data[0].download_url
        },
        destination: path.resolve(Vars.binaries_path, "java17-jre.zip"),
        extract: path.resolve(Vars.binaries_path, "java17_jre_bin"),
        extractTargetFromName: true,
        moveDirs: [
            {
                requireOs: ["macos"],
                from: path.resolve(Vars.binaries_path, "java17_jre_bin", "zulu-17.jre", "Contents"),
                to: path.resolve(Vars.binaries_path, "java17_jre_bin", "Contents"),
                deleteParentBefore: true
            }
        ],
        rewriteExecutionPermission: path.resolve(Vars.binaries_path, "java17_jre_bin"),
        deleteBeforeExtract: true,
    },
]
packages/core/src/utils/chmodRecursive.js (Executable file, 16 added lines)
@@ -0,0 +1,16 @@
import fs from "node:fs"
import path from "node:path"

async function chmodRecursive(target, mode) {
    if (fs.lstatSync(target).isDirectory()) {
        const files = await fs.promises.readdir(target, { withFileTypes: true })

        for (const file of files) {
            await chmodRecursive(path.join(target, file.name), mode)
        }
    } else {
        await fs.promises.chmod(target, mode)
    }
}

export default chmodRecursive
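A usage sketch (illustrative, not part of this changeset; the path is a placeholder). This is the kind of pass a rewriteExecutionPermission step needs after unpacking a binary archive: every file under the target gets the given mode, while directories are only traversed.

// Sketch only.
import chmodRecursive from "./packages/core/src/utils/chmodRecursive.js"

await chmodRecursive("/tmp/relic-example/7z-bin", 0o755)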
src/main/utils/extractFile.js → packages/core/src/utils/extractFile.js (Normal file → Executable file, 10 changed lines)
@@ -1,3 +1,5 @@
+import Logger from "../logger"
+
import fs from "node:fs"
import path from "node:path"
import { pipeline as streamPipeline } from "node:stream/promises"
@@ -7,10 +9,12 @@ import unzipper from "unzipper"

import Vars from "../vars"

+const Log = Logger.child({ service: "EXTRACTOR" })
+
export async function extractFile(file, dest) {
    const ext = path.extname(file)

-    console.log(`extractFile() | Extracting ${file} to ${dest}`)
+    Log.info(`Extracting ${file} to ${dest}`)

    switch (ext) {
        case ".zip": {
@@ -24,13 +28,13 @@ export async function extractFile(file, dest) {
        }
        case ".7z": {
            await extractFull(file, dest, {
-                $bin: Vars.sevenzip_path,
+                $bin: Vars.sevenzip_bin,
            })
            break
        }
        case ".gz": {
            await extractFull(file, dest, {
-                $bin: Vars.sevenzip_path
+                $bin: Vars.sevenzip_bin
            })
            break
        }
src/main/utils/parseStringVars.js → packages/core/src/utils/parseStringVars.js (Normal file → Executable file, 2 changed lines)
@@ -8,7 +8,7 @@ export default function parseStringVars(str, pkg) {
        name: pkg.name,
        version: pkg.version,
        install_path: pkg.install_path,
-        remote_url: pkg.remote_url,
+        remote: pkg.remote,
    }

    const regex = /%([^%]+)%/g
packages/core/src/utils/readDirRecurse.js (Executable file, 25 added lines)
@@ -0,0 +1,25 @@
import fs from "node:fs"
import path from "node:path"

async function readDirRecurse(dir, maxDepth = 3, current = 0) {
    if (current > maxDepth) {
        return []
    }

    const files = await fs.promises.readdir(dir)

    const promises = files.map(async (file) => {
        const filePath = path.join(dir, file)
        const stat = await fs.promises.stat(filePath)

        if (stat.isDirectory()) {
            return readDirRecurse(filePath, maxDepth, current + 1)
        }

        return filePath
    })

    return (await Promise.all(promises)).flat()
}

export default readDirRecurse
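A usage sketch (illustrative, not part of this changeset; the starting directory is a placeholder). The helper returns a flat array of file paths, descending at most maxDepth (default 3) directory levels; directories themselves are not listed, only the files inside them.

// Sketch only.
import readDirRecurse from "./packages/core/src/utils/readDirRecurse.js"

const files = await readDirRecurse("./packages/core/src")
console.log(files.length, files.slice(0, 5))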
packages/core/src/utils/resolveOs.js (Executable file, 17 added lines)
@@ -0,0 +1,17 @@
import os from "node:os"

export default () => {
    if (os.platform() === "win32") {
        return "windows"
    }

    if (os.platform() === "darwin") {
        return "macos"
    }

    if (os.platform() === "linux") {
        return "linux"
    }

    return os.platform()
}
packages/core/src/utils/resolveRemoteBinPath.js (Executable file, 15 added lines)
@@ -0,0 +1,15 @@
export default (pre, post) => {
    let url = null

    if (process.platform === "darwin") {
        url = `${pre}/darwin/${process.arch}/${post}`
    }
    else if (process.platform === "win32") {
        url = `${pre}/win32/${process.arch}/${post}`
    }
    else {
        url = `${pre}/linux/${process.arch}/${post}`
    }

    return url
}
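A usage sketch (illustrative, not part of this changeset). The helper splices the current platform and CPU architecture between a base URL and a file name, which is how the 7-Zip and Git entries in prerequisites.js build their download URLs.

// Sketch only.
import resolveRemoteBinPath from "./packages/core/src/utils/resolveRemoteBinPath.js"

// On darwin/arm64 this prints:
// https://storage.ragestudio.net/rstudio/binaries/7z-full/darwin/arm64/7z.zip
console.log(resolveRemoteBinPath("https://storage.ragestudio.net/rstudio/binaries/7z-full", "7z.zip"))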
packages/core/src/utils/resolveUserDataPath.js (Executable file, 9 added lines)
@@ -0,0 +1,9 @@
import path from "node:path"
import upath from "upath"

export default () => {
    return upath.normalizeSafe(path.resolve(
        process.env.APPDATA ||
        (process.platform == "darwin" ? process.env.HOME + "/Library/Preferences" : process.env.HOME + "/.local/share"),
    ))
}
packages/core/src/vars.js (Executable file, 67 added lines)
@@ -0,0 +1,67 @@
import path from "node:path"
import upath from "upath"
import resolveUserDataPath from "./utils/resolveUserDataPath"

const isWin = process.platform.includes("win32")
const isMac = process.platform.includes("darwin")

const runtimeName = "rs-relic"

const userdata_path = resolveUserDataPath()
const runtime_path = upath.normalizeSafe(path.join(userdata_path, runtimeName))
const cache_path = upath.normalizeSafe(path.join(runtime_path, "cache"))
const packages_path = upath.normalizeSafe(path.join(runtime_path, "packages"))
const binaries_path = upath.normalizeSafe(
    path.resolve(runtime_path, "binaries"),
)
const db_path = upath.normalizeSafe(path.resolve(runtime_path, "db.json"))

const binaries = {
    sevenzip_bin: upath.normalizeSafe(
        path.resolve(binaries_path, "7z-bin", isWin ? "7za.exe" : "7z"),
    ),
    git_bin: upath.normalizeSafe(
        path.resolve(
            binaries_path,
            "git-bin",
            "bin",
            isWin ? "git.exe" : "git",
        ),
    ),
    aria2_bin: upath.normalizeSafe(
        path.resolve(binaries_path, "aria2", isWin ? "aria2c.exe" : "aria2c"),
    ),
    java22_jre_bin: upath.normalizeSafe(
        path.resolve(
            binaries_path,
            "java22_jre_bin",
            isMac
                ? "Contents/Home/bin/java"
                : isWin
                    ? "bin/java.exe"
                    : "bin/java",
        ),
    ),
    java17_jre_bin: upath.normalizeSafe(
        path.resolve(
            binaries_path,
            "java17_jre_bin",
            isMac
                ? "Contents/Home/bin/java"
                : isWin
                    ? "bin/java.exe"
                    : "bin/java",
        ),
    ),
}

export default {
    runtimeName,
    db_path,
    userdata_path,
    runtime_path,
    cache_path,
    packages_path,
    binaries_path,
    ...binaries,
}
Some files were not shown because too many files have changed in this diff.