diff --git a/README.md b/README.md index d42fb3cec..71250a610 100644 --- a/README.md +++ b/README.md @@ -145,8 +145,8 @@ It looks like this: `releases.js`: ```js -module.exports = function (request) { - return github(request, owner, repo).then(function (all) { +module.exports = function () { + return github(null, owner, repo).then(function (all) { // if you need to do something special, you can do it here // ... return all; diff --git a/_common/brew.js b/_common/brew.js index cbe3ac507..04d7a1431 100644 --- a/_common/brew.js +++ b/_common/brew.js @@ -1,49 +1,56 @@ 'use strict'; +let Fetcher = require('../_common/fetcher.js'); + /** * Gets releases from 'brew'. * * @param {null} _ * @param {string} formula - * @returns {PromiseLike | Promise} + * @returns {Promise} */ -function getDistributables(_, formula) { +async function getDistributables(_, formula) { if (!formula) { return Promise.reject('missing formula for brew'); } - return fetch('https://formulae.brew.sh/api/formula/' + formula + '.json') - .then(function (resp) { - if (!resp.ok) { - throw new Error(`HTTP error! Status: ${resp.status}`); - } - return resp.json(); // Parse JSON response - }) - .then(function (body) { - var ver = body.versions.stable; - var dl = ( - body.bottle.stable.files.high_sierra || - body.bottle.stable.files.catalina - ).url.replace(new RegExp(ver.replace(/\./g, '\\.'), 'g'), '{{ v }}'); - return [ - { - version: ver, - download: dl.replace(/{{ v }}/g, ver), - }, - ].concat( - body.versioned_formulae.map(function (f) { - var ver = f.replace(/.*@/, ''); - return { - version: ver, - download: dl, - }; - }), - ); - }) - .catch(function (err) { - console.error('Error fetching MariaDB versions (brew)'); - console.error(err); - return []; + + let resp; + try { + let url = `https://formulae.brew.sh/api/formula/${formula}.json`; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/json' }, }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch '${formula}' release data from 'brew': ${err.response.status} ${err.response.body}`; + } + throw e; + } + let body = JSON.parse(resp.body); + + var ver = body.versions.stable; + var dl = ( + body.bottle.stable.files.high_sierra || body.bottle.stable.files.catalina + ).url.replace(new RegExp(ver.replace(/\./g, '\\.'), 'g'), '{{ v }}'); + return [ + { + version: ver, + download: dl.replace(/{{ v }}/g, ver), + }, + ].concat( + body.versioned_formulae.map( + /** @param {String} f */ + function (f) { + var ver = f.replace(/.*@/, ''); + return { + version: ver, + download: dl, + }; + }, + ), + ); } module.exports = getDistributables; diff --git a/_common/fetcher.js b/_common/fetcher.js new file mode 100644 index 000000000..5158c01c6 --- /dev/null +++ b/_common/fetcher.js @@ -0,0 +1,56 @@ +'use strict'; + +let Fetcher = module.exports; + +/** + * @typedef ResponseSummary + * @prop {Boolean} ok + * @prop {Headers} headers + * @prop {Number} status + * @prop {String} body + */ + +/** + * @param {String} url + * @param {RequestInit} opts + * @returns {Promise} + */ +Fetcher.fetch = async function (url, opts) { + let resp = await fetch(url, opts); + let summary = Fetcher.throwIfNotOk(resp); + + return summary; +}; + +/** + * @param {Response} resp + * @returns {Promise} + */ +Fetcher.throwIfNotOk = async function (resp) { + let text = await resp.text(); + + if (!resp.ok) { + let headers = Array.from(resp.headers); + 
console.error('[Fetcher] error: Response Headers:', headers);
+    console.error('[Fetcher] error: Response Text:', text);
+    let err = new Error(`fetch was not ok`);
+    Object.assign(err, {
+      status: 503,
+      code: 'E_FETCH_RELEASES',
+      response: {
+        status: resp.status,
+        headers: headers,
+        body: text,
+      },
+    });
+    throw err;
+  }
+
+  let summary = {
+    ok: resp.ok,
+    headers: resp.headers,
+    status: resp.status,
+    body: text,
+  };
+  return summary;
+};
diff --git a/_common/gitea.js b/_common/gitea.js
index 51b0e811e..3789d933d 100644
--- a/_common/gitea.js
+++ b/_common/gitea.js
@@ -41,9 +41,7 @@ if (module === require.main) {
     'https://git.rootprojects.org',
     '',
     '',
-  ).then(
-    function (all) {
-      console.info(JSON.stringify(all, null, 2));
-    },
-  );
+  ).then(function (all) {
+    console.info(JSON.stringify(all, null, 2));
+  });
 }
diff --git a/_common/github-source.js b/_common/github-source.js
index 42549b161..b6f4e6d98 100644
--- a/_common/github-source.js
+++ b/_common/github-source.js
@@ -10,7 +10,7 @@ let GitHubishSource = require('./githubish-source.js');
  * @param {Object} opts
  * @param {String} opts.owner
  * @param {String} opts.repo
- * @param {String} opts.baseurl
+ * @param {String} [opts.baseurl]
  * @param {String} [opts.username]
  * @param {String} [opts.token]
  */
diff --git a/_common/githubish-source.js b/_common/githubish-source.js
index 753fa4285..343ad9284 100644
--- a/_common/githubish-source.js
+++ b/_common/githubish-source.js
@@ -1,5 +1,7 @@
 'use strict';
 
+let Fetcher = require('../_common/fetcher.js');
+
 let GitHubishSource = module.exports;
 
 /**
@@ -44,30 +46,22 @@ GitHubishSource.getDistributables = async function ({
     });
   }
 
-  let resp = await fetch(url, opts);
-  if (!resp.ok) {
-    let headers = Array.from(resp.headers);
-    console.error('Bad Resp Headers:', headers);
-    let text = await resp.text();
-    console.error('Bad Resp Body:', text);
-    let msg = `failed to fetch releases from '${baseurl}' with user '${username}'`;
-    throw new Error(msg);
-  }
-
-  let respText = await resp.text();
-  let gHubResp;
+  let resp;
   try {
-    gHubResp = JSON.parse(respText);
+    resp = await Fetcher.fetch(url, opts);
   } catch (e) {
-    console.error('Bad Resp JSON:', respText);
-    console.error(e.message);
-    let msg = `failed to parse releases from '${baseurl}' with user '${username}'`;
-    throw new Error(msg);
+    /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error
+    let err = e;
+    if (err.code === 'E_FETCH_RELEASES') {
+      err.message = `failed to fetch '${baseurl}' (githubish-source, user '${username}') release data: ${err.response.status} ${err.response.body}`;
+    }
+    throw e;
   }
+  let gHubResp = JSON.parse(resp.body);
 
   let all = {
+    /** @type {Array} */
     releases: [],
-    // TODO make this ':baseurl' + ':releasename'
     download: '',
   };
@@ -84,6 +78,29 @@
   return all;
 };
 
+/**
+ * @typedef BuildInfo
+ * @prop {String} [name] - name to use instead of filename for hash urls
+ * @prop {String} version
+ * @prop {String} [_version]
+ * @prop {String} [arch]
+ * @prop {String} channel
+ * @prop {String} date
+ * @prop {String} download
+ * @prop {String} [ext]
+ * @prop {String} [_filename]
+ * @prop {String} [hash]
+ * @prop {String} [libc]
+ * @prop {Boolean} [_musl]
+ * @prop {Boolean} [lts]
+ * @prop {String} [size]
+ * @prop {String} os
+ */
+
+/**
+ * @param {any} ghRelease - TODO
+ * @returns {Array}
+ */
 GitHubishSource.releaseToDistributables = function (ghRelease) {
   let ghTag = ghRelease['tag_name']; // TODO tags aren't
always semver / sensical let lts = /(\b|_)(lts)(\b|_)/.test(ghRelease['tag_name']); @@ -95,6 +112,7 @@ GitHubishSource.releaseToDistributables = function (ghRelease) { date = date.replace(/T.*/, ''); let urls = [ghRelease.tarball_url, ghRelease.zipball_url]; + /** @type {Array} */ let dists = []; for (let url of urls) { dists.push({ @@ -114,6 +132,9 @@ GitHubishSource.releaseToDistributables = function (ghRelease) { return dists; }; +/** + * @param {BuildInfo} dist + */ GitHubishSource.followDistributableDownloadAttachment = async function (dist) { let abortCtrl = new AbortController(); let resp = await fetch(dist.download, { diff --git a/_common/githubish.js b/_common/githubish.js index ce305a1b3..eedf2deb2 100644 --- a/_common/githubish.js +++ b/_common/githubish.js @@ -1,5 +1,7 @@ 'use strict'; +let Fetcher = require('../_common/fetcher.js'); + /** * @typedef DistributableRaw * @prop {String} name @@ -57,26 +59,18 @@ GitHubish.getDistributables = async function ({ }); } - let resp = await fetch(url, opts); - if (!resp.ok) { - let headers = Array.from(resp.headers); - console.error('Bad Resp Headers:', headers); - let text = await resp.text(); - console.error('Bad Resp Body:', text); - let msg = `failed to fetch releases from '${baseurl}' with user '${username}'`; - throw new Error(msg); - } - - let respText = await resp.text(); - let gHubResp; + let resp; try { - gHubResp = JSON.parse(respText); + resp = await Fetcher.fetch(url, opts); } catch (e) { - console.error('Bad Resp JSON:', respText); - console.error(e.message); - let msg = `failed to parse releases from '${baseurl}' with user '${username}'`; - throw new Error(msg); + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch '${baseurl}' (githubish, user '${username}) release data: ${err.response.status} ${err.response.body}`; + } + throw e; } + let gHubResp = JSON.parse(resp.body); let all = { /** @type {Array} */ @@ -88,13 +82,18 @@ GitHubish.getDistributables = async function ({ try { gHubResp.forEach(transformReleases); } catch (e) { - console.error(e.message); + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + console.error(err.message); console.error('Error Headers:', resp.headers); console.error('Error Body:', resp.body); let msg = `failed to transform releases from '${baseurl}' with user '${username}'`; throw new Error(msg); } + /** + * @param {any} release - TODO + */ function transformReleases(release) { for (let asset of release['assets']) { let name = asset['name']; diff --git a/_webi/builds-cacher.js b/_webi/builds-cacher.js index a644ef215..b2780e6d5 100644 --- a/_webi/builds-cacher.js +++ b/_webi/builds-cacher.js @@ -9,8 +9,6 @@ let HostTargets = require('./build-classifier/host-targets.js'); let Lexver = require('./build-classifier/lexver.js'); let Triplet = require('./build-classifier/triplet.js'); -let request = require('@root/request'); - var ALIAS_RE = /^alias: (\w+)$/m; var LEGACY_ARCH_MAP = { @@ -153,7 +151,7 @@ async function getLatestBuilds(Releases, installersDir, cacheDir, name, date) { } async function getLatestBuildsInner(Releases, cacheDir, name, date) { - let data = await Releases.latest(request); + let data = await Releases.latest(); if (!date) { date = new Date(); diff --git a/_webi/classify-one.js b/_webi/classify-one.js index 561274aca..7a1d376de 100644 --- a/_webi/classify-one.js +++ 
b/_webi/classify-one.js @@ -6,8 +6,6 @@ let Path = require('node:path'); let BuildsCacher = require('./builds-cacher.js'); let Triplet = require('./build-classifier/triplet.js'); -let request = require('@root/request'); - async function main() { let projName = process.argv[2]; if (!projName) { @@ -47,7 +45,7 @@ async function main() { Releases.latest = Releases; } - let projInfo = await Releases.latest(request); + let projInfo = await Releases.latest(); // let packages = await Builds.getPackage({ name: projName }); // console.log(packages); diff --git a/_webi/serve-installer.js b/_webi/serve-installer.js index 21939993c..02d1f8aa8 100644 --- a/_webi/serve-installer.js +++ b/_webi/serve-installer.js @@ -70,18 +70,20 @@ InstallerServer.helper = async function ({ console.log(`dbg: Get Project Installer Type for '${projectName}':`); let proj = await Builds.getProjectType(projectName); - console.log(proj); + if (proj.type === 'alias') { + console.log(`dbg: alias`, proj); + projectName = proj.detail; + proj = await Builds.getProjectType(projectName); // an alias should never resolve to an alias + } + console.log(`dbg: proj`, proj); - let validTypes = ['alias', 'selfhosted', 'valid']; + let validTypes = ['selfhosted', 'valid']; if (!validTypes.includes(proj.type)) { let msg = `'${projectName}' doesn't have an installer: '${proj.type}': '${proj.detail}'`; let err = new Error(msg); err.code = 'ENOENT'; throw err; } - if (proj.type === 'alias') { - projectName = proj.detail; - } let tmplParams = { pkg: projectName, diff --git a/_webi/transform-releases.js b/_webi/transform-releases.js index 050c9ebe8..d6e33d447 100644 --- a/_webi/transform-releases.js +++ b/_webi/transform-releases.js @@ -3,7 +3,6 @@ var Releases = module.exports; var path = require('path'); -var request = require('@root/request'); var _normalize = require('./normalize.js'); var cache = {}; @@ -28,7 +27,7 @@ Releases.get = async function (pkgdir) { throw err; } - let all = await get.latest(request); + let all = await get.latest(); return _normalize(all); }; diff --git a/brew/brew-update-service-install b/brew/brew-update-service-install index 5f26baad8..a97b677b0 100644 --- a/brew/brew-update-service-install +++ b/brew/brew-update-service-install @@ -7,13 +7,13 @@ main() { ( chmod a+x ~/.local/bin/brew-update-hourly echo "Checking for serviceman..." + ~/.local/bin/webi serviceman if ! command -v serviceman > /dev/null; then - "$HOME/.local/bin/webi" serviceman export PATH="$HOME/.local/bin:$PATH" - serviceman --version fi + serviceman --version - env PATH="$PATH" serviceman add --user \ + serviceman add --agent \ --workdir ~/.local/opt/brew/ \ --name sh.brew.updater -- \ ~/.local/bin/brew-update-hourly diff --git a/bun/README.md b/bun/README.md index 89f0699e7..e688ebd60 100644 --- a/bun/README.md +++ b/bun/README.md @@ -132,9 +132,7 @@ file) ``` 3. Add your project to the system launcher, running as the current user ```sh - sudo env PATH="$PATH" \ - serviceman add --path="$PATH" --system \ - --username "$(whoami)" --name my-project -- \ + serviceman add --name 'my-project' --daemon -- \ bun run ./my-project.js ``` 4. Restart the logging service @@ -155,6 +153,6 @@ For **macOS**: ``` 3. 
Add your project to the system launcher, running as the current user ```sh - serviceman add --path="$PATH" --user --name my-project -- \ + serviceman add --agent --name 'my-project' -- \ bun run ./my-project.js ``` diff --git a/bun/releases.js b/bun/releases.js index 1238c15f3..b54035d6f 100644 --- a/bun/releases.js +++ b/bun/releases.js @@ -18,6 +18,14 @@ module.exports = function () { return false; } + let isMusl = r.name.includes('-musl'); + if (isMusl) { + r._musl = true; + r.libc = 'musl'; + } else if (r.os === 'linux') { + r.libc = 'gnu'; + } + return true; }) .map(function (r) { diff --git a/caddy/README.md b/caddy/README.md index 0b28d29a2..a50142234 100644 --- a/caddy/README.md +++ b/caddy/README.md @@ -819,10 +819,10 @@ To avoid the nitty-gritty details of `launchd` plist files, you can use 2. Use Serviceman to create a _launchd_ plist file ```sh - my_username="$( id -u -n )" + my_username="$(id -u -n)" - serviceman add --user --name caddy -- \ - caddy run --config ./Caddyfile --envfile ~/.config/caddy/env + serviceman add --agent --name 'caddy' --workdir ./ -- \ + caddy run --envfile ~/.config/caddy/env --config ./Caddyfile --adapter caddyfile ``` (this will create `~/Library/LaunchAgents/caddy.plist`) @@ -837,8 +837,8 @@ This process creates a _User-Level_ service in `~/Library/LaunchAgents`. To create a _System-Level_ service in `/Library/LaunchDaemons/` instead: ```sh -sudo serviceman add --system --name caddy -- \ - caddy run --config ./Caddyfile --envfile ~/.config/caddy/env +serviceman add --name 'caddy' --workdir ./ --daemon -- \ + caddy run --envfile ~/.config/caddy/env --config ./Caddyfile --adapter caddyfile ``` ### How to run Caddy as a Windows Service @@ -856,7 +856,7 @@ sudo serviceman add --system --name caddy -- \ 3. Create a **Startup Registry Entry** with Serviceman. ```sh serviceman.exe add --name caddy -- \ - caddy run --config ./Caddyfile --envfile ~/.config/caddy/env + caddy run --envfile ~/.config/caddy/env --config ./Caddyfile --adapter caddyfile ``` 4. You can manage the service directly with Serviceman. For example: ```sh @@ -901,10 +901,8 @@ See the notes below to run as a **User Service** or use the JSON Config. ``` 4. Use Serviceman to create a _systemd_ config file. ```sh - my_username="$( id -u -n )" - sudo env PATH="$PATH" \ - serviceman add --system --username "${my_username}" --name caddy -- \ - caddy run --config ./Caddyfile --envfile ~/.config/caddy/env + serviceman add --name 'caddy' --daemon -- \ + caddy run --envfile ~/.config/caddy/env --config ./Caddyfile --adapter caddyfile ``` (this will create `/etc/systemd/system/caddy.service`) 5. Manage the service with `systemctl` and `journalctl`: @@ -915,10 +913,10 @@ See the notes below to run as a **User Service** or use the JSON Config. To create a **User Service** instead: -- don't use `sudo`, but do use `--user` when running `serviceman`: +- use `--agent` when running `serviceman`: ```sh - serviceman add --user --name caddy -- \ - caddy run --config ./Caddyfile --envfile ~/.config/caddy/env + serviceman add --agent --name caddy -- \ + caddy run --envfile ~/.config/caddy/env --config ./Caddyfile --adapter caddyfile ``` (this will create `~/.config/systemd/user/`) - user the `--user` flag to manage services and logs: @@ -1363,19 +1361,13 @@ See also: 2. 
Generate the `service` file: \ - JSON Config ```sh - my_app_user="$( id -u -n )" - sudo env PATH="${PATH}" \ - serviceman add --system --cap-net-bind \ - --username "${my_app_user}" --name caddy -- \ - caddy run --resume --envfile ./caddy.env + serviceman add --name 'caddy' --daemon -- \ + caddy run --resume --envfile ./caddy.env ``` - Caddyfile ```sh - my_app_user="$( id -u -n )" - sudo env PATH="${PATH}" \ - serviceman add --system --cap-net-bind \ - --username "${my_app_user}" --name caddy -- \ - caddy run --config ./Caddyfile --envfile ./caddy.env + serviceman add --name 'caddy' --daemon -- \ + caddy run --config ./Caddyfile --envfile ./caddy.env ``` 3. Reload `systemd` config files, the logging service (it may not be started on a new VPS), and caddy diff --git a/chromedriver/releases.js b/chromedriver/releases.js index 333129f16..ae6b054a7 100644 --- a/chromedriver/releases.js +++ b/chromedriver/releases.js @@ -1,5 +1,7 @@ 'use strict'; +let Fetcher = require('../_common/fetcher.js'); + // See const releaseApiUrl = 'https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json'; @@ -41,18 +43,23 @@ const releaseApiUrl = // } module.exports = async function () { - let resp = await fetch(releaseApiUrl); - - if (!resp.ok) { - let text = await resp.text(); - let msg = `failed to fetch releases from '${releaseApiUrl}': ${resp.status} ${text}`; - throw new Error(msg); + let resp; + try { + resp = await Fetcher.fetch(releaseApiUrl, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'chromedriver' release data: ${err.response.status} ${err.response.body}`; + } + throw e; } - - let body = await resp.json(); + let data = JSON.parse(resp.body); let builds = []; - for (let release of body.versions) { + for (let release of data.versions) { if (!release.downloads.chromedriver) { continue; } diff --git a/dashcore-utils/README.md b/dashcore-utils/README.md index 41c54edcc..f2e3d4d08 100644 --- a/dashcore-utils/README.md +++ b/dashcore-utils/README.md @@ -100,8 +100,7 @@ mkdir -p ~/.dashcore/wallets/ mkdir -p /mnt/slc1_vol_100g/dashcore/_data mkdir -p /mnt/slc1_vol_100g/dashcore/_caches -sudo env PATH="$PATH" serviceman add \ - --system --user "$my_user" --path "$PATH" --name dashd --force -- \ +serviceman add --name 'dashd' --daemon -- \ dashd \ -usehd \ -conf="$HOME/.dashcore/dash.conf" \ diff --git a/dashcore-utils/dashd-hd-service-install b/dashcore-utils/dashd-hd-service-install index 13aa9b299..a25a7f854 100644 --- a/dashcore-utils/dashd-hd-service-install +++ b/dashcore-utils/dashd-hd-service-install @@ -84,20 +84,8 @@ fn_srv_install() { ( my_name="dashd-${my_netname}" fi - my_system_args="" - my_kernel="$( - uname -s - )" - if test "Darwin" != "${my_kernel}"; then - my_user="$( - id -u -n - )" - my_system_args="--system --username ${my_user}" - fi - # shellcheck disable=SC2016,SC1090 - echo 'sudo env PATH="$PATH"' \ - "serviceman add ${my_system_args} --path \"\$PATH\" --name \"${my_name}\" --force --" \ + echo "serviceman add --name \"${my_name}\" --" \ "dashd " \ "${my_net_flag}" \ -usehd \ @@ -107,16 +95,16 @@ fn_srv_install() { ( "-datadir=\"${my_datadir}\"" \ "-blocksdir=\"${my_blocksdir}\"" + echo "" + echo "Installing latest 'serviceman'..." + echo "" + "$HOME/.local/bin/webi" serviceman > /dev/null if ! 
command -v serviceman > /dev/null; then - echo "" - echo "Installing 'serviceman'..." - echo "" - { - "$HOME/.local/bin/webi" serviceman - } > /dev/null - - # shellcheck disable=SC1090 - . ~/.config/envman/PATH.env || true + export PATH="$HOME/.local/bin:$PATH" + fi + serviceman --version + if ! command -v dashd > /dev/null; then + export PATH="$HOME/.local/opt/dashcore/bin:$PATH" fi mkdir -p "$HOME/.dashcore/wallets/" @@ -131,8 +119,7 @@ fn_srv_install() { ( cd "${my_vol}" || return 1 # leave options unquoted so they're interpreted separately # shellcheck disable=SC2086 - sudo env PATH="${PATH}" \ - serviceman add ${my_system_args} --path "${PATH}" --name "${my_name}" --force -- \ + serviceman add --name "${my_name}" -- \ dashd \ ${my_net_flag} \ -usehd \ diff --git a/dashd/README.md b/dashd/README.md index 2dd32d413..a89ce5675 100644 --- a/dashd/README.md +++ b/dashd/README.md @@ -219,14 +219,7 @@ You can use [`serviceman`](../serviceman/): **Linux** ```sh -sudo env PATH="$PATH" \ - serviceman add \ - --system \ - --username "$(id -n -u)" \ - --path "$PATH" \ - --name dashd \ - --force \ - -- \ +serviceman add --name 'dashd' -- \ dashd \ -usehd \ -conf="$HOME/.dashcore/dash.conf" \ @@ -239,11 +232,7 @@ sudo env PATH="$PATH" \ **Mac** ```sh -serviceman add \ - --path "$PATH" \ - --name dashd \ - --force \ - -- \ +serviceman add --name 'dashd' -- \ dashd \ -usehd \ -conf="$HOME/.dashcore/dash.conf" \ diff --git a/fish/README.md b/fish/README.md index 2e27699ef..7e672da94 100644 --- a/fish/README.md +++ b/fish/README.md @@ -43,7 +43,7 @@ the file: ```sh #!/bin/bash -echo "Who am I? I'm $(whoami)." +echo "Who am I? I'm $(id -u -n)." ``` You can also run bash explicitly: @@ -99,7 +99,7 @@ You should use `chsh` to change your shell: ```sh #!/bin/sh -sudo chsh -s "$(command -v fish)" "$(whoami)" +sudo chsh -s "$(command -v fish)" "$(id -u -n)" ``` If vim uses `fish` instead of `bash`, annoying errors will happen. 
diff --git a/flutter/releases.js b/flutter/releases.js index f2e097b19..49bcb0a23 100644 --- a/flutter/releases.js +++ b/flutter/releases.js @@ -1,9 +1,14 @@ 'use strict'; -var FLUTTER_OSES = ['macos', 'linux', 'windows']; +let Fetcher = require('../_common/fetcher.js'); -// stable, beta, dev -var channelMap = {}; +let FLUTTER_OSES = ['macos', 'linux', 'windows']; + +/** + * stable, beta, dev + * @type {Object.} + */ +let channelMap = {}; // This can be spot-checked against // https://docs.flutter.dev/release/archive?tab=windows @@ -53,21 +58,45 @@ var channelMap = {}; // ] // } -module.exports = async function (request) { +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} [_version] + * @prop {Boolean} lts + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} [_filename] + */ + +module.exports = async function () { let all = { download: '', + /** @type {Array} */ releases: [], + /** @type {Array} */ channels: [], }; for (let osname of FLUTTER_OSES) { - let resp = await request({ - url: `https://storage.googleapis.com/flutter_infra_release/releases/releases_${osname}.json`, - json: true, - }); + let resp; + try { + let url = `https://storage.googleapis.com/flutter_infra_release/releases/releases_${osname}.json`; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'flutter' release data for ${osname}: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let data = JSON.parse(resp.body); - let osBaseUrl = resp.body.base_url; - let osReleases = resp.body.releases; + let osBaseUrl = data.base_url; + let osReleases = data.releases; for (let asset of osReleases) { if (!channelMap[asset.channel]) { @@ -80,7 +109,6 @@ module.exports = async function (request) { lts: false, channel: asset.channel, date: asset.release_date.replace(/T.*/, ''), - //sha256: asset.sha256, download: `${osBaseUrl}/${asset.archive}`, _filename: asset.archive, }); @@ -97,7 +125,7 @@ module.exports = async function (request) { }; if (module === require.main) { - module.exports(require('@root/request')).then(function (all) { + module.exports().then(function (all) { all.releases = all.releases.slice(25); console.info(JSON.stringify(all, null, 2)); }); diff --git a/go/README.md b/go/README.md index f3a0a5168..f1b074a67 100644 --- a/go/README.md +++ b/go/README.md @@ -80,8 +80,7 @@ webi serviceman pushd ./hello/ # swap 'hello' and './hello' for the name of your project and binary -sudo env PATH="$PATH" \ - serviceman add --system --username "$(whoami)" --name hello -- \ +serviceman add --name 'hello' -- \ ./hello # Restart the logging service diff --git a/go/releases.js b/go/releases.js index f712aebbf..6e1f3a60e 100644 --- a/go/releases.js +++ b/go/releases.js @@ -1,14 +1,21 @@ 'use strict'; -var osMap = { +let Fetcher = require('../_common/fetcher.js'); + +/** @type {Object.} */ +let osMap = { darwin: 'macos', }; -var archMap = { +/** @type {Object.} */ +let archMap = { 386: 'x86', }; let ODDITIES = ['bootstrap', '-arm6.']; +/** + * @param {String} filename + */ function isOdd(filename) { for (let oddity of ODDITIES) { let isOddity = filename.includes(oddity); @@ -18,7 +25,22 @@ function isOdd(filename) { } } -function getDistributables(request) { +/** + * @typedef BuildInfo + * @prop {String} version + * 
@prop {String} [_version] + * @prop {String} arch + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} ext + * @prop {String} [_filename] + * @prop {String} hash + * @prop {Boolean} lts + * @prop {String} os + */ + +async function getDistributables() { /* { version: 'go1.13.8', @@ -37,60 +59,71 @@ function getDistributables(request) { ] }; */ - return request({ - url: 'https://golang.org/dl/?mode=json&include=all', - json: true, - }).then((resp) => { - var goReleases = resp.body; - var all = { - releases: [], - download: '', - }; - goReleases.forEach((release) => { - // strip 'go' prefix, standardize version - var parts = release.version.slice(2).split('.'); - while (parts.length < 3) { - parts.push('0'); - } - var version = parts.join('.'); - // nix 'go' prefix - var fileversion = release.version.slice(2); + let resp; + try { + let url = 'https://golang.org/dl/?mode=json&include=all'; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'Go' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let goReleases = JSON.parse(resp.body); - release.files.forEach((asset) => { - let odd = isOdd(asset.filename); - if (odd) { - return; - } + let all = { + /** @type {Array} */ + releases: [], + download: '', + }; - var filename = asset.filename; - var os = osMap[asset.os] || asset.os || '-'; - var arch = archMap[asset.arch] || asset.arch || '-'; - all.releases.push({ - version: version, - _version: fileversion, - // all go versions >= 1.0.0 are effectively LTS - lts: (parts[0] > 0 && release.stable) || false, - channel: (release.stable && 'stable') || 'beta', - date: '1970-01-01', // the world may never know - os: os, - arch: arch, - ext: '', // let normalize run the split/test/join - hash: '-', // not ready to standardize this yet - download: `https://dl.google.com/go/${filename}`, - }); - }); - }); + for (let release of goReleases) { + // Strip 'go' prefix, standardize version + let parts = release.version.slice(2).split('.'); + while (parts.length < 3) { + parts.push('0'); + } + let version = parts.join('.'); + let fileversion = release.version.slice(2); - return all; - }); + for (let asset of release.files) { + if (isOdd(asset.filename)) { + continue; + } + + let filename = asset.filename; + let os = osMap[asset.os] || asset.os || '-'; + let arch = archMap[asset.arch] || asset.arch || '-'; + let build = { + version: version, + _version: fileversion, + lts: (parts[0] > 0 && release.stable) || false, + channel: (release.stable && 'stable') || 'beta', + date: '1970-01-01', // the world may never know + os: os, + arch: arch, + ext: '', // let normalize run the split/test/join + hash: '-', // not ready to standardize this yet + download: `https://dl.google.com/go/${filename}`, + }; + all.releases.push(build); + } + } + + return all; } module.exports = getDistributables; if (module === require.main) { - getDistributables(require('@root/request')).then(function (all) { + getDistributables().then(function (all) { all = require('../_webi/normalize.js')(all); + //@ts-expect-error all.releases = all.releases.slice(0, 10); console.info(JSON.stringify(all, null, 2)); }); diff --git a/golang/README.md b/golang/README.md index 1bc0e0939..934d75c72 100644 --- a/golang/README.md +++ b/golang/README.md 
@@ -85,8 +85,7 @@ webi serviceman pushd ./hello/ # swap 'hello' and './hello' for the name of your project and binary -sudo env PATH="$PATH" \ - serviceman add --system --username "$(whoami)" --name hello -- \ +serviceman add --name 'hello' -- \ ./hello # Restart the logging service diff --git a/gpg/releases.js b/gpg/releases.js index bea218a8b..8d145839e 100644 --- a/gpg/releases.js +++ b/gpg/releases.js @@ -1,5 +1,7 @@ 'use strict'; +let Fetcher = require('../_common/fetcher.js'); + let ltsRe = /GnuPG-(2\.2\.[\d\.]+)/; function createRssMatcher() { @@ -16,12 +18,42 @@ function createUrlMatcher() { ); } -async function getRawReleases(request) { - let matcher = createRssMatcher(); +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} [_version] + * @prop {String} arch + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} ext + * @prop {String} [_filename] + * @prop {String} hash + * @prop {Boolean} lts + * @prop {String} os + */ - let resp = await request({ - url: 'https://sourceforge.net/projects/gpgosx/rss?path=/', - }); +async function getRawReleases() { + let resp; + try { + let url = 'https://sourceforge.net/projects/gpgosx/rss?path=/'; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/rss+xml' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'gpg' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let contentType = resp.headers.get('Content-Type'); + if (!contentType?.includes('xml')) { + throw new Error(`Unexpected content type: ${contentType}`); + } + + let matcher = createRssMatcher(); let links = []; for (;;) { let m = matcher.exec(resp.body); @@ -30,54 +62,58 @@ async function getRawReleases(request) { } links.push(m[1]); } + return links; } +/** + * @param {Array} links + */ function transformReleases(links) { //console.log(JSON.stringify(links, null, 2)); //console.log(links.length); let matcher = createUrlMatcher(); - let releases = links - .map(function (link) { - let isLts = ltsRe.test(link); - let parts = link.match(matcher); - if (!parts || !parts[2]) { - return null; - } - let segs = parts[2].split('.'); - let version = segs.slice(0, 3).join('.'); - if (segs.length > 3) { - version += '+' + segs.slice(3); - } - let fileversion = segs.join('.'); - - return { - name: parts[1], - version: version, - _version: fileversion, - // all go versions >= 1.0.0 are effectively LTS - lts: isLts, - channel: 'stable', - // TODO Sat, 19 Nov 2016 16:17:33 UT - date: '1970-01-01', // the world may never know - os: 'macos', - arch: 'amd64', - ext: 'dmg', - download: link, - }; - }) - .filter(Boolean); + let builds = []; + for (let link of links) { + let isLts = ltsRe.test(link); + let parts = link.match(matcher); + if (!parts || !parts[2]) { + continue; + } + + let segs = parts[2].split('.'); + let version = segs.slice(0, 3).join('.'); + if (segs.length > 3) { + version += '+' + segs.slice(3); + } + let fileversion = segs.join('.'); + + let build = { + name: parts[1], + version: version, + _version: fileversion, + lts: isLts, + channel: 'stable', + // TODO Sat, 19 Nov 2016 16:17:33 UT + date: '1970-01-01', // the world may never know + os: 'macos', + arch: 'amd64', + ext: 'dmg', + download: link, + }; + builds.push(build); + } return { _names: ['GnuPG', 'gpgosx'], - releases: releases, + releases: builds, }; } 
-async function getDistributables(request) { - let releases = await getRawReleases(request); +async function getDistributables() { + let releases = await getRawReleases(); let all = transformReleases(releases); return all; } @@ -85,7 +121,7 @@ async function getDistributables(request) { module.exports = getDistributables; if (module === require.main) { - getDistributables(require('@root/request')).then(function (all) { + getDistributables().then(function (all) { all = require('../_webi/normalize.js')(all); all.releases = all.releases.slice(0, 10000); console.info(JSON.stringify(all, null, 2)); diff --git a/iterm2/releases.js b/iterm2/releases.js index 26e3167c8..f3d61c3e2 100644 --- a/iterm2/releases.js +++ b/iterm2/releases.js @@ -1,73 +1,91 @@ 'use strict'; -function getRawReleases(request) { - return request({ url: 'https://iterm2.com/downloads.html' }).then( - function (resp) { - var links = resp.body - .split(/[<>]+/g) - .map(function (str) { - var m = str.match( - /href="(https:\/\/iterm2\.com\/downloads\/.*\.zip)"/, - ); - if (m && /iTerm2-[34]/.test(m[1])) { - return m[1]; - } - }) - .filter(Boolean); - return links; - }, - ); +let Fetcher = require('../_common/fetcher.js'); + +async function getRawReleases() { + let resp; + try { + let url = 'https://iterm2.com/downloads.html'; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'text/html' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'iterm2' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + + let contentType = resp.headers.get('Content-Type'); + if (!contentType || !contentType.includes('text/html')) { + throw new Error(`Unexpected Content-Type: ${contentType}`); + } + + let lines = resp.body.split(/[<>]+/g); + + /** @type {Array} */ + let links = []; + for (let str of lines) { + let m = str.match(/href="(https:\/\/iterm2\.com\/downloads\/.*\.zip)"/); + if (m && /iTerm2-[34]/.test(m[1])) { + if (m[1]) { + links.push(m[1]); + } + } + } + + return links; } +/** + * @param {Array} links + */ function transformReleases(links) { - //console.log(JSON.stringify(links, null, 2)); - //console.log(links.length); + let builds = []; + for (let link of links) { + let channel = /\/stable\//.test(link) ? 'stable' : 'beta'; - return { - _names: ['iTerm2', 'iterm2'], - releases: links - .map(function (link) { - var channel = /\/stable\//.test(link) ? 
'stable' : 'beta'; + let parts = link.replace(/.*\/iTerm2[-_]v?(\d_.*)\.zip/, '$1').split('_'); + let version = parts.join('.').replace(/([_-])?beta/, '-beta'); - var parts = link - .replace(/.*\/iTerm2[-_]v?(\d_.*)\.zip/, '$1') - .split('_'); - var version = parts.join('.').replace(/([_-])?beta/, '-beta'); + // ex: 3.5.0-beta17 => 3_5_0beta17 + // ex: 3.0.2-preview => 3_0_2-preview + let fileversion = version.replace(/\./g, '_'); + fileversion = fileversion.replace(/-beta/g, 'beta'); - // ex: 3.5.0-beta17 => 3_5_0beta17 - // ex: 3.0.2-preview => 3_0_2-preview - let fileversion = version.replace(/\./g, '_'); - fileversion = fileversion.replace(/-beta/g, 'beta'); + let build = { + version: version, + _version: fileversion, + lts: 'stable' === channel, + channel: channel, + date: '1970-01-01', // the world may never know + os: 'macos', + arch: 'amd64', + ext: '', // let normalize run the split/test/join + download: link, + }; + builds.push(build); + } - return { - version: version, - _version: fileversion, - // all go versions >= 1.0.0 are effectively LTS - lts: 'stable' === channel, - channel: channel, - date: '1970-01-01', // the world may never know - os: 'macos', - arch: 'amd64', - ext: '', // let normalize run the split/test/join - download: link, - }; - }) - .filter(Boolean), + return { + _names: ['iTerm2', 'iterm2'], + releases: builds, }; } -function getDistributables(request) { - return getRawReleases(request) - .then(transformReleases) - .then(function (all) { - return all; - }); +async function getDistributables() { + let rawReleases = await getRawReleases(); + let all = transformReleases(rawReleases); + + return all; } module.exports = getDistributables; if (module === require.main) { - getDistributables(require('@root/request')).then(function (all) { + getDistributables().then(function (all) { all = require('../_webi/normalize.js')(all); all.releases = all.releases.slice(0, 10000); console.info(JSON.stringify(all, null, 2)); diff --git a/julia/releases.js b/julia/releases.js index ec0f7f83a..2c00624cf 100644 --- a/julia/releases.js +++ b/julia/releases.js @@ -1,31 +1,61 @@ 'use strict'; -var osMap = { +let Fetcher = require('../_common/fetcher.js'); + +/** @type {Object.} */ +let osMap = { winnt: 'windows', mac: 'darwin', }; -var archMap = { + +/** @type {Object.} */ +let archMap = { armv7l: 'armv7', i686: 'x86', powerpc64le: 'ppc64le', }; +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} [_version] + * @prop {String} [arch] + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} [ext] + * @prop {String} [_filename] + * @prop {String} [hash] + * @prop {String} [libc] + * @prop {Boolean} [_musl] + * @prop {Boolean} [lts] + * @prop {String} [size] + * @prop {String} os + */ + async function getDistributables() { let all = { + /** @type {Array} */ releases: [], download: '', _names: ['julia', 'macaarch64'], }; - let resp = await fetch( - 'https://julialang-s3.julialang.org/bin/versions.json', - { - headers: { - Accept: 'application/json', - }, - }, - ); - let buildsByVersion = await resp.json(); + let resp; + try { + let url = 'https://julialang-s3.julialang.org/bin/versions.json'; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'julia' release data: ${err.response.status} 
${err.response.body}`; + } + throw e; + } + let buildsByVersion = JSON.parse(resp.body); /* { @@ -105,6 +135,12 @@ async function getDistributables() { return all; } +/** + * @param {Object} a + * @param {String} a.version + * @param {Object} b + * @param {String} b.version + */ function sortByVersion(a, b) { let [aVer, aPre] = a.version.split('-'); let [bVer, bPre] = b.version.split('-'); diff --git a/macos/releases.js b/macos/releases.js index ae443b033..703f6f066 100644 --- a/macos/releases.js +++ b/macos/releases.js @@ -1,6 +1,8 @@ 'use strict'; -var oses = [ +let Fetcher = require('../_common/fetcher.js'); + +let oses = [ { name: 'macOS Sierra', version: '10.12.6', @@ -25,7 +27,7 @@ var oses = [ }, ]; -var headers = { +let headers = { Connection: 'keep-alive', 'Cache-Control': 'max-age=0', 'Upgrade-Insecure-Requests': '1', @@ -40,55 +42,103 @@ var headers = { 'Accept-Language': 'en-US,en;q=0.9,sq;q=0.8', }; -module.exports = function (request) { - var all = { +/** + * @param {typeof oses[0]} os + */ +async function fetchReleasesForOS(os) { + let resp; + try { + resp = await Fetcher.fetch(os.url, { + headers: headers, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'macos' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + + // Extract the download link + let match = resp.body.match(/(http[^>]+Install[^>]+\.dmg)/); + if (match) { + return match[1]; + } +} + +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} [_version] + * @prop {String} arch + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} ext + * @prop {String} [_filename] + * @prop {String} hash + * @prop {Boolean} lts + * @prop {String} os + */ + +let osnames = ['macos', 'linux']; + +async function getDistributables() { + let all = { _names: ['InstallOS'], download: '', + /** @type {Array} */ releases: [], }; - return Promise.all( - oses.map(function (os) { - return request({ - method: 'GET', - url: os.url, - headers: headers, - }).then(function (resp) { - var m = resp.body.match(/(http[^>]+Install[^>]+.dmg)/); - var download = m && m[1]; - ['macos', 'linux'].forEach(function (osname) { - all.releases.push({ - version: os.version, - lts: os.lts || false, - channel: os.channel || 'beta', - date: os.date, - os: osname, - arch: 'amd64', - ext: 'dmg', - hash: '-', - download: download, - }); - }); - }); - }), - ).then(function () { - all.releases.sort(function (a, b) { - if ('10.11.6' === a.version) { - return -1; - } - if (a.date > b.date) { - return 1; - } - if (a.date < b.date) { - return -1; - } - }); - return all; + // Fetch data for each OS and populate the releases array + for (let os of oses) { + let download = await fetchReleasesForOS(os); + if (!download) { + continue; + } + + // Add releases for macOS and Linux + for (let osname of osnames) { + let build = { + version: os.version, + lts: os.lts || false, + channel: os.channel || 'beta', + date: os.date, + os: osname, + arch: 'amd64', + ext: 'dmg', + hash: '-', + download: download, + }; + + all.releases.push(build); + } + } + + // Sort releases + all.releases.sort(function (a, b) { + if (a.version === '10.11.6') { + return -1; + } + + if (a.date > b.date) { + return 1; + } else if (a.date < b.date) { + return -1; + } + + return 0; }); -}; + + return all; +} + +module.exports = getDistributables; if (module === 
require.main) { - module.exports(require('@root/request')).then(function (all) { + module.exports().then(function (all) { console.info(JSON.stringify(all, null, 2)); }); } diff --git a/mariadb/releases.js b/mariadb/releases.js deleted file mode 100644 index 32d3d0675..000000000 --- a/mariadb/releases.js +++ /dev/null @@ -1,210 +0,0 @@ -'use strict'; - -var brewReleases = require('../_common/brew.js'); - -module.exports = function (request) { - // So many places to get (incomplete) release info... - // - // MariaDB official - // - https://downloads.mariadb.org/mariadb/+releases/ - // - http://archive.mariadb.org/ - // Brew - // - https://formulae.brew.sh/api/formula/mariadb@10.3.json - // - https://formulae.brew.sh/docs/api/ - // - https://formulae.brew.sh/formula/mariadb@10.2#default - // - // Note: This could be very fragile due to using the html - // as an API. It's pretty rather than minified, but that - // doesn't guarantee that it's meant as a consumable API. - // - - var promises = [mariaReleases(), brewReleases(request, 'mariadb')]; - return Promise.all(promises).then(function (many) { - var versions = many[0]; - var brews = many[1]; - - var all = { download: '', releases: [] }; - - // linux x86 - // linux x64 - // windows x86 - // windows x64 - // (and mac, wedged-in from Homebrew) - versions.forEach(function (ver) { - all.releases.push({ - version: ver.version, - lts: false, - channel: ver.channel, - date: ver.date, - os: 'linux', - arch: 'amd64', - download: - 'http://archive.mariadb.org/mariadb-{{ v }}/bintar-linux-x86_64/mariadb-{{ v }}-linux-x86_64.tar.gz'.replace( - /{{ v }}/g, - ver.version, - ), - }); - all.releases.push({ - version: ver.version, - lts: false, - channel: ver.channel, - date: ver.date, - os: 'linux', - arch: 'amd64', - download: - 'http://archive.mariadb.org/mariadb-{{ v }}/bintar-linux-x86/mariadb-{{ v }}-linux-x86.tar.gz'.replace( - /{{ v }}/g, - ver.version, - ), - }); - - // windows - all.releases.push({ - version: ver.version, - lts: false, - channel: ver.channel, - date: ver.date, - os: 'windows', - arch: 'amd64', - download: - 'http://archive.mariadb.org/mariadb-{{ v }}/winx64-packages/mariadb-{{ v }}-winx64.zip'.replace( - /{{ v }}/g, - ver.version, - ), - }); - all.releases.push({ - version: ver.version, - lts: false, - channel: ver.channel, - date: ver.date, - os: 'windows', - arch: 'x86', - download: - 'http://archive.mariadb.org/mariadb-{{ v }}/win32-packages/mariadb-{{ v }}-win32.zip'.replace( - /{{ v }}/g, - ver.version, - ), - }); - - // Note: versions are sorted most-recent first. - // We just assume that the brew version is most recent stable - // ... but we can't really know for sure - - // TODO - brews.some(function (brew, i) { - // 10.3 => ^10.2(\b|\.) 
- var reBrewVer = new RegExp( - '^' + brew.version.replace(/\./, '\\.') + '(\\b|\\.)', - 'g', - ); - if (!ver.version.match(reBrewVer)) { - return; - } - all.releases.push({ - version: ver.version, - lts: false, - channel: ver.channel, - date: ver.date, - os: 'macos', - arch: 'amd64', - download: brew.download.replace(/{{ v }}/g, ver.version), - }); - brews.splice(i, 1); // remove - return true; - }); - }); - - return all; - }); - - function mariaReleases() { - return request({ - url: 'https://downloads.mariadb.org/mariadb/+releases/', - fail: true, // https://git.coolaj86.com/coolaj86/request.js/issues/2 - }) - .then(failOnBadStatus) - .then(function (resp) { - // fragile, but simple - - // Make release info go from this: - var html = resp.body; - // - // - // 10.0.38 - // 2019-01-31 - // Stable - // - - // To this: - var reLine = /\s*(<(tr|td)[^>]*>)\s*/g; - // - // 10.0.382019-01-31Stable - // 10.0.372018-11-01Stable - // 10.0.362018-08-01Stable - // - // To this: - var reVer = - /.*mariadb\/(10[^\/]+)\/">.*(20\d\d-\d\d-\d\d)<\/td>(\w+)<\/td>/; - // - // { "version": "10.0.36", "date": "2018-08-01", "channel": "stable" } - - return html - .replace(reLine, '$1') - .split(/\n/) - .map(function (line) { - var m = line.match(reVer); - if (!m) { - return; - } - return { - version: m[1], - channel: mapChannel(m[3].toLowerCase()), - date: m[2], - }; - }) - .filter(Boolean); - }) - .catch(function (err) { - console.error('Error fetching (official) MariaDB versions'); - console.error(err); - return []; - }); - } -}; - -function mapChannel(ch) { - if ('alpha' === ch) { - return 'dev'; - } - // stable,rc,beta - return ch; -} - -function failOnBadStatus(resp) { - if (resp.statusCode >= 400) { - var err = new Error('Non-successful status code: ' + resp.statusCode); - err.code = 'ESTATUS'; - err.response = resp; - throw err; - } - return resp; -} - -if (module === require.main) { - module.exports(require('@root/request')).then(function (all) { - console.info('official releases look like:'); - console.info(JSON.stringify(all.releases.slice(0, 2), null, 2)); - console.info('Homebrew releases look like:'); - console.info( - JSON.stringify( - all.releases - .filter(function (rel) { - return 'macos' === rel.os; - }) - .slice(0, 2), - null, - 2, - ), - ); - }); -} diff --git a/node/README.md b/node/README.md index 27a4436ec..7851c963f 100644 --- a/node/README.md +++ b/node/README.md @@ -227,9 +227,9 @@ Node app as a Non-System (Unprivileged) Service on Mac, Windows, and Linux: or _User Unit_ (Linux): ```sh - my_username="$( id -u -n )" + my_username="$(id -u -n)" - serviceman add --user --name my-node-project -- \ + serviceman add --agent --name my-node-project -- \ caddy run --config ./Caddyfile --envfile ~/.config/caddy/env ``` @@ -275,11 +275,8 @@ Node app as a Non-System (Unprivileged) Service on Mac, Windows, and Linux: ```sh pushd ./my-node-project/ -my_username="$( id -u -n )" -sudo env PATH="$PATH" \ - serviceman add --system --path "$PATH" --cap-net-bind \ - --name my-node-project --username "${my_username}" -- \ - npm run start +serviceman add --name 'my-node-project' -- \ + npm run start ``` #### ... 
with auto-reload in Dev @@ -287,10 +284,8 @@ sudo env PATH="$PATH" \ ```sh pushd ./my-node-project/ -sudo env PATH="$PATH" \ - serviceman add --system --path "$PATH" --cap-net-bind \ - --name my-node-project --username "$(id -u -n)" -- \ - npx -p nodemon@3 -- nodemon ./server.js +serviceman add --name 'my-node-project' -- \ + npx -p nodemon@3 -- nodemon ./server.js ``` #### View Logs & Restart @@ -367,3 +362,17 @@ jobs: - run: npm run lint - run: npm run test ``` + +### How to Install Node's Linux Dependencies + +Typically Node just needs `openssl` and `libstdc++`. + +```sh +# Apline +sudo apk add --no-cache libstdc++ libssl3 +``` + +```sh +# Debian / Ubuntu +sudo apt-get install -y libstdc++6 libssl3 +``` diff --git a/node/install.sh b/node/install.sh index 52573679a..117633b04 100644 --- a/node/install.sh +++ b/node/install.sh @@ -43,4 +43,58 @@ pkg_done_message() { b_dst="$(fn_sub_home "${pkg_dst}")" echo "" echo " Installed $(t_pkg 'node') and $(t_pkg 'npm') at $(t_path "${b_dst}/")" + + if command -v apk > /dev/null; then + if ! apk info | grep -F 'libstdc++' > /dev/null; then + echo "" + echo " $(t_pkg 'WARNING'): $(t_pkg 'libstdc++') is required for $(t_pkg 'node'), but not installed" >&2 + if command -v sudo > /dev/null; then + cmd_sudo='sudo ' + fi + _install_webi_essentials_apk "${cmd_sudo}" 'libstdc++' + fi + fi } + +_install_webi_essentials_apk() { ( + cmd_sudo="${1}" + b_pkgs="${2}" + + #echo " $(t_dim 'Running') $(t_cmd "${cmd_sudo}apk add --no-cache")" + fn_polite_sudo "${cmd_sudo}" " $(t_cmd "apk add --no-cache ${b_pkgs}")" + # shellcheck disable=SC2086 + ${cmd_sudo} apk add --no-cache ${b_pkgs} +); } + +fn_polite_sudo() { ( + a_sudo="${1}" + a_cmds="${2}" + + # no sudo needed, so don't ask + if test -z "${a_sudo}"; then + return 0 + fi + + # this is scripted, not user-interactive, continue + if test -z "${WEBI_TTY}"; then + return 0 + fi + + # this is user interactive, ask the user,defaulting to yes + echo "" + #shellcheck disable=SC2005 # echo for newline + printf '%s\n' "$(t_attn 'Use sudo to run the following? 
[Y/n] ')" + echo "${a_cmds}" + read -r b_yes < /dev/tty + + b_yes="$( + echo "${b_yes}" | + tr '[:upper:]' '[:lower:]' | + tr -d '[:space:]' + )" + if test -z "${b_yes}" || test "${b_yes}" = "y" || test "${b_yes}" = "yes"; then + return 0 + fi + echo " aborted" + return 1 +); } diff --git a/node/releases.js b/node/releases.js index ac896018e..7fc7e917e 100644 --- a/node/releases.js +++ b/node/releases.js @@ -1,12 +1,15 @@ 'use strict'; +let Fetcher = require('../_common/fetcher.js'); + // https://blog.risingstack.com/update-nodejs-8-end-of-life-no-support/ // 6 mos "current" + 18 mos LTS "active" + 12 mos LTS "maintenance" -//var endOfLife = 3 * 366 * 24 * 60 * 60 * 1000; +//let endOfLife = 3 * 366 * 24 * 60 * 60 * 1000; // If there have been no updates in 12 months, it's almost certainly end-of-life const END_OF_LIFE = 366 * 24 * 60 * 60 * 1000; // OSes +/** @type {Object.} */ let osMap = { osx: 'macos', // NOTE: filename is 'darwin' linux: 'linux', @@ -16,6 +19,7 @@ let osMap = { }; // CPU architectures +/** @type {Object.} */ let archMap = { x64: 'amd64', x86: 'x86', @@ -28,6 +32,7 @@ let archMap = { }; // file extensions +/** @type {Object.>} */ let pkgMap = { pkg: ['pkg'], //exe: ['exe'], // disable @@ -40,8 +45,25 @@ let pkgMap = { musl: ['tar.gz', 'tar.xz'], }; -async function getDistributables(request) { +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} [_version] + * @prop {String} arch + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} ext + * @prop {String} [_filename] + * @prop {String} [hash] + * @prop {String} libc + * @prop {Boolean} lts + * @prop {String} os + */ + +async function getDistributables() { let all = { + /** @type {Array} */ releases: [], download: '', }; @@ -64,37 +86,62 @@ async function getDistributables(request) { ] */ - // Alternate: 'https://nodejs.org/dist/index.json', - let baseUrl = `https://nodejs.org/download/release`; - let officialP = request({ - url: `${baseUrl}/index.json`, - json: true, - }).then(function (resp) { - transform(baseUrl, resp.body); - return; - }); + { + // Alternate: 'https://nodejs.org/dist/index.json', + let baseUrl = `https://nodejs.org/download/release`; + + // Fetch official builds + let resp; + try { + resp = await Fetcher.fetch(`${baseUrl}/index.json`, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'node' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let data = JSON.parse(resp.body); + + void transform(baseUrl, data); + } + + { + let unofficialBaseUrl = `https://unofficial-builds.nodejs.org/download/release`; - let unofficialBaseUrl = `https://unofficial-builds.nodejs.org/download/release`; - let unofficialP = request({ - url: `${unofficialBaseUrl}/index.json`, - json: true, - }) - .then(function (resp) { - transform(unofficialBaseUrl, resp.body); - return; - }) - .catch(function (err) { - console.error('failed to fetch unofficial-builds'); - console.error(err); - }); + // Fetch unofficial builds + let resp; + try { + resp = await Fetcher.fetch(`${unofficialBaseUrl}/index.json`, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + 
err.message = `failed to fetch 'node' (unofficial) release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let data = JSON.parse(resp.body); + + transform(unofficialBaseUrl, data); + } + /** + * @param {String} baseUrl + * @param {Array} builds + */ function transform(baseUrl, builds) { - builds.forEach(function (build) { + for (let build of builds) { let buildDate = new Date(build.date).valueOf(); let age = Date.now() - buildDate; let maintained = age < END_OF_LIFE; if (!maintained) { - return; + continue; } let lts = false !== build.lts; @@ -108,9 +155,9 @@ async function getDistributables(request) { channel = 'beta'; } - build.files.forEach(function (file) { + for (let file of build.files) { if ('src' === file || 'headers' === file) { - return; + continue; } let fileParts = file.split('-'); @@ -126,7 +173,7 @@ async function getDistributables(request) { pkgs = pkgMap.tar; } if (!pkgs?.length) { - return; + continue; } let extra = ''; @@ -143,7 +190,7 @@ async function getDistributables(request) { osPart = 'darwin'; } - pkgs.forEach(function (pkg) { + for (let pkg of pkgs) { let filename = `node-${build.version}-${osPart}-${archPart}${extra}.${pkg}`; if ('msi' === pkg) { filename = `node-${build.version}-${archPart}${extra}.${pkg}`; @@ -164,20 +211,17 @@ async function getDistributables(request) { }; all.releases.push(release); - }); - }); - }); + } + } + } } - await officialP; - await unofficialP; - return all; } module.exports = getDistributables; if (module === require.main) { - getDistributables(require('@root/request')).then(function (all) { + getDistributables().then(function (all) { all = require('../_webi/normalize.js')(all); console.info(JSON.stringify(all)); //console.info(JSON.stringify(all, null, 2)); diff --git a/package-lock.json b/package-lock.json index c5866861b..1e54e99af 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,34 +9,49 @@ "version": "1.1.1", "license": "MPL-2.0", "dependencies": { - "@root/request": "^1.9.2", - "dotenv": "^8.2.0", - "marked": "^4.1.1" + "dotenv": "^16.4.7", + "marked": "^15.0.4" + }, + "devDependencies": { + "@types/node": "^22.10.2" } }, - "node_modules/@root/request": { - "version": "1.9.2", - "resolved": "https://registry.npmjs.org/@root/request/-/request-1.9.2.tgz", - "integrity": "sha512-wVaL9yVV9oDR9UNbPZa20qgY+4Ch6YN8JUkaE4el/uuS5dmhD8Lusm/ku8qJVNtmQA56XLzEDCRS6/vfpiHK2A==" + "node_modules/@types/node": { + "version": "22.10.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.2.tgz", + "integrity": "sha512-Xxr6BBRCAOQixvonOye19wnzyDiUtTeqldOOmj3CkeblonbccA12PFwlufvRdrpjXxqnmUaeiU5EOA+7s5diUQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.20.0" + } }, "node_modules/dotenv": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz", - "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==", + "version": "16.4.7", + "license": "BSD-2-Clause", "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" } }, "node_modules/marked": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/marked/-/marked-4.1.1.tgz", - "integrity": "sha512-0cNMnTcUJPxbA6uWmCmjWz4NJRe/0Xfk2NhXCUHjew9qJzFN20krFnsUe7QynwqOwa5m1fZ4UDg0ycKFVC0ccw==", + "version": "15.0.4", + "license": "MIT", "bin": { "marked": "bin/marked.js" }, "engines": { - "node": ">= 12" + "node": ">= 18" } + }, + "node_modules/undici-types": { + "version": "6.20.0", + 
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "dev": true, + "license": "MIT" } } } diff --git a/package.json b/package.json index c5fac7956..afa06cdf4 100644 --- a/package.json +++ b/package.json @@ -38,8 +38,10 @@ }, "homepage": "https://github.com/webinstall/webi-installers#readme", "dependencies": { - "@root/request": "^1.9.2", - "dotenv": "^8.2.0", - "marked": "^4.1.1" + "dotenv": "^16.4.7", + "marked": "^15.0.4" + }, + "devDependencies": { + "@types/node": "^22.10.2" } } diff --git a/pg/install.sh b/pg/install.sh index 5c388190a..8895a2550 100644 --- a/pg/install.sh +++ b/pg/install.sh @@ -58,17 +58,20 @@ __init_pg() { } pkg_done_message() { - # TODO show with serviceman echo " Installed $(t_pkg "$pkg_cmd_name v$WEBI_VERSION") (and $(t_pkg "psql")) to $(t_link "$(fn_sub_home "${pkg_dst_bin}")")" echo "" echo "IMPORTANT!!!" echo "" - echo "Database initialized at $POSTGRES_DATA_DIR:" - echo " postgres -D $POSTGRES_DATA_DIR -p 5432" + echo "Database initialized at:" + echo " $POSTGRES_DATA_DIR" echo "" echo "Username and password set to 'postgres':" echo " psql 'postgres://postgres:postgres@localhost:5432/postgres'" echo "" + echo "To install as a service:" + echo " serviceman add --name 'postgres' --workdir '$POSTGRES_DATA_DIR' -- \\" + echo " postgres -D '$POSTGRES_DATA_DIR' -p 5432" + echo "" } } diff --git a/postgres/README.md b/postgres/README.md index b72250390..436f8be2b 100644 --- a/postgres/README.md +++ b/postgres/README.md @@ -34,8 +34,7 @@ To enable Postgres as a Linux Service with [serviceman](../serviceman/): \ (see macOS below) ```sh -sudo env PATH="$PATH" \ - serviceman add --system --username "$(whoami)" --name 'postgres' -- \ +serviceman add --name 'postgres' --workdir ~/.local/share/postgres/var -- \ postgres -D ~/.local/share/postgres/var -p 5432 sudo systemctl restart systemd-journald @@ -119,8 +118,7 @@ curl https://webi.sh/serviceman | sh ``` ```sh -sudo env PATH="$PATH" \ - serviceman add --system --username "$(whoami)" --name 'postgres' -- \ +serviceman add --name 'postgres' --workdir ~/.local/share/postgres/var -- \ postgres -D ~/.local/share/postgres/var -p 5432 sudo systemctl restart systemd-journald @@ -185,7 +183,7 @@ sudo tail -f /var/log/postgres #### macOS ```sh -serviceman add --name 'postgres' -- \ +serviceman add --name 'postgres' --workdir ~/.local/share/postgres/var -- \ postgres -D ~/.local/share/postgres/var -p 5432 tail -f ~/.local/share/postgres/var/log/postgres.log diff --git a/postgres/install.sh b/postgres/install.sh index 6a0d46fcf..eccf1f6ee 100644 --- a/postgres/install.sh +++ b/postgres/install.sh @@ -78,17 +78,20 @@ __init_postgres() { } pkg_done_message() { - # TODO show with serviceman echo "Installed 'postgres' and 'psql' at $pkg_dst" echo "" echo "IMPORTANT!!!" 
echo "" - echo "Database initialized at $POSTGRES_DATA_DIR:" - echo " postgres -D $POSTGRES_DATA_DIR -p 5432" + echo "Database initialized at:" + echo " $POSTGRES_DATA_DIR" echo "" echo "Username and password set to 'postgres':" echo " psql 'postgres://postgres:postgres@localhost:5432/postgres'" echo "" + echo "To install as a service:" + echo " serviceman add --name 'postgres' --workdir '$POSTGRES_DATA_DIR' -- \\" + echo " postgres -D '$POSTGRES_DATA_DIR' -p 5432" + echo "" } } diff --git a/serviceman/README.md b/serviceman/README.md index ef5d5eecb..a9011c27d 100644 --- a/serviceman/README.md +++ b/serviceman/README.md @@ -1,6 +1,6 @@ --- title: Serviceman -homepage: https://git.rootprojects.org/root/serviceman +homepage: https://github.com/bnnanet/serviceman tagline: | Serviceman generates and enables startup files on Linux, Mac, and Windows. --- @@ -71,8 +71,7 @@ changes) ### Example: Bash ```sh -sudo env PATH="$PATH" \ - serviceman add --system --path="$PATH" -- \ +serviceman add --name 'backup' -- \ bash ./backup.sh /mnt/data ``` @@ -83,9 +82,7 @@ sudo env PATH="$PATH" \ ```sh pushd ./my-node-app/ -sudo env PATH="$PATH" \ - serviceman add --system --path="$PATH" \ - --cap-net-bind -- \ +serviceman add --name 'my-node-app' -- \ npx nodemon ./server.js ``` @@ -94,9 +91,7 @@ sudo env PATH="$PATH" \ ```sh pushd ./my-node-app/ -sudo env PATH="$PATH" \ - serviceman add --system --path="$PATH" \ - --cap-net-bind -- \ +serviceman add --name 'my-node-app' -- \ npm start ``` @@ -105,9 +100,7 @@ sudo env PATH="$PATH" \ ```sh pushd ./my-go-package/ -sudo env PATH="$PATH" \ - serviceman add --system --path="$PATH" \ - -- \ +serviceman add --name 'my-service' -- \ go run -mod=vendor cmd/my-service/*.go --port 3000 ``` @@ -115,17 +108,15 @@ sudo env PATH="$PATH" \ pushd ./my-go-package/ go build -mod=vendor cmd/my-service -sudo env PATH="$PATH" \ - serviceman add --system --path="$PATH" \ - --cap-net-bind -- \ +serviceman add --name 'my-service' -- \ ./my-service --port 80 ``` ### How to see all services ```sh -serviceman list --system -serviceman list --user +serviceman list --system --all +serviceman list --agent --all ``` ```text @@ -140,8 +131,8 @@ You can either `add` the service again (which will update any changed options), or you can `stop` and then `start` any service by its name: ```sh -sudo env PATH="$PATH" serviceman stop example-service -sudo env PATH="$PATH" serviceman start example-service +serviceman stop 'example-service' +serviceman start 'example-service' ``` ### See the (sub)command help @@ -161,9 +152,7 @@ serviceman add --help ### Use `--dryrun` to see the generated launcher config: ```sh -sudo env PATH="$PATH" \ - serviceman add --system --path="$PATH" \ - --dryrun -- \ +serviceman add --name 'my-backups' --dryrun -- \ bash ./backup.sh /mnt/data ``` @@ -173,26 +162,59 @@ sudo env PATH="$PATH" \ desktop distros. ```text +# Generated for serviceman. Edit as needed. Keep this line for 'serviceman list'. 
+# https://www.freedesktop.org/software/systemd/man/latest/systemd.service.html + [Unit] -Description=example-service +Description=postgres postgres daemon +Documentation=(none) After=network-online.target Wants=network-online.target systemd-networkd-wait-online.service [Service] Restart=always -StartLimitInterval=10 -StartLimitBurst=3 +RestartSec=3 +RestartSteps=5 +RestartMaxDelaySec=300 -User=root -Group=root +User=app +Group=app -WorkingDirectory=/srv/example-service -ExecStart=/srv/example-service/bin/example-command start +Environment="PATH=/Users/app/.local/opt/pg-essentials/bin:/home/app/.local/opt/postgres/bin:/usr/bin:/bin" +WorkingDirectory=/home/app/.local/share/postgres/var +ExecStart="/home/app/.local/opt/postgres/bin/postgres" "-D" "/home/app/.local/share/postgres/var" "-p" "5432" ExecReload=/bin/kill -USR1 $MAINPID -# Allow the program to bind on privileged ports, such as 80 and 443 -CapabilityBoundingSet=CAP_NET_BIND_SERVICE -AmbientCapabilities=CAP_NET_BIND_SERVICE +# Limit the number of file descriptors and processes; see `man systemd.exec` for more limit settings. +# These are reasonable defaults for a production system. +# Note: systemd "user units" do not support this +LimitNOFILE=1048576 +LimitNPROC=65536 + +# Enable if desired for extra file system security +# (ex: non-containers, multi-user systems) +# +# Use private /tmp and /var/tmp, which are discarded after the service stops. +; PrivateTmp=true +# Use a minimal /dev +; PrivateDevices=true +# Hide /home, /root, and /run/user. Nobody will steal your SSH-keys. +; ProtectHome=true +# Make /usr, /boot, /etc and possibly some more folders read-only. +; ProtectSystem=full +# ... except /opt/{{ .Name }} because we want a place for the database +# and /var/log/{{ .Name }} because we want a place where logs can go. +# This merely retains r/w access rights, it does not add any new. +# Must still be writable on the host! +; ReadWriteDirectories=/opt/postgres /var/log/postgres + +# Grant restricted, root-like privileges to the service. +# CAP_NET_BIND_SERVICE allows binding on privileged ports as a non-root user +# CAP_LEASE allows locking files and is sometimes used for handling file uploads +# Some services may require additional capabilities: +# https://man7.org/linux/man-pages/man7/capabilities.7.html +CapabilityBoundingSet=CAP_NET_BIND_SERVICE CAP_LEASE +AmbientCapabilities=CAP_NET_BIND_SERVICE CAP_LEASE NoNewPrivileges=true [Install] @@ -208,81 +230,88 @@ _container-friendly_ Linuxes. ```sh #!/sbin/openrc-run -supervisor=supervise-daemon - -name="Example System Daemon" -description="A Service for Logging 'Hello, World', a lot!" -description_checkconfig="Check configuration" -description_reload="Reload configuration without downtime" - -# example: -# exampled run --port 1337 --envfile /path/to/env -# exampled check-config --port 1337 --envfile /path/to/env -# exampled reload --port 1337 --envfile /path/to/env -# for setting Config -: ${exampled_opts:="--envfile /root/.config/exampled/env"} +# Generated for serviceman. Edit as needed. Keep this line for 'serviceman list'. 
+name="postgres" +# docs: (none) +description="postgres daemon" -command=/root/bin/exampled -command_args="run --port 1337 $exampled_opts" -command_user=root:root -extra_commands="checkconfig" -extra_started_commands="reload" -output_log=/var/log/exampled.log -error_log=/var/log/exampled.err +supervisor="supervise-daemon" +output_log="/var/log/postgres" +error_log="/var/log/postgres" depend() { - need net localmount - after firewall + need net } -checkconfig() { - ebegin "Checking configuration for $name" - su ${command_user%:*} -s /bin/sh -c "$command check-config $exampled_opts" - eend $? +start_pre() { + checkpath --directory --owner root /var/log/ + checkpath --file --owner 'app:app' ${output_log} ${error_log} } -reload() { - ebegin "Reloading $name" - su ${command_user%:*} -s /bin/sh -c "$command reload $exampled_opts" +start() { + ebegin "Starting ${name}" + supervise-daemon ${name} --start \ + --chdir '/home/app/.local/share/postgres/var' \ + --env 'PATH=/Users/app/.local/opt/pg-essentials/bin:/home/app/.local/opt/postgres/bin:/usr/bin:/bin' \ + --user 'app' \ + --group 'app' \ + --stdout ${output_log} \ + --stderr ${error_log} \ + --pidfile /run/${RC_SVCNAME}.pid \ + --respawn-delay 5 \ + --respawn-max 51840 \ + --capabilities=CAP_NET_BIND_SERVICE \ + -- \ + '/home/app/.local/opt/postgres/bin/postgres' '-D' '/home/app/.local/share/postgres/var' '-p' '5432' eend $? } -stop_pre() { - if [ "$RC_CMD" = restart ]; then - checkconfig || return $? - fi +stop() { + ebegin "Stopping ${name}" + supervise-daemon ${name} --stop \ + --pidfile /run/${RC_SVCNAME}.pid + eend $? } ``` ### What a typical launchd .plist file looks like -```text +```xml - + - Label - example-service - ProgramArguments - - /Users/me/example-service/bin/example-command - start - - - RunAtLoad - - KeepAlive - - - WorkingDirectory - /Users/me/example-service - - StandardErrorPath - /Users/me/.local/share/example-service/var/log/example-service.log - StandardOutPath - /Users/me/.local/share/example-service/var/log/example-service.log + Label + postgres + ProgramArguments + + /Users/app/.local/opt/postgres/bin/postgres + -D + /Users/app/.local/share/postgres/var + -p + 5432 + + + EnvironmentVariables + + PATH + /Users/app/.local/opt/pg-essentials/bin:/Users/app/.local/opt/postgres/bin:/usr/bin:/bin + + + RunAtLoad + + KeepAlive + + + WorkingDirectory + /Users/app/.local/share/postgres/var + + StandardOutPath + /Users/app/.local/share/postgres/var/log/postgres.log + StandardErrorPath + /Users/app/.local/share/postgres/var/log/postgres.log ``` diff --git a/serviceman/install.sh b/serviceman/install.sh index 7422b4882..c7844a875 100644 --- a/serviceman/install.sh +++ b/serviceman/install.sh @@ -12,28 +12,62 @@ __init_serviceman() { pkg_cmd_name="serviceman" pkg_dst_cmd="$HOME/.local/bin/serviceman" + # shellcheck disable=SC2034 pkg_dst="$pkg_dst_cmd" pkg_src_cmd="$HOME/.local/opt/serviceman-v$WEBI_VERSION/bin/serviceman" + pkg_src_bin="$HOME/.local/opt/serviceman-v$WEBI_VERSION/bin" pkg_src_dir="$HOME/.local/opt/serviceman-v$WEBI_VERSION" + # shellcheck disable=SC2034 pkg_src="$pkg_src_cmd" pkg_install() { - # $HOME/.local/opt/serviceman-v0.8.0/bin - mkdir -p "$pkg_src_bin" + if test -e ./*"$pkg_cmd_name"*/share; then + rm -rf "${pkg_src_dir}" + # mv ./bnnanet-serviceman-* "$HOME/.local/opt/serviceman-v0.9.1" + mv ./*"$pkg_cmd_name"*/ "${pkg_src_dir}" + else + echo "NO share" + # $HOME/.local/opt/serviceman-v0.8.0/bin + mkdir -p "$pkg_src_bin" - # mv ./serviceman* "$HOME/.local/opt/serviceman-v0.8.0/bin/serviceman" - mv 
./"$pkg_cmd_name"* "$pkg_src_cmd" + # mv ./serviceman* "$HOME/.local/opt/serviceman-v0.8.0/bin/serviceman" + mv ./"$pkg_cmd_name"* "$pkg_src_cmd" - # chmod a+x "$HOME/.local/opt/serviceman-v0.8.0/bin/serviceman" - chmod a+x "$pkg_src_cmd" + # chmod a+x "$HOME/.local/opt/serviceman-v0.8.0/bin/serviceman" + chmod a+x "$pkg_src_cmd" + fi + } + + pkg_link() { + ( + cd ~/.local/opt/ || return 1 + rm -rf ./serviceman + ln -s "serviceman-v$WEBI_VERSION" 'serviceman' + ) + + ( + mkdir -p ~/.local/share/ + cd ~/.local/share/ || return 1 + rm -rf ./serviceman + ln -s "../opt/serviceman-v$WEBI_VERSION/share/serviceman" 'serviceman' + ) + + ( + mkdir -p ~/.local/bin/ + cd ~/.local/bin/ || return 1 + rm -rf ./serviceman + ln -s "../opt/serviceman-v$WEBI_VERSION/bin/serviceman" 'serviceman' + ) } pkg_get_current_version() { # 'serviceman version' has output in this format: - # serviceman v0.8.0 (f3ab547) 2020-12-02T16:19:10-07:00 + # serviceman v0.9.1 (2024-12-11 14:29 -0500) + # Copyright 2024 AJ ONeal + # Licensed under the MPL-2.0 # This trims it down to just the version number: - # 0.8.0 + # 0.9.1 serviceman --version 2> /dev/null | head -n 1 | cut -d' ' -f2 | sed 's:^v::' } diff --git a/serviceman/releases.js b/serviceman/releases.js index 2c7634aa8..91e3274d2 100644 --- a/serviceman/releases.js +++ b/serviceman/releases.js @@ -1,18 +1,35 @@ 'use strict'; -var github = require('../_common/github.js'); -var owner = 'therootcompany'; -var repo = 'serviceman'; +let Releases = module.exports; -module.exports = function () { - return github(null, owner, repo).then(function (all) { - return all; - }); +let GitHub = require('../_common/github.js'); +let oldOwner = 'therootcompany'; +let oldRepo = 'serviceman'; + +let GitHubSource = require('../_common/github-source.js'); +let owner = 'bnnanet'; +let repo = 'serviceman'; + +Releases.latest = async function () { + let all = await GitHubSource.getDistributables({ owner, repo }); + for (let pkg of all.releases) { + //@ts-expect-error + pkg.os = 'posix_2017'; + } + + let all2 = await GitHub.getDistributables(null, oldOwner, oldRepo); + for (let pkg of all2.releases) { + //@ts-expect-error + all.releases.push(pkg); + } + + return all; }; if (module === require.main) { - module.exports().then(function (all) { + //@ts-expect-error + Releases.latest().then(function (all) { all = require('../_webi/normalize.js')(all); - console.info(JSON.stringify(all)); + console.info(JSON.stringify(all, null, 2)); }); } diff --git a/shellcheck/README.md b/shellcheck/README.md index 50c53a401..af8d3cbb8 100644 --- a/shellcheck/README.md +++ b/shellcheck/README.md @@ -14,6 +14,7 @@ These are the files / directories that are created and/or modified with this install: ```text +~/.shellcheckrc ~/.config/envman/PATH.env ~/.local/opt/shellcheck/ ~/.local/bin/shellcheck @@ -32,6 +33,16 @@ Also recommended by Google's shellcheck ./script.sh ``` +With common options: + +```sh +shellcheck \ + -s sh -S style \ + -e SC1090 -e SC1091 \ + -o add-default-case -o deprecate-which \ + scripts/* +``` + ### How to run shellcheck in vim `shellcheck` is @@ -56,20 +67,103 @@ check-scripts: shellcheck myscripts/*.sh ``` -### How to ignore an error +### How to Enable or Ignore Checks + +You can use SCXXXX codes to disable shellcheck errors and warnings at any level +through a variety of means, also described at +, +, and `shellcheck --list-optional`. 
-You can ignore an error by putting a comment with the `SCXXXX` error code above -it: +**Single Execution** ```sh -# shellcheck disable= +shellcheck -s sh -S style --exclude=SC1090,SC1091 --enable=add-default-case */*.sh ``` +**Single Line** + +(place directly above the offending line) + ```sh -# shellcheck disable=SC1004 -NOT_AN_ERROR='Look, a literal \ -inside of a string!' +# shellcheck disable=SC2016,SC2088 enable=require-variable-braces +echo '~/ is an alias for $HOME' +``` + +**Whole Function** + +(place directly above the function definition) + +```sh +# shellcheck disable=SC2016,SC2088 enable=require-variable-braces +fn_help() { ( + echo '~/ is an alias for $HOME' +); } +``` + +**Whole File** + +(place directly under the shebang, before any expressions) + +```sh +#!/bin/sh +# shellcheck disable=SC1090,SC1091 enable=require-variable-braces +``` + +**Global Process** + +```sh +export SHELLCHECK_OPTS="-e SC1090 -e SC1091 -o deprecate-which" +``` + +**Global Config** + +`~/.shellcheckrc`: + +```sh +disable=SC1090,SC1091 +disable=SC2155 +enable=add-default-case,check-extra-masked-returns,deprecate-which +enable=quote-safe-variables,check-set-e-suppressed,require-variable-braces +``` + +### Common Ignored & Optional Shellcheck Codes + +```text +SC1003 - Want to escape a single quote? echo 'This is how it'\''s done'. +SC1004 - This backslash+linefeed is literal. Break outside single quotes if you just want to break the line. +SC1090 - Can't follow non-constant source. Use a directive to specify location. +SC1091 - Not following: (error message here) # for source .env, etc +SC2005 - Useless `echo`? Instead of `echo $(cmd)`, just use `cmd` +SC2010 - Don't use ls | grep. Use a glob or a for loop with a condition to allow non-alphanumeric filenames. +SC2016 - Expressions don't expand in single quotes, use double quotes for that. +SC2029 - Note that, unescaped, this expands on the client side. +SC2046 - Quote this to prevent word splitting +SC2059 - Don't use variables in the printf format string. Use printf "..%s.." "$foo". +SC2072 - Decimals are not supported. Either use integers only, or use bc or awk to compare. +SC2086 - Double quote to prevent globbing and word splitting. +SC2087 - Quote 'EOF' to make here document expansions happen on the server side rather than on the client. +SC2088 - Tilde does not expand in quotes. Use $HOME. +SC2155 - Declare and assign separately to avoid masking return values. 
``` Complete list of `SCXXXX` error codes: + +```text +add-default-case - Suggest adding a default case in `case` statements +avoid-nullary-conditions - Suggest explicitly using -n in `[ $var ]` +check-extra-masked-returns - Check for additional cases where exit codes are masked +check-set-e-suppressed - Notify when set -e is suppressed during function invocation +check-unassigned-uppercase - Warn when uppercase variables are unassigned +deprecate-which - Suggest 'command -v' instead of 'which' +quote-safe-variables - Suggest quoting variables without metacharacters +require-double-brackets - Require [[ and warn about [ in Bash/Ksh +require-variable-braces - Suggest putting braces around all variable references +``` + +Complete list of optional checks: + +```sh +# https://www.shellcheck.net/wiki/optional +shellcheck --list-optional +``` diff --git a/shellcheck/install.sh b/shellcheck/install.sh index 36984c2e9..3889bc1ba 100644 --- a/shellcheck/install.sh +++ b/shellcheck/install.sh @@ -20,6 +20,16 @@ __init_shellcheck() { # pkg_install must be defined by every package pkg_install() { + if ! test -e ~/.shellcheckrc; then + { + echo '# ignore: https://www.shellcheck.net/wiki/Ignore' + echo '# enable: https://www.shellcheck.net/wiki/optional' + echo '#disable=SC1090,SC1091' + echo '#enable=add-default-case,check-extra-masked-returns,deprecate-which' + echo '#enable=quote-safe-variables,check-set-e-suppressed,require-variable-braces' + } > ~/.shellcheckrc + fi + # ~/.local/opt/shellcheck-v0.99.9/bin mkdir -p "$(dirname "$pkg_src_cmd")" diff --git a/ssh-adduser/ssh-adduser b/ssh-adduser/ssh-adduser index a9335644f..2fdf1fadc 100644 --- a/ssh-adduser/ssh-adduser +++ b/ssh-adduser/ssh-adduser @@ -14,7 +14,7 @@ main() { my_key_url="${2:-}" my_keys="" - if [ "root" != "$(whoami)" ]; then + if [ "root" != "$(id -u -n)" ]; then echo "webi adduser: running user is already a non-root user" exit 0 fi diff --git a/ssh-pubkey/README.md b/ssh-pubkey/README.md index 84d2e134c..768f073d1 100644 --- a/ssh-pubkey/README.md +++ b/ssh-pubkey/README.md @@ -61,7 +61,7 @@ folder: ```sh rsync -av "$HOME/.ssh/id_rsa.pub" \ - "$HOME/Downloads/id_rsa.$(whoami).pub" + "$HOME/Downloads/id_rsa.$(id -u -n).pub" ``` How to print your public key to the Terminal: diff --git a/ssh-pubkey/ssh-pubkey b/ssh-pubkey/ssh-pubkey index 61ab2281a..f7ace425d 100755 --- a/ssh-pubkey/ssh-pubkey +++ b/ssh-pubkey/ssh-pubkey @@ -82,11 +82,11 @@ main() { # TODO use the comment (if any) for the name of the file echo >&2 "" #shellcheck disable=SC2088 - echo >&2 "~/Downloads/id_${my_keytype}.$(whoami).pub": + echo >&2 "~/Downloads/id_${my_keytype}.$(id -u -n).pub": echo >&2 "" - rm -f "$HOME/Downloads/id_${my_keytype}.$(whoami).pub" - cp -RPp "$HOME/.ssh/id_${my_keytype}.pub" "$HOME/Downloads/id_${my_keytype}.$(whoami).pub" - cat "$HOME/Downloads/id_${my_keytype}.$(whoami).pub" + rm -f "$HOME/Downloads/id_${my_keytype}.$(id -u -n).pub" + cp -RPp "$HOME/.ssh/id_${my_keytype}.pub" "$HOME/Downloads/id_${my_keytype}.$(id -u -n).pub" + cat "$HOME/Downloads/id_${my_keytype}.$(id -u -n).pub" echo >&2 "" if test -f ~/.ssh/id_rsa; then diff --git a/syncthing/README.md b/syncthing/README.md index b0b5885ba..43f4f86e9 100644 --- a/syncthing/README.md +++ b/syncthing/README.md @@ -46,7 +46,7 @@ webi serviceman ```sh mkdir -p ~/.config/syncthing/ -env PATH="$PATH" serviceman add --user --name syncthing -- \ +serviceman add --agent --name syncthing -- \ syncthing --home ~/.config/syncthing/ ``` diff --git a/terraform/releases.js b/terraform/releases.js index 
d473bcd7a..7c5cd3f88 100644 --- a/terraform/releases.js +++ b/terraform/releases.js @@ -1,46 +1,77 @@ 'use strict'; -function getDistributables(request) { - return request({ - url: 'https://releases.hashicorp.com/terraform/index.json', - json: true, - }).then(function (resp) { - let releases = resp.body; - let all = { - releases: [], - download: '', // Full URI provided in response body - }; - - function getBuildsForVersion(version) { - releases.versions[version].builds.forEach(function (build) { - let r = { - version: build.version, - download: build.url, - // These are generic enough for the autodetect, - // and the per-file logic has proven to get outdated sooner - //os: convert[build.os], - //arch: convert[build.arch], - //channel: 'stable|-rc|-beta|-alpha', - }; - all.releases.push(r); - }); - } +let Fetcher = require('../_common/fetcher.js'); + +let alphaRe = /\d-alpha\d/; +let betaRe = /\d-beta\d/; +let rcRe = /\d-rc\d/; - // Releases are listed chronologically, we want the latest first. - const allVersions = Object.keys(releases.versions).reverse(); +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} download + */ - allVersions.forEach(function (version) { - getBuildsForVersion(version); +async function getDistributables() { + let resp; + try { + let url = 'https://releases.hashicorp.com/terraform/index.json'; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/json' }, }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'terraform' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let releases = JSON.parse(resp.body); - return all; - }); + let all = { + /** @type {Array} */ + releases: [], + download: '', + }; + + let allVersions = Object.keys(releases.versions); + allVersions.reverse(); // Releases are listed chronologically, we want the latest first. + + for (let version of allVersions) { + for (let build of releases.versions[version].builds) { + let channel = 'stable'; + let isRc = rcRe.test(version); + let isBeta = betaRe.test(version); + let isAlpha = alphaRe.test(version); + if (isRc) { + channel = 'rc'; + } else if (isBeta) { + channel = 'beta'; + } else if (isAlpha) { + channel = 'alpha'; + } + + let r = { + version: build.version, + download: build.url, + // These are generic enough for the autodetect, + // and the per-file logic has proven to get outdated sooner + //os: convert[build.os], + //arch: convert[build.arch], + channel: channel, + }; + all.releases.push(r); + } + } + + return all; } module.exports = getDistributables; if (module === require.main) { - getDistributables(require('@root/request')).then(function (all) { + getDistributables().then(function (all) { all = require('../_webi/normalize.js')(all); console.info(JSON.stringify(all)); }); diff --git a/terramate/README.md b/terramate/README.md new file mode 100644 index 000000000..7a1a3c335 --- /dev/null +++ b/terramate/README.md @@ -0,0 +1,95 @@ +--- +title: Terramate +homepage: https://github.com/terramate-io/terramate +tagline: | + Terramate simplifies managing large-scale Terraform codebases +--- + +To update or switch versions, run `webi terramate@stable` (or `@v0.11.4`, +`@beta`, etc). 
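+
+For example (the version tags below are illustrative; use whichever release you need):
+
+```sh
+webi terramate@stable
+webi terramate@v0.11.4
+```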
+ +## Cheat Sheet + +> `Terramate` enables scalable automation for Terraform by providing a robust +> framework for managing multiple stacks, generating code, and executing +> targeted workflows. + +### **1. Create a New Project** + +```sh +git init -b 'main' ./terramate-quickstart +cd ./terramate-quickstart +git commit --allow-empty -m "Initial empty commit" +``` + +### **2. Create a Stack** + +```sh +terramate create \ + --name "StackName" \ + --description "Description of the stack" \ + ./stacks/stackname/ + +git add ./stacks/stackname/stack.tm.hcl +git commit -m "Create a stack" +``` + +### **3. List Stacks** + +```sh +terramate list +``` + +### **4. Detect Changes** + +```sh +terramate list --changed +``` + +### **5. Generate Code** + +1. Create a `.tm.hcl` file for code generation: + + ```sh + cat <<EOF > ./stacks/backend.tm.hcl + generate_hcl "backend.tf" { + content { + terraform { + backend "local" {} + } + } + } + EOF + ``` + +2. Run the generation command: + ```sh + terramate generate + ``` + +### **6. Run Terraform Commands** + +- **Initialize stacks:** + + ```sh + terramate run terraform init + ``` + +- **Plan changes:** + + ```sh + terramate run terraform plan + ``` + +- **Apply changes:** + + ```sh + terramate run terraform apply -auto-approve + ``` + +- **Run commands only on changed stacks:** + ```sh + terramate run --changed terraform init + terramate run --changed terraform plan + terramate run --changed terraform apply -auto-approve + ``` diff --git a/terramate/install.ps1 b/terramate/install.ps1 new file mode 100644 index 000000000..ca38f9ccf --- /dev/null +++ b/terramate/install.ps1 @@ -0,0 +1,56 @@ +#!/usr/bin/env pwsh + +############### +# Install terramate # +############### + +# Every package should define these variables +$pkg_cmd_name = "terramate" + +$pkg_dst_cmd = "$Env:USERPROFILE\.local\bin\terramate.exe" +$pkg_dst = "$pkg_dst_cmd" + +$pkg_src_cmd = "$Env:USERPROFILE\.local\opt\terramate-v$Env:WEBI_VERSION\bin\terramate.exe" +$pkg_src_bin = "$Env:USERPROFILE\.local\opt\terramate-v$Env:WEBI_VERSION\bin" +$pkg_src_dir = "$Env:USERPROFILE\.local\opt\terramate-v$Env:WEBI_VERSION" +$pkg_src = "$pkg_src_cmd" + +New-Item "$Env:USERPROFILE\Downloads\webi" -ItemType Directory -Force | Out-Null +$pkg_download = "$Env:USERPROFILE\Downloads\webi\$Env:WEBI_PKG_FILE" + +# Fetch archive +IF (!(Test-Path -Path "$Env:USERPROFILE\Downloads\webi\$Env:WEBI_PKG_FILE")) { + Write-Output "Downloading terramate from $Env:WEBI_PKG_URL to $pkg_download" + & curl.exe -A "$Env:WEBI_UA" -fsSL "$Env:WEBI_PKG_URL" -o "$pkg_download.part" + & Move-Item "$pkg_download.part" "$pkg_download" +} + +IF (!(Test-Path -Path "$pkg_src_cmd")) { + Write-Output "Installing terramate" + + # TODO: create package-specific temp directory + # Enter tmp + Push-Location .local\tmp + + # Remove any leftover tmp cruft + Remove-Item -Path ".\terramate-v*" -Recurse -ErrorAction Ignore + Remove-Item -Path ".\terramate.exe" -Recurse -ErrorAction Ignore + + # Unpack archive file into this temporary directory + # Windows BSD-tar handles zip. Imagine that.
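+    # (tar.exe/bsdtar ships with Windows 10 build 17063 and later, so no separate unzip tool should be needed)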
+ Write-Output "Unpacking $pkg_download" + & tar xf "$pkg_download" + + # Settle unpacked archive into place + Write-Output "Install Location: $pkg_src_cmd" + New-Item "$pkg_src_bin" -ItemType Directory -Force | Out-Null + Move-Item -Path "terramate.exe" -Destination "$pkg_src_bin" + + # Exit tmp + Pop-Location +} + +Write-Output "Copying into '$pkg_dst_cmd' from '$pkg_src_cmd'" +Remove-Item -Path "$pkg_dst_cmd" -Recurse -ErrorAction Ignore | Out-Null +Copy-Item -Path "$pkg_src" -Destination "$pkg_dst" -Recurse +Remove-Item -Path "$pkg_src" -Recurse -ErrorAction Ignore diff --git a/terramate/install.sh b/terramate/install.sh new file mode 100644 index 000000000..1fc9bdcca --- /dev/null +++ b/terramate/install.sh @@ -0,0 +1,47 @@ +#!/bin/sh +set -e +set -u + +__init_terramate() { + + ##################### + # Install terramate # + ##################### + + # Every package should define these 6 variables + pkg_cmd_name="terramate" + + pkg_dst_cmd="$HOME/.local/bin/terramate" + pkg_dst="$pkg_dst_cmd" + + pkg_src_cmd="$HOME/.local/opt/terramate-v$WEBI_VERSION/bin/terramate" + pkg_src_dir="$HOME/.local/opt/terramate-v$WEBI_VERSION" + pkg_src="$pkg_src_cmd" + + # pkg_install must be defined by every package + pkg_install() { + # ~/.local/opt/terramate-v0.11.4/bin + mkdir -p "$(dirname "$pkg_src_cmd")" + + # mv ./terramate* ~/.local/opt/terramate-v0.11.4/bin/terramate + mv terramate* "$pkg_src_cmd" + } + + # pkg_get_current_version is recommended, but (soon) not required + pkg_get_current_version() { + # 'terramate version' has output in this format: + + # 0.11.3 + # + # Your version of Terramate is out of date! The latest version + # is 0.11.4 (released on Tue Dec 3 19:27:35 UTC 2024). + # You can update by downloading from https://github.com/terramate-io/terramate/releases/tag/v0.11.4 + + # This trims it down to just the version number: + # 0.11.4 + terramate version | head -n 1 | cut -d' ' -f1 + } + +} + +__init_terramate diff --git a/terramate/releases.js b/terramate/releases.js new file mode 100644 index 000000000..c3d1c712c --- /dev/null +++ b/terramate/releases.js @@ -0,0 +1,36 @@ +'use strict'; + +let github = require('../_common/github.js'); +let owner = 'terramate-io'; +let repo = 'terramate'; + +let junkFiles = ['checksums.txt', 'cosign.pub']; + +async function getDistributables() { + let all = await github(null, owner, repo); + let releases = []; + for (let release of all.releases) { + let isJunk = !junkFiles.includes(release.name); + if (isJunk) { + continue; + } + releases.push(release); + } + + all.releases = releases; + return all; +} + +module.exports = getDistributables; + +if (module === require.main) { + getDistributables().then(function (all) { + all = require('../_webi/normalize.js')(all); + // just select the first 5 for demonstration + // all.releases = all.releases.slice(0, 5); + // all.releases = all.releases.filter( + // release => !["checksums.txt.sig", "cosign.pub","terramate_0.9.0_windows_x86_64.zip"].includes(release.name) + // ); + console.info(JSON.stringify(all, null, 2)); + }); +} diff --git a/uuidv7/README.md b/uuidv7/README.md index 71f0bf834..fe66fc319 100644 --- a/uuidv7/README.md +++ b/uuidv7/README.md @@ -53,6 +53,30 @@ uuidv7 ; uuidv7 ; uuidv7 01928d74-3ffb-7e06-abe9-3fe20e5cb5f2 ``` +### How to Generate UPPER CASE (like `uuidgen`) + +```sh +uuidv7 | tr '[:lower:]' '[:upper:]' +``` + +```text +01928D73-D8ED-7211-A314-7081D763271D +``` + +### How to Generate v4 UUIDs? + +Use `uuidgen`. 
+ +```sh +uuidgen +uuidgen | tr '[:upper:]' '[:lower:]' +``` + +```text +84FA79E5-024E-4388-8D10-91618B93BE9D +84fa79e5-024e-4388-8d10-91618b93be9d +``` + ### How could I roll my own UUID v7 generator? It's not that hard. There are examples in many languages here: diff --git a/zig/releases.js b/zig/releases.js index 3d0dff30f..1377ff2b6 100644 --- a/zig/releases.js +++ b/zig/releases.js @@ -1,87 +1,114 @@ 'use strict'; -var NON_BUILDS = ['bootstrap', 'src']; -var ODDITIES = NON_BUILDS.concat(['armv6kz-linux']); +let Fetcher = require('../_common/fetcher.js'); -module.exports = function (request) { - return request({ - url: 'https://ziglang.org/download/index.json', - json: true, - }).then(function (resp) { - let versions = resp.body; - let releases = []; +let NON_BUILDS = ['bootstrap', 'src']; +let ODDITIES = NON_BUILDS.concat(['armv6kz-linux']); - let refs = Object.keys(versions); - refs.forEach(function (ref) { - let pkgs = versions[ref]; - let version = pkgs.version || ref; +/** + * @typedef BuildInfo + * @prop {String} version + * @prop {String} [_version] + * @prop {String} [arch] + * @prop {String} channel + * @prop {String} date + * @prop {String} download + * @prop {String} [ext] + * @prop {String} [_filename] + * @prop {String} [hash] + * @prop {String} [libc] + * @prop {Boolean} [lts] + * @prop {String} [size] + * @prop {String} os + */ - // "platform" = arch + os combo - let platforms = Object.keys(pkgs); - platforms.forEach(function (platform) { - let pkg = pkgs[platform]; +module.exports = async function () { + let resp; + try { + let url = 'https://ziglang.org/download/index.json'; + resp = await Fetcher.fetch(url, { + headers: { Accept: 'application/json' }, + }); + } catch (e) { + /** @type {Error & { code: string, response: { status: number, body: string } }} */ //@ts-expect-error + let err = e; + if (err.code === 'E_FETCH_RELEASES') { + err.message = `failed to fetch 'zig' release data: ${err.response.status} ${err.response.body}`; + } + throw e; + } + let versions = JSON.parse(resp.body); - // don't grab 'date' or 'notes', which are (confusingly) - // at the same level as platform releases - let isNotPackage = !pkg || 'object' !== typeof pkg || !pkg.tarball; - if (isNotPackage) { - return; - } + /** @type {Array} */ + let releases = []; + let refs = Object.keys(versions); + for (let ref of refs) { + let pkgs = versions[ref]; + let version = pkgs.version || ref; - let isOdd = ODDITIES.includes(platform); - if (isOdd) { - return; - } + // "platform" = arch + os combo + let platforms = Object.keys(pkgs); + for (let platform of platforms) { + let pkg = pkgs[platform]; - // Ex: aarch64-macos => ['aarch64', 'macos'] - let parts = platform.split('-'); - //let arch = parts[0]; - let os = parts[1]; - if (parts.length > 2) { - console.warn( - `unexpected platform name with multiple '-': ${platform}`, - ); - return; - } + // don't grab 'date' or 'notes', which are (confusingly) + // at the same level as platform releases + let isNotPackage = !pkg || 'object' !== typeof pkg || !pkg.tarball; + if (isNotPackage) { + continue; + } - let p = { - version: version, - date: pkgs.date, - channel: 'stable', - // linux, macos, windows - os: os, - // TODO map explicitly (rather than normalization auto-detect) - //arch: arch, - download: pkg.tarball, - hash: pkg.shasum, - size: pkg.size, - // TODO docs + release notes? 
- //docs: 'https://ziglang.org/documentation/0.9.1/', - //stdDocs: 'https://ziglang.org/documentation/0.9.1/std/', - //notes: 'https://ziglang.org/download/0.9.1/release-notes.html' - }; + let isOdd = ODDITIES.includes(platform); + if (isOdd) { + continue; + } - // Mark branches or tags as beta (for now) - // Ex: 'master' - // Also mark prereleases (with build tags) as beta - // Ex: 0.10.0-dev.1606+97a53bb8a - let isNotStable = !/\./.test(ref) || /\+|-/.test(p.version); - if (isNotStable) { - p.channel = 'beta'; - } + // Ex: aarch64-macos => ['aarch64', 'macos'] + let parts = platform.split('-'); + //let arch = parts[0]; + let os = parts[1]; + if (parts.length > 2) { + console.warn(`unexpected platform name with multiple '-': ${platform}`); + continue; + } - releases.push(p); - }); - }); + let p = { + version: version, + date: pkgs.date, + channel: 'stable', + // linux, macos, windows + os: os, + // TODO map explicitly (rather than normalization auto-detect) + //arch: arch, + download: pkg.tarball, + hash: pkg.shasum, + size: pkg.size, + // TODO docs + release notes? + //docs: 'https://ziglang.org/documentation/0.9.1/', + //stdDocs: 'https://ziglang.org/documentation/0.9.1/std/', + //notes: 'https://ziglang.org/download/0.9.1/release-notes.html' + }; - return { - releases: releases, - }; - }); + // Mark branches or tags as beta (for now) + // Ex: 'master' + // Also mark prereleases (with build tags) as beta + // Ex: 0.10.0-dev.1606+97a53bb8a + let isNotStable = !/\./.test(ref) || /\+|-/.test(p.version); + if (isNotStable) { + p.channel = 'beta'; + } + + releases.push(p); + } + } + + return { + releases: releases, + }; }; if (module === require.main) { - module.exports(require('@root/request')).then(function (all) { + module.exports().then(function (all) { all = require('../_webi/normalize.js')(all); // just select the first 5 for demonstration all.releases = all.releases.slice(0, 5);