From 7e81d84f27a6ff9b54ff7a871b0fce889d113a31 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Fri, 11 Jul 2025 15:31:59 +0200 Subject: [PATCH 01/45] add mpx package --- packages/mpx/.prettierignore | 9 + packages/mpx/.prettierrc.cjs | 14 + packages/mpx/bin/mpw.js | 551 ++++++++++++++++++++++++++++++++ packages/mpx/package-lock.json | 15 + packages/mpx/package.json | 29 ++ packages/mpx/pnpm-lock.yaml | 262 +++++++++++++++ packages/mpx/rolldown.config.ts | 13 + packages/mpx/src/build.ts | 23 ++ packages/mpx/src/cli.ts | 25 ++ packages/mpx/src/constants.ts | 7 + packages/mpx/tsconfig.json | 6 + 11 files changed, 954 insertions(+) create mode 100644 packages/mpx/.prettierignore create mode 100644 packages/mpx/.prettierrc.cjs create mode 100644 packages/mpx/bin/mpw.js create mode 100644 packages/mpx/package-lock.json create mode 100644 packages/mpx/package.json create mode 100644 packages/mpx/pnpm-lock.yaml create mode 100644 packages/mpx/rolldown.config.ts create mode 100644 packages/mpx/src/build.ts create mode 100644 packages/mpx/src/cli.ts create mode 100644 packages/mpx/src/constants.ts create mode 100644 packages/mpx/tsconfig.json diff --git a/packages/mpx/.prettierignore b/packages/mpx/.prettierignore new file mode 100644 index 00000000..c4138507 --- /dev/null +++ b/packages/mpx/.prettierignore @@ -0,0 +1,9 @@ +packages/generator-widget/generators/app/templates/packages/__tests__/outputs/ +packages/generator-widget/generators/app/templates/**/*.ejs +packages/pluggable-widgets-tools/tests/projects +*.png +*.svg +*.snap +**/dist +.idea +.vscodepnpm-lock.yaml diff --git a/packages/mpx/.prettierrc.cjs b/packages/mpx/.prettierrc.cjs new file mode 100644 index 00000000..6113d6f9 --- /dev/null +++ b/packages/mpx/.prettierrc.cjs @@ -0,0 +1,14 @@ +module.exports = { + trailingComma: "none", + useTabs: false, + tabWidth: 4, + semi: true, + singleQuote: false, + printWidth: 120, + bracketSpacing: true, + bracketSameLine: false, + arrowParens: "avoid", + proseWrap: "always", + xmlSelfClosingSpace: true, + xmlWhitespaceSensitivity: "ignore", +}; diff --git a/packages/mpx/bin/mpw.js b/packages/mpx/bin/mpw.js new file mode 100644 index 00000000..f7a2585d --- /dev/null +++ b/packages/mpx/bin/mpw.js @@ -0,0 +1,551 @@ +#!/usr/bin/env node +import { EventEmitter } from "events"; +import process$1 from "node:process"; +import fsPromises, { readFile } from "node:fs/promises"; +import { fileURLToPath } from "node:url"; +import { readFileSync } from "node:fs"; +import path from "node:path"; + +//#region node_modules/.pnpm/cac@6.7.14/node_modules/cac/dist/index.mjs +function toArr(any) { + return any == null ? [] : Array.isArray(any) ? any : [any]; +} +function toVal(out, key, val, opts) { + var x, old = out[key], nxt = !!~opts.string.indexOf(key) ? val == null || val === true ? "" : String(val) : typeof val === "boolean" ? val : !!~opts.boolean.indexOf(key) ? val === "false" ? false : val === "true" || (out._.push((x = +val, x * 0 === 0) ? x : val), !!val) : (x = +val, x * 0 === 0) ? x : val; + out[key] = old == null ? nxt : Array.isArray(old) ? 
old.concat(nxt) : [old, nxt]; +} +function mri2(args, opts) { + args = args || []; + opts = opts || {}; + var k, arr, arg, name, val, out = { _: [] }; + var i = 0, j = 0, idx = 0, len = args.length; + const alibi = opts.alias !== void 0; + const strict = opts.unknown !== void 0; + const defaults = opts.default !== void 0; + opts.alias = opts.alias || {}; + opts.string = toArr(opts.string); + opts.boolean = toArr(opts.boolean); + if (alibi) for (k in opts.alias) { + arr = opts.alias[k] = toArr(opts.alias[k]); + for (i = 0; i < arr.length; i++) (opts.alias[arr[i]] = arr.concat(k)).splice(i, 1); + } + for (i = opts.boolean.length; i-- > 0;) { + arr = opts.alias[opts.boolean[i]] || []; + for (j = arr.length; j-- > 0;) opts.boolean.push(arr[j]); + } + for (i = opts.string.length; i-- > 0;) { + arr = opts.alias[opts.string[i]] || []; + for (j = arr.length; j-- > 0;) opts.string.push(arr[j]); + } + if (defaults) for (k in opts.default) { + name = typeof opts.default[k]; + arr = opts.alias[k] = opts.alias[k] || []; + if (opts[name] !== void 0) { + opts[name].push(k); + for (i = 0; i < arr.length; i++) opts[name].push(arr[i]); + } + } + const keys = strict ? Object.keys(opts.alias) : []; + for (i = 0; i < len; i++) { + arg = args[i]; + if (arg === "--") { + out._ = out._.concat(args.slice(++i)); + break; + } + for (j = 0; j < arg.length; j++) if (arg.charCodeAt(j) !== 45) break; + if (j === 0) out._.push(arg); + else if (arg.substring(j, j + 3) === "no-") { + name = arg.substring(j + 3); + if (strict && !~keys.indexOf(name)) return opts.unknown(arg); + out[name] = false; + } else { + for (idx = j + 1; idx < arg.length; idx++) if (arg.charCodeAt(idx) === 61) break; + name = arg.substring(j, idx); + val = arg.substring(++idx) || i + 1 === len || ("" + args[i + 1]).charCodeAt(0) === 45 || args[++i]; + arr = j === 2 ? 
[name] : name; + for (idx = 0; idx < arr.length; idx++) { + name = arr[idx]; + if (strict && !~keys.indexOf(name)) return opts.unknown("-".repeat(j) + name); + toVal(out, name, idx + 1 < arr.length || val, opts); + } + } + } + if (defaults) { + for (k in opts.default) if (out[k] === void 0) out[k] = opts.default[k]; + } + if (alibi) for (k in out) { + arr = opts.alias[k] || []; + while (arr.length > 0) out[arr.shift()] = out[k]; + } + return out; +} +const removeBrackets = (v) => v.replace(/[<[].+/, "").trim(); +const findAllBrackets = (v) => { + const ANGLED_BRACKET_RE_GLOBAL = /<([^>]+)>/g; + const SQUARE_BRACKET_RE_GLOBAL = /\[([^\]]+)\]/g; + const res = []; + const parse = (match) => { + let variadic = false; + let value = match[1]; + if (value.startsWith("...")) { + value = value.slice(3); + variadic = true; + } + return { + required: match[0].startsWith("<"), + value, + variadic + }; + }; + let angledMatch; + while (angledMatch = ANGLED_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(angledMatch)); + let squareMatch; + while (squareMatch = SQUARE_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(squareMatch)); + return res; +}; +const getMriOptions = (options) => { + const result = { + alias: {}, + boolean: [] + }; + for (const [index, option] of options.entries()) { + if (option.names.length > 1) result.alias[option.names[0]] = option.names.slice(1); + if (option.isBoolean) if (option.negated) { + const hasStringTypeOption = options.some((o, i) => { + return i !== index && o.names.some((name) => option.names.includes(name)) && typeof o.required === "boolean"; + }); + if (!hasStringTypeOption) result.boolean.push(option.names[0]); + } else result.boolean.push(option.names[0]); + } + return result; +}; +const findLongest = (arr) => { + return arr.sort((a, b) => { + return a.length > b.length ? -1 : 1; + })[0]; +}; +const padRight = (str, length) => { + return str.length >= length ? str : `${str}${" ".repeat(length - str.length)}`; +}; +const camelcase = (input) => { + return input.replace(/([a-z])-([a-z])/g, (_, p1, p2) => { + return p1 + p2.toUpperCase(); + }); +}; +const setDotProp = (obj, keys, val) => { + let i = 0; + let length = keys.length; + let t = obj; + let x; + for (; i < length; ++i) { + x = t[keys[i]]; + t = t[keys[i]] = i === length - 1 ? val : x != null ? x : !!~keys[i + 1].indexOf(".") || !(+keys[i + 1] > -1) ? {} : []; + } +}; +const setByType = (obj, transforms) => { + for (const key of Object.keys(transforms)) { + const transform = transforms[key]; + if (transform.shouldTransform) { + obj[key] = Array.prototype.concat.call([], obj[key]); + if (typeof transform.transformFunction === "function") obj[key] = obj[key].map(transform.transformFunction); + } + } +}; +const getFileName = (input) => { + const m = /([^\\\/]+)$/.exec(input); + return m ? m[1] : ""; +}; +const camelcaseOptionName = (name) => { + return name.split(".").map((v, i) => { + return i === 0 ? 
camelcase(v) : v; + }).join("."); +}; +var CACError = class extends Error { + constructor(message) { + super(message); + this.name = this.constructor.name; + if (typeof Error.captureStackTrace === "function") Error.captureStackTrace(this, this.constructor); + else this.stack = new Error(message).stack; + } +}; +var Option = class { + constructor(rawName, description, config) { + this.rawName = rawName; + this.description = description; + this.config = Object.assign({}, config); + rawName = rawName.replace(/\.\*/g, ""); + this.negated = false; + this.names = removeBrackets(rawName).split(",").map((v) => { + let name = v.trim().replace(/^-{1,2}/, ""); + if (name.startsWith("no-")) { + this.negated = true; + name = name.replace(/^no-/, ""); + } + return camelcaseOptionName(name); + }).sort((a, b) => a.length > b.length ? 1 : -1); + this.name = this.names[this.names.length - 1]; + if (this.negated && this.config.default == null) this.config.default = true; + if (rawName.includes("<")) this.required = true; + else if (rawName.includes("[")) this.required = false; + else this.isBoolean = true; + } +}; +const processArgs = process.argv; +const platformInfo = `${process.platform}-${process.arch} node-${process.version}`; +var Command = class { + constructor(rawName, description, config = {}, cli$1) { + this.rawName = rawName; + this.description = description; + this.config = config; + this.cli = cli$1; + this.options = []; + this.aliasNames = []; + this.name = removeBrackets(rawName); + this.args = findAllBrackets(rawName); + this.examples = []; + } + usage(text) { + this.usageText = text; + return this; + } + allowUnknownOptions() { + this.config.allowUnknownOptions = true; + return this; + } + ignoreOptionDefaultValue() { + this.config.ignoreOptionDefaultValue = true; + return this; + } + version(version$1, customFlags = "-v, --version") { + this.versionNumber = version$1; + this.option(customFlags, "Display version number"); + return this; + } + example(example) { + this.examples.push(example); + return this; + } + option(rawName, description, config) { + const option = new Option(rawName, description, config); + this.options.push(option); + return this; + } + alias(name) { + this.aliasNames.push(name); + return this; + } + action(callback) { + this.commandAction = callback; + return this; + } + isMatched(name) { + return this.name === name || this.aliasNames.includes(name); + } + get isDefaultCommand() { + return this.name === "" || this.aliasNames.includes("!"); + } + get isGlobalCommand() { + return this instanceof GlobalCommand; + } + hasOption(name) { + name = name.split(".")[0]; + return this.options.find((option) => { + return option.names.includes(name); + }); + } + outputHelp() { + const { name, commands } = this.cli; + const { versionNumber, options: globalOptions, helpCallback } = this.cli.globalCommand; + let sections = [{ body: `${name}${versionNumber ? 
`/${versionNumber}` : ""}` }]; + sections.push({ + title: "Usage", + body: ` $ ${name} ${this.usageText || this.rawName}` + }); + const showCommands = (this.isGlobalCommand || this.isDefaultCommand) && commands.length > 0; + if (showCommands) { + const longestCommandName = findLongest(commands.map((command) => command.rawName)); + sections.push({ + title: "Commands", + body: commands.map((command) => { + return ` ${padRight(command.rawName, longestCommandName.length)} ${command.description}`; + }).join("\n") + }); + sections.push({ + title: `For more info, run any command with the \`--help\` flag`, + body: commands.map((command) => ` $ ${name}${command.name === "" ? "" : ` ${command.name}`} --help`).join("\n") + }); + } + let options = this.isGlobalCommand ? globalOptions : [...this.options, ...globalOptions || []]; + if (!this.isGlobalCommand && !this.isDefaultCommand) options = options.filter((option) => option.name !== "version"); + if (options.length > 0) { + const longestOptionName = findLongest(options.map((option) => option.rawName)); + sections.push({ + title: "Options", + body: options.map((option) => { + return ` ${padRight(option.rawName, longestOptionName.length)} ${option.description} ${option.config.default === void 0 ? "" : `(default: ${option.config.default})`}`; + }).join("\n") + }); + } + if (this.examples.length > 0) sections.push({ + title: "Examples", + body: this.examples.map((example) => { + if (typeof example === "function") return example(name); + return example; + }).join("\n") + }); + if (helpCallback) sections = helpCallback(sections) || sections; + console.log(sections.map((section) => { + return section.title ? `${section.title}: +${section.body}` : section.body; + }).join("\n\n")); + } + outputVersion() { + const { name } = this.cli; + const { versionNumber } = this.cli.globalCommand; + if (versionNumber) console.log(`${name}/${versionNumber} ${platformInfo}`); + } + checkRequiredArgs() { + const minimalArgsCount = this.args.filter((arg) => arg.required).length; + if (this.cli.args.length < minimalArgsCount) throw new CACError(`missing required args for command \`${this.rawName}\``); + } + checkUnknownOptions() { + const { options, globalCommand } = this.cli; + if (!this.config.allowUnknownOptions) { + for (const name of Object.keys(options)) if (name !== "--" && !this.hasOption(name) && !globalCommand.hasOption(name)) throw new CACError(`Unknown option \`${name.length > 1 ? 
`--${name}` : `-${name}`}\``); + } + } + checkOptionValue() { + const { options: parsedOptions, globalCommand } = this.cli; + const options = [...globalCommand.options, ...this.options]; + for (const option of options) { + const value = parsedOptions[option.name.split(".")[0]]; + if (option.required) { + const hasNegated = options.some((o) => o.negated && o.names.includes(option.name)); + if (value === true || value === false && !hasNegated) throw new CACError(`option \`${option.rawName}\` value is missing`); + } + } + } +}; +var GlobalCommand = class extends Command { + constructor(cli$1) { + super("@@global@@", "", {}, cli$1); + } +}; +var __assign = Object.assign; +var CAC = class extends EventEmitter { + constructor(name = "") { + super(); + this.name = name; + this.commands = []; + this.rawArgs = []; + this.args = []; + this.options = {}; + this.globalCommand = new GlobalCommand(this); + this.globalCommand.usage(" [options]"); + } + usage(text) { + this.globalCommand.usage(text); + return this; + } + command(rawName, description, config) { + const command = new Command(rawName, description || "", config, this); + command.globalCommand = this.globalCommand; + this.commands.push(command); + return command; + } + option(rawName, description, config) { + this.globalCommand.option(rawName, description, config); + return this; + } + help(callback) { + this.globalCommand.option("-h, --help", "Display this message"); + this.globalCommand.helpCallback = callback; + this.showHelpOnExit = true; + return this; + } + version(version$1, customFlags = "-v, --version") { + this.globalCommand.version(version$1, customFlags); + this.showVersionOnExit = true; + return this; + } + example(example) { + this.globalCommand.example(example); + return this; + } + outputHelp() { + if (this.matchedCommand) this.matchedCommand.outputHelp(); + else this.globalCommand.outputHelp(); + } + outputVersion() { + this.globalCommand.outputVersion(); + } + setParsedInfo({ args, options }, matchedCommand, matchedCommandName) { + this.args = args; + this.options = options; + if (matchedCommand) this.matchedCommand = matchedCommand; + if (matchedCommandName) this.matchedCommandName = matchedCommandName; + return this; + } + unsetMatchedCommand() { + this.matchedCommand = void 0; + this.matchedCommandName = void 0; + } + parse(argv = processArgs, { run = true } = {}) { + this.rawArgs = argv; + if (!this.name) this.name = argv[1] ? 
getFileName(argv[1]) : "cli"; + let shouldParse = true; + for (const command of this.commands) { + const parsed = this.mri(argv.slice(2), command); + const commandName = parsed.args[0]; + if (command.isMatched(commandName)) { + shouldParse = false; + const parsedInfo = __assign(__assign({}, parsed), { args: parsed.args.slice(1) }); + this.setParsedInfo(parsedInfo, command, commandName); + this.emit(`command:${commandName}`, command); + } + } + if (shouldParse) { + for (const command of this.commands) if (command.name === "") { + shouldParse = false; + const parsed = this.mri(argv.slice(2), command); + this.setParsedInfo(parsed, command); + this.emit(`command:!`, command); + } + } + if (shouldParse) { + const parsed = this.mri(argv.slice(2)); + this.setParsedInfo(parsed); + } + if (this.options.help && this.showHelpOnExit) { + this.outputHelp(); + run = false; + this.unsetMatchedCommand(); + } + if (this.options.version && this.showVersionOnExit && this.matchedCommandName == null) { + this.outputVersion(); + run = false; + this.unsetMatchedCommand(); + } + const parsedArgv = { + args: this.args, + options: this.options + }; + if (run) this.runMatchedCommand(); + if (!this.matchedCommand && this.args[0]) this.emit("command:*"); + return parsedArgv; + } + mri(argv, command) { + const cliOptions = [...this.globalCommand.options, ...command ? command.options : []]; + const mriOptions = getMriOptions(cliOptions); + let argsAfterDoubleDashes = []; + const doubleDashesIndex = argv.indexOf("--"); + if (doubleDashesIndex > -1) { + argsAfterDoubleDashes = argv.slice(doubleDashesIndex + 1); + argv = argv.slice(0, doubleDashesIndex); + } + let parsed = mri2(argv, mriOptions); + parsed = Object.keys(parsed).reduce((res, name) => { + return __assign(__assign({}, res), { [camelcaseOptionName(name)]: parsed[name] }); + }, { _: [] }); + const args = parsed._; + const options = { "--": argsAfterDoubleDashes }; + const ignoreDefault = command && command.config.ignoreOptionDefaultValue ? command.config.ignoreOptionDefaultValue : this.globalCommand.config.ignoreOptionDefaultValue; + let transforms = Object.create(null); + for (const cliOption of cliOptions) { + if (!ignoreDefault && cliOption.config.default !== void 0) for (const name of cliOption.names) options[name] = cliOption.config.default; + if (Array.isArray(cliOption.config.type)) { + if (transforms[cliOption.name] === void 0) { + transforms[cliOption.name] = Object.create(null); + transforms[cliOption.name]["shouldTransform"] = true; + transforms[cliOption.name]["transformFunction"] = cliOption.config.type[0]; + } + } + } + for (const key of Object.keys(parsed)) if (key !== "_") { + const keys = key.split("."); + setDotProp(options, keys, parsed[key]); + setByType(options, transforms); + } + return { + args, + options + }; + } + runMatchedCommand() { + const { args, options, matchedCommand: command } = this; + if (!command || !command.commandAction) return; + command.checkUnknownOptions(); + command.checkOptionValue(); + command.checkRequiredArgs(); + const actionArgs = []; + command.args.forEach((arg, index) => { + if (arg.variadic) actionArgs.push(args.slice(index)); + else actionArgs.push(args[index]); + }); + actionArgs.push(options); + return command.commandAction.apply(this, actionArgs); + } +}; +const cac = (name = "") => new CAC(name); + +//#endregion +//#region node_modules/.pnpm/find-up-simple@1.0.1/node_modules/find-up-simple/index.js +const toPath = (urlOrPath) => urlOrPath instanceof URL ? 
fileURLToPath(urlOrPath) : urlOrPath; +async function findUp(name, { cwd = process$1.cwd(), type = "file", stopAt } = {}) { + let directory = path.resolve(toPath(cwd) ?? ""); + const { root } = path.parse(directory); + stopAt = path.resolve(directory, toPath(stopAt ?? root)); + const isAbsoluteName = path.isAbsolute(name); + while (directory) { + const filePath = isAbsoluteName ? name : path.join(directory, name); + try { + const stats = await fsPromises.stat(filePath); + if (type === "file" && stats.isFile() || type === "directory" && stats.isDirectory()) return filePath; + } catch {} + if (directory === stopAt || directory === root) break; + directory = path.dirname(directory); + } +} + +//#endregion +//#region src/build.ts +async function build() { + const result = await readPackageUp(); + if (!result) { + console.error("No package.json found"); + process.exit(1); + } + console.dir(result); +} +async function readPackageUp() { + const filePath = await findUp("package.json"); + if (!filePath) return; + const data = await readFile(filePath, "utf-8"); + try { + return JSON.parse(data); + } catch {} +} + +//#endregion +//#region src/constants.ts +const { version } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url)).toString()); +const VERSION = version; + +//#endregion +//#region src/cli.ts +const cli = cac("mpw"); +cli.command("build", "Build the project").action(build); +cli.help(); +cli.version(VERSION); +if (process.argv.length === 2) { + cli.outputHelp(); + process.exit(1); +} +cli.on("command:*", () => { + console.error(`Unknown command: "%s"`, cli.args.join(" ")); + console.error("See 'mpw --help' for a list of available commands."); + process.exit(1); +}); +cli.parse(); + +//#endregion \ No newline at end of file diff --git a/packages/mpx/package-lock.json b/packages/mpx/package-lock.json new file mode 100644 index 00000000..86cf0e03 --- /dev/null +++ b/packages/mpx/package-lock.json @@ -0,0 +1,15 @@ +{ + "name": "@mendix/mpx", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@mendix/mpx", + "version": "0.1.0", + "hasInstallScript": true, + "license": "Apache-2.0", + "devDependencies": {} + } + } +} diff --git a/packages/mpx/package.json b/packages/mpx/package.json new file mode 100644 index 00000000..598006c3 --- /dev/null +++ b/packages/mpx/package.json @@ -0,0 +1,29 @@ +{ + "name": "@mendix/mpx", + "version": "0.1.0", + "description": "Mendix tool for bundling pluggable widgets", + "type": "module", + "scripts": { + "preinstall": "npx only-allow pnpm", + "test": "echo 'test is missing'" + }, + "keywords": [ + "mendix", + "cli", + "widgets", + "tool" + ], + "author": "", + "license": "Apache-2.0", + "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad", + "dependencies": { + "rolldown": "1.0.0-beta.26" + }, + "devDependencies": { + "@tsconfig/node22": "^22.0.2", + "@types/node": "^24.0.13", + "cac": "^6.7.14", + "find-up-simple": "^1.0.1", + "prettier": "^3.6.2" + } +} diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml new file mode 100644 index 00000000..0c446de4 --- /dev/null +++ b/packages/mpx/pnpm-lock.yaml @@ -0,0 +1,262 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + rolldown: + specifier: 1.0.0-beta.26 + version: 1.0.0-beta.26 + devDependencies: + '@tsconfig/node22': + specifier: 
^22.0.2 + version: 22.0.2 + '@types/node': + specifier: ^24.0.13 + version: 24.0.13 + cac: + specifier: ^6.7.14 + version: 6.7.14 + find-up-simple: + specifier: ^1.0.1 + version: 1.0.1 + prettier: + specifier: ^3.6.2 + version: 3.6.2 + +packages: + + '@emnapi/core@1.4.4': + resolution: {integrity: sha512-A9CnAbC6ARNMKcIcrQwq6HeHCjpcBZ5wSx4U01WXCqEKlrzB9F9315WDNHkrs2xbx7YjjSxbUYxuN6EQzpcY2g==} + + '@emnapi/runtime@1.4.4': + resolution: {integrity: sha512-hHyapA4A3gPaDCNfiqyZUStTMqIkKRshqPIuDOXv1hcBnD4U3l8cP0T1HMCfGRxQ6V64TGCcoswChANyOAwbQg==} + + '@emnapi/wasi-threads@1.0.3': + resolution: {integrity: sha512-8K5IFFsQqF9wQNJptGbS6FNKgUTsSRYnTqNCG1vPP8jFdjSv18n2mQfJpkt2Oibo9iBEzcDnDxNwKTzC7svlJw==} + + '@napi-rs/wasm-runtime@0.2.12': + resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + + '@oxc-project/runtime@0.76.0': + resolution: {integrity: sha512-17iezP/BukiovZZR7lp6fZZjNTOmodCWQKkI7sn2sOB1TiccRWzO2bpxnE94jhg8l+nBRMrwnM/cjFCr23winw==} + engines: {node: '>=6.9.0'} + + '@oxc-project/types@0.76.0': + resolution: {integrity: sha512-CH3THIrSViKal8yV/Wh3FK0pFhp40nzW1MUDCik9fNuid2D/7JJXKJnfFOAvMxInGXDlvmgT6ACAzrl47TqzkQ==} + + '@rolldown/binding-darwin-arm64@1.0.0-beta.26': + resolution: {integrity: sha512-I73Ej+PVoCJiYQHpy45CHKkLgFqrYv9O1CUJs6TIav6f8f9WAVeN/k0YXrs0tgMO20AfsyEN8zenz2wprVWOYQ==} + cpu: [arm64] + os: [darwin] + + '@rolldown/binding-darwin-x64@1.0.0-beta.26': + resolution: {integrity: sha512-IcXzfO2/9bnm6WfCNmGxBiD1kQQdA0pTjjGcjvglUub8H6RlEY0tz+IIQxUirsl/++84S0PkCuafAxZi8Am8fg==} + cpu: [x64] + os: [darwin] + + '@rolldown/binding-freebsd-x64@1.0.0-beta.26': + resolution: {integrity: sha512-foLJNqEFdvwFm2MXDFxgywxJMic+wovbpEyszlz5K/sUbN7sP2+NJ7MZAUMHuggiswB4Rt1HqRLYKy26zJev8g==} + cpu: [x64] + os: [freebsd] + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.26': + resolution: {integrity: sha512-1BWDpLtujfZCvWAcfIamqHGWo2+VnPWvpZQR0DL5qNit6cu3FC0sRZ+bZzTUK0QWDTA7nUy5RR9fUTL2PQxH2g==} + cpu: [arm] + os: [linux] + + '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.26': + resolution: {integrity: sha512-lg6DVwciFb7sIw0ONDHeLhRuFQl/wz+J26bxfVOVzVoQ7Zgl07gDklv7q96W7SRDAjlG/20flBOexdiPim/I3g==} + cpu: [arm64] + os: [linux] + + '@rolldown/binding-linux-arm64-musl@1.0.0-beta.26': + resolution: {integrity: sha512-0X14trOBVtU13Y0XYeb8EvOvb3/TxJVOmalDakEID/UUX9qkvOmlU0fvoDVmsnhH6yx23bDlpmOj0f8V3BCgIw==} + cpu: [arm64] + os: [linux] + + '@rolldown/binding-linux-x64-gnu@1.0.0-beta.26': + resolution: {integrity: sha512-stb8XloM+N3hSKUs6kS5tNqrlTGsCoYuh9emFZtTovfFzzdFYevgXoOdeGoXv9KkPh5B7MOMl4/7c+WaX46Opg==} + cpu: [x64] + os: [linux] + + '@rolldown/binding-linux-x64-musl@1.0.0-beta.26': + resolution: {integrity: sha512-5udEpAS5IUy2t74d/m40JUYyk3Ga8QXQDvK7eGqDDOwz8/7Piq0kCwmNuLnpSRiqbXNP8mnVlvtIcASJUEtRPA==} + cpu: [x64] + os: [linux] + + '@rolldown/binding-wasm32-wasi@1.0.0-beta.26': + resolution: {integrity: sha512-Is5tTdScXXQzslj7+jCFncPoRNARJ/+fYt/C9+Yx0QQ67/m8pGPLFoCzIKmJQZ8QHzOfq5ML4CQlMgBbCFlZqQ==} + engines: {node: '>=14.21.3'} + cpu: [wasm32] + + '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.26': + resolution: {integrity: sha512-bH+TB+/8Z/95cxGws0fH995HsbsopVYdGcuM1Z/Hnqe7KPLkhqkubsambHQYd1V/QNbLzAgJ0nMAFLyBrwFZZQ==} + cpu: [arm64] + os: [win32] + + '@rolldown/binding-win32-ia32-msvc@1.0.0-beta.26': + resolution: {integrity: sha512-Nsg7ZzfwLHwKGneuNHEpqdBekmZA5pzVOuFx5R8EVyva8dg+sgtDHQRmiVSVYe25YYISNFXDSuHKwNhrWI4HWA==} + cpu: [ia32] + os: [win32] + + '@rolldown/binding-win32-x64-msvc@1.0.0-beta.26': + resolution: 
{integrity: sha512-NE5Btf10Fu3IbpHxrlRkgcO/d05iEpbIiP/XdMYW7Lc9BGSgE4f8njUHnM0V2XJKyXkC1fqv/uHSEw2dCNgzxQ==} + cpu: [x64] + os: [win32] + + '@rolldown/pluginutils@1.0.0-beta.26': + resolution: {integrity: sha512-r/5po89voz/QRPDmoErL10+hVuTAuz1SHvokx+yWBlOIPB5C41jC7QhLqq9kaebx/+EHyoV3z22/qBfX81Ns8A==} + + '@tsconfig/node22@22.0.2': + resolution: {integrity: sha512-Kmwj4u8sDRDrMYRoN9FDEcXD8UpBSaPQQ24Gz+Gamqfm7xxn+GBR7ge/Z7pK8OXNGyUzbSwJj+TH6B+DS/epyA==} + + '@tybys/wasm-util@0.10.0': + resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==} + + '@types/node@24.0.13': + resolution: {integrity: sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ==} + + ansis@4.1.0: + resolution: {integrity: sha512-BGcItUBWSMRgOCe+SVZJ+S7yTRG0eGt9cXAHev72yuGcY23hnLA7Bky5L/xLyPINoSN95geovfBkqoTlNZYa7w==} + engines: {node: '>=14'} + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + find-up-simple@1.0.1: + resolution: {integrity: sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==} + engines: {node: '>=18'} + + prettier@3.6.2: + resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} + engines: {node: '>=14'} + hasBin: true + + rolldown@1.0.0-beta.26: + resolution: {integrity: sha512-2rad1JDFst/GD1J86RuqN1SIP8O8Xv4UbqNyKaVayXTjgF0D6HpvTnUZ1RQ6tANpZweGmq4v6Ay0uyRNEycFPw==} + hasBin: true + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + undici-types@7.8.0: + resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} + +snapshots: + + '@emnapi/core@1.4.4': + dependencies: + '@emnapi/wasi-threads': 1.0.3 + tslib: 2.8.1 + optional: true + + '@emnapi/runtime@1.4.4': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.0.3': + dependencies: + tslib: 2.8.1 + optional: true + + '@napi-rs/wasm-runtime@0.2.12': + dependencies: + '@emnapi/core': 1.4.4 + '@emnapi/runtime': 1.4.4 + '@tybys/wasm-util': 0.10.0 + optional: true + + '@oxc-project/runtime@0.76.0': {} + + '@oxc-project/types@0.76.0': {} + + '@rolldown/binding-darwin-arm64@1.0.0-beta.26': + optional: true + + '@rolldown/binding-darwin-x64@1.0.0-beta.26': + optional: true + + '@rolldown/binding-freebsd-x64@1.0.0-beta.26': + optional: true + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.26': + optional: true + + '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.26': + optional: true + + '@rolldown/binding-linux-arm64-musl@1.0.0-beta.26': + optional: true + + '@rolldown/binding-linux-x64-gnu@1.0.0-beta.26': + optional: true + + '@rolldown/binding-linux-x64-musl@1.0.0-beta.26': + optional: true + + '@rolldown/binding-wasm32-wasi@1.0.0-beta.26': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + + '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.26': + optional: true + + '@rolldown/binding-win32-ia32-msvc@1.0.0-beta.26': + optional: true + + '@rolldown/binding-win32-x64-msvc@1.0.0-beta.26': + optional: true + + '@rolldown/pluginutils@1.0.0-beta.26': {} + + '@tsconfig/node22@22.0.2': {} + + '@tybys/wasm-util@0.10.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@types/node@24.0.13': + dependencies: + undici-types: 7.8.0 + + ansis@4.1.0: {} + + 
cac@6.7.14: {} + + find-up-simple@1.0.1: {} + + prettier@3.6.2: {} + + rolldown@1.0.0-beta.26: + dependencies: + '@oxc-project/runtime': 0.76.0 + '@oxc-project/types': 0.76.0 + '@rolldown/pluginutils': 1.0.0-beta.26 + ansis: 4.1.0 + optionalDependencies: + '@rolldown/binding-darwin-arm64': 1.0.0-beta.26 + '@rolldown/binding-darwin-x64': 1.0.0-beta.26 + '@rolldown/binding-freebsd-x64': 1.0.0-beta.26 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-beta.26 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-beta.26 + '@rolldown/binding-linux-arm64-musl': 1.0.0-beta.26 + '@rolldown/binding-linux-x64-gnu': 1.0.0-beta.26 + '@rolldown/binding-linux-x64-musl': 1.0.0-beta.26 + '@rolldown/binding-wasm32-wasi': 1.0.0-beta.26 + '@rolldown/binding-win32-arm64-msvc': 1.0.0-beta.26 + '@rolldown/binding-win32-ia32-msvc': 1.0.0-beta.26 + '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.26 + + tslib@2.8.1: + optional: true + + undici-types@7.8.0: {} diff --git a/packages/mpx/rolldown.config.ts b/packages/mpx/rolldown.config.ts new file mode 100644 index 00000000..e193f5a3 --- /dev/null +++ b/packages/mpx/rolldown.config.ts @@ -0,0 +1,13 @@ +import type { RolldownOptions } from "rolldown"; + +const config: RolldownOptions = { + input: "./src/cli.ts", + external: ["rolldown"], + output: { + file: "./bin/mpw.js", + inlineDynamicImports: true + }, + platform: "node" +}; + +export default config; diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts new file mode 100644 index 00000000..065d3b5e --- /dev/null +++ b/packages/mpx/src/build.ts @@ -0,0 +1,23 @@ +import { findUp } from "find-up-simple" +import { readFile } from "node:fs/promises" + +export async function build() { + const result = await readPackageUp() + if (!result) { + console.error("No package.json found") + process.exit(1) + } + console.dir(result) +} + +export async function readPackageUp(): Promise<{} | undefined> { + const filePath = await findUp("package.json"); + if (!filePath) { + return; + } + const data = await readFile(filePath, "utf-8"); + + try { + return JSON.parse(data); + } catch {} +} \ No newline at end of file diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts new file mode 100644 index 00000000..7461c5d0 --- /dev/null +++ b/packages/mpx/src/cli.ts @@ -0,0 +1,25 @@ +#!/usr/bin/env node + +import { cac } from "cac" +import { build } from "./build.js" +import { VERSION } from "./constants.js" + +const cli = cac("mpw") + +cli.command("build", "Build the project").action(build) + +cli.help() +cli.version(VERSION) + +if (process.argv.length === 2) { + cli.outputHelp() + process.exit(1) +} + +cli.on("command:*", () => { + console.error(`Unknown command: "%s"`, cli.args.join(" ")) + console.error("See 'mpw --help' for a list of available commands.") + process.exit(1) +}) + +cli.parse() diff --git a/packages/mpx/src/constants.ts b/packages/mpx/src/constants.ts new file mode 100644 index 00000000..530ac15f --- /dev/null +++ b/packages/mpx/src/constants.ts @@ -0,0 +1,7 @@ +import { readFileSync } from "node:fs" + +const { version } = JSON.parse( + readFileSync(new URL("../package.json", import.meta.url)).toString() +) + +export const VERSION = version as string diff --git a/packages/mpx/tsconfig.json b/packages/mpx/tsconfig.json new file mode 100644 index 00000000..ecc5cfc6 --- /dev/null +++ b/packages/mpx/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "@tsconfig/node22/tsconfig.json", + "compilerOptions": { + "noEmit": true + } +} From 60054377e6ce66dbba6c7181dc93daaeb7f165c9 Mon Sep 17 00:00:00 2001 From: Illia Obukhau 
<8282906+iobuhov@users.noreply.github.com> Date: Fri, 11 Jul 2025 18:41:18 +0200 Subject: [PATCH 02/45] feat: add arktype --- packages/mpx/bin/mpw.js | 551 -- packages/mpx/bin/mpx.js | 7896 ++++++++++++++++++++++ packages/mpx/package.json | 5 +- packages/mpx/pnpm-lock.yaml | 33 + packages/mpx/rolldown.config.ts | 2 +- packages/mpx/src/build.ts | 27 +- packages/mpx/src/cli.ts | 28 +- packages/mpx/src/lib/core/PackageJson.ts | 9 + packages/mpx/test/package.json | 5 + 9 files changed, 7981 insertions(+), 575 deletions(-) delete mode 100644 packages/mpx/bin/mpw.js create mode 100644 packages/mpx/bin/mpx.js create mode 100644 packages/mpx/src/lib/core/PackageJson.ts create mode 100644 packages/mpx/test/package.json diff --git a/packages/mpx/bin/mpw.js b/packages/mpx/bin/mpw.js deleted file mode 100644 index f7a2585d..00000000 --- a/packages/mpx/bin/mpw.js +++ /dev/null @@ -1,551 +0,0 @@ -#!/usr/bin/env node -import { EventEmitter } from "events"; -import process$1 from "node:process"; -import fsPromises, { readFile } from "node:fs/promises"; -import { fileURLToPath } from "node:url"; -import { readFileSync } from "node:fs"; -import path from "node:path"; - -//#region node_modules/.pnpm/cac@6.7.14/node_modules/cac/dist/index.mjs -function toArr(any) { - return any == null ? [] : Array.isArray(any) ? any : [any]; -} -function toVal(out, key, val, opts) { - var x, old = out[key], nxt = !!~opts.string.indexOf(key) ? val == null || val === true ? "" : String(val) : typeof val === "boolean" ? val : !!~opts.boolean.indexOf(key) ? val === "false" ? false : val === "true" || (out._.push((x = +val, x * 0 === 0) ? x : val), !!val) : (x = +val, x * 0 === 0) ? x : val; - out[key] = old == null ? nxt : Array.isArray(old) ? old.concat(nxt) : [old, nxt]; -} -function mri2(args, opts) { - args = args || []; - opts = opts || {}; - var k, arr, arg, name, val, out = { _: [] }; - var i = 0, j = 0, idx = 0, len = args.length; - const alibi = opts.alias !== void 0; - const strict = opts.unknown !== void 0; - const defaults = opts.default !== void 0; - opts.alias = opts.alias || {}; - opts.string = toArr(opts.string); - opts.boolean = toArr(opts.boolean); - if (alibi) for (k in opts.alias) { - arr = opts.alias[k] = toArr(opts.alias[k]); - for (i = 0; i < arr.length; i++) (opts.alias[arr[i]] = arr.concat(k)).splice(i, 1); - } - for (i = opts.boolean.length; i-- > 0;) { - arr = opts.alias[opts.boolean[i]] || []; - for (j = arr.length; j-- > 0;) opts.boolean.push(arr[j]); - } - for (i = opts.string.length; i-- > 0;) { - arr = opts.alias[opts.string[i]] || []; - for (j = arr.length; j-- > 0;) opts.string.push(arr[j]); - } - if (defaults) for (k in opts.default) { - name = typeof opts.default[k]; - arr = opts.alias[k] = opts.alias[k] || []; - if (opts[name] !== void 0) { - opts[name].push(k); - for (i = 0; i < arr.length; i++) opts[name].push(arr[i]); - } - } - const keys = strict ? 
Object.keys(opts.alias) : []; - for (i = 0; i < len; i++) { - arg = args[i]; - if (arg === "--") { - out._ = out._.concat(args.slice(++i)); - break; - } - for (j = 0; j < arg.length; j++) if (arg.charCodeAt(j) !== 45) break; - if (j === 0) out._.push(arg); - else if (arg.substring(j, j + 3) === "no-") { - name = arg.substring(j + 3); - if (strict && !~keys.indexOf(name)) return opts.unknown(arg); - out[name] = false; - } else { - for (idx = j + 1; idx < arg.length; idx++) if (arg.charCodeAt(idx) === 61) break; - name = arg.substring(j, idx); - val = arg.substring(++idx) || i + 1 === len || ("" + args[i + 1]).charCodeAt(0) === 45 || args[++i]; - arr = j === 2 ? [name] : name; - for (idx = 0; idx < arr.length; idx++) { - name = arr[idx]; - if (strict && !~keys.indexOf(name)) return opts.unknown("-".repeat(j) + name); - toVal(out, name, idx + 1 < arr.length || val, opts); - } - } - } - if (defaults) { - for (k in opts.default) if (out[k] === void 0) out[k] = opts.default[k]; - } - if (alibi) for (k in out) { - arr = opts.alias[k] || []; - while (arr.length > 0) out[arr.shift()] = out[k]; - } - return out; -} -const removeBrackets = (v) => v.replace(/[<[].+/, "").trim(); -const findAllBrackets = (v) => { - const ANGLED_BRACKET_RE_GLOBAL = /<([^>]+)>/g; - const SQUARE_BRACKET_RE_GLOBAL = /\[([^\]]+)\]/g; - const res = []; - const parse = (match) => { - let variadic = false; - let value = match[1]; - if (value.startsWith("...")) { - value = value.slice(3); - variadic = true; - } - return { - required: match[0].startsWith("<"), - value, - variadic - }; - }; - let angledMatch; - while (angledMatch = ANGLED_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(angledMatch)); - let squareMatch; - while (squareMatch = SQUARE_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(squareMatch)); - return res; -}; -const getMriOptions = (options) => { - const result = { - alias: {}, - boolean: [] - }; - for (const [index, option] of options.entries()) { - if (option.names.length > 1) result.alias[option.names[0]] = option.names.slice(1); - if (option.isBoolean) if (option.negated) { - const hasStringTypeOption = options.some((o, i) => { - return i !== index && o.names.some((name) => option.names.includes(name)) && typeof o.required === "boolean"; - }); - if (!hasStringTypeOption) result.boolean.push(option.names[0]); - } else result.boolean.push(option.names[0]); - } - return result; -}; -const findLongest = (arr) => { - return arr.sort((a, b) => { - return a.length > b.length ? -1 : 1; - })[0]; -}; -const padRight = (str, length) => { - return str.length >= length ? str : `${str}${" ".repeat(length - str.length)}`; -}; -const camelcase = (input) => { - return input.replace(/([a-z])-([a-z])/g, (_, p1, p2) => { - return p1 + p2.toUpperCase(); - }); -}; -const setDotProp = (obj, keys, val) => { - let i = 0; - let length = keys.length; - let t = obj; - let x; - for (; i < length; ++i) { - x = t[keys[i]]; - t = t[keys[i]] = i === length - 1 ? val : x != null ? x : !!~keys[i + 1].indexOf(".") || !(+keys[i + 1] > -1) ? {} : []; - } -}; -const setByType = (obj, transforms) => { - for (const key of Object.keys(transforms)) { - const transform = transforms[key]; - if (transform.shouldTransform) { - obj[key] = Array.prototype.concat.call([], obj[key]); - if (typeof transform.transformFunction === "function") obj[key] = obj[key].map(transform.transformFunction); - } - } -}; -const getFileName = (input) => { - const m = /([^\\\/]+)$/.exec(input); - return m ? 
m[1] : ""; -}; -const camelcaseOptionName = (name) => { - return name.split(".").map((v, i) => { - return i === 0 ? camelcase(v) : v; - }).join("."); -}; -var CACError = class extends Error { - constructor(message) { - super(message); - this.name = this.constructor.name; - if (typeof Error.captureStackTrace === "function") Error.captureStackTrace(this, this.constructor); - else this.stack = new Error(message).stack; - } -}; -var Option = class { - constructor(rawName, description, config) { - this.rawName = rawName; - this.description = description; - this.config = Object.assign({}, config); - rawName = rawName.replace(/\.\*/g, ""); - this.negated = false; - this.names = removeBrackets(rawName).split(",").map((v) => { - let name = v.trim().replace(/^-{1,2}/, ""); - if (name.startsWith("no-")) { - this.negated = true; - name = name.replace(/^no-/, ""); - } - return camelcaseOptionName(name); - }).sort((a, b) => a.length > b.length ? 1 : -1); - this.name = this.names[this.names.length - 1]; - if (this.negated && this.config.default == null) this.config.default = true; - if (rawName.includes("<")) this.required = true; - else if (rawName.includes("[")) this.required = false; - else this.isBoolean = true; - } -}; -const processArgs = process.argv; -const platformInfo = `${process.platform}-${process.arch} node-${process.version}`; -var Command = class { - constructor(rawName, description, config = {}, cli$1) { - this.rawName = rawName; - this.description = description; - this.config = config; - this.cli = cli$1; - this.options = []; - this.aliasNames = []; - this.name = removeBrackets(rawName); - this.args = findAllBrackets(rawName); - this.examples = []; - } - usage(text) { - this.usageText = text; - return this; - } - allowUnknownOptions() { - this.config.allowUnknownOptions = true; - return this; - } - ignoreOptionDefaultValue() { - this.config.ignoreOptionDefaultValue = true; - return this; - } - version(version$1, customFlags = "-v, --version") { - this.versionNumber = version$1; - this.option(customFlags, "Display version number"); - return this; - } - example(example) { - this.examples.push(example); - return this; - } - option(rawName, description, config) { - const option = new Option(rawName, description, config); - this.options.push(option); - return this; - } - alias(name) { - this.aliasNames.push(name); - return this; - } - action(callback) { - this.commandAction = callback; - return this; - } - isMatched(name) { - return this.name === name || this.aliasNames.includes(name); - } - get isDefaultCommand() { - return this.name === "" || this.aliasNames.includes("!"); - } - get isGlobalCommand() { - return this instanceof GlobalCommand; - } - hasOption(name) { - name = name.split(".")[0]; - return this.options.find((option) => { - return option.names.includes(name); - }); - } - outputHelp() { - const { name, commands } = this.cli; - const { versionNumber, options: globalOptions, helpCallback } = this.cli.globalCommand; - let sections = [{ body: `${name}${versionNumber ? 
`/${versionNumber}` : ""}` }]; - sections.push({ - title: "Usage", - body: ` $ ${name} ${this.usageText || this.rawName}` - }); - const showCommands = (this.isGlobalCommand || this.isDefaultCommand) && commands.length > 0; - if (showCommands) { - const longestCommandName = findLongest(commands.map((command) => command.rawName)); - sections.push({ - title: "Commands", - body: commands.map((command) => { - return ` ${padRight(command.rawName, longestCommandName.length)} ${command.description}`; - }).join("\n") - }); - sections.push({ - title: `For more info, run any command with the \`--help\` flag`, - body: commands.map((command) => ` $ ${name}${command.name === "" ? "" : ` ${command.name}`} --help`).join("\n") - }); - } - let options = this.isGlobalCommand ? globalOptions : [...this.options, ...globalOptions || []]; - if (!this.isGlobalCommand && !this.isDefaultCommand) options = options.filter((option) => option.name !== "version"); - if (options.length > 0) { - const longestOptionName = findLongest(options.map((option) => option.rawName)); - sections.push({ - title: "Options", - body: options.map((option) => { - return ` ${padRight(option.rawName, longestOptionName.length)} ${option.description} ${option.config.default === void 0 ? "" : `(default: ${option.config.default})`}`; - }).join("\n") - }); - } - if (this.examples.length > 0) sections.push({ - title: "Examples", - body: this.examples.map((example) => { - if (typeof example === "function") return example(name); - return example; - }).join("\n") - }); - if (helpCallback) sections = helpCallback(sections) || sections; - console.log(sections.map((section) => { - return section.title ? `${section.title}: -${section.body}` : section.body; - }).join("\n\n")); - } - outputVersion() { - const { name } = this.cli; - const { versionNumber } = this.cli.globalCommand; - if (versionNumber) console.log(`${name}/${versionNumber} ${platformInfo}`); - } - checkRequiredArgs() { - const minimalArgsCount = this.args.filter((arg) => arg.required).length; - if (this.cli.args.length < minimalArgsCount) throw new CACError(`missing required args for command \`${this.rawName}\``); - } - checkUnknownOptions() { - const { options, globalCommand } = this.cli; - if (!this.config.allowUnknownOptions) { - for (const name of Object.keys(options)) if (name !== "--" && !this.hasOption(name) && !globalCommand.hasOption(name)) throw new CACError(`Unknown option \`${name.length > 1 ? 
`--${name}` : `-${name}`}\``); - } - } - checkOptionValue() { - const { options: parsedOptions, globalCommand } = this.cli; - const options = [...globalCommand.options, ...this.options]; - for (const option of options) { - const value = parsedOptions[option.name.split(".")[0]]; - if (option.required) { - const hasNegated = options.some((o) => o.negated && o.names.includes(option.name)); - if (value === true || value === false && !hasNegated) throw new CACError(`option \`${option.rawName}\` value is missing`); - } - } - } -}; -var GlobalCommand = class extends Command { - constructor(cli$1) { - super("@@global@@", "", {}, cli$1); - } -}; -var __assign = Object.assign; -var CAC = class extends EventEmitter { - constructor(name = "") { - super(); - this.name = name; - this.commands = []; - this.rawArgs = []; - this.args = []; - this.options = {}; - this.globalCommand = new GlobalCommand(this); - this.globalCommand.usage(" [options]"); - } - usage(text) { - this.globalCommand.usage(text); - return this; - } - command(rawName, description, config) { - const command = new Command(rawName, description || "", config, this); - command.globalCommand = this.globalCommand; - this.commands.push(command); - return command; - } - option(rawName, description, config) { - this.globalCommand.option(rawName, description, config); - return this; - } - help(callback) { - this.globalCommand.option("-h, --help", "Display this message"); - this.globalCommand.helpCallback = callback; - this.showHelpOnExit = true; - return this; - } - version(version$1, customFlags = "-v, --version") { - this.globalCommand.version(version$1, customFlags); - this.showVersionOnExit = true; - return this; - } - example(example) { - this.globalCommand.example(example); - return this; - } - outputHelp() { - if (this.matchedCommand) this.matchedCommand.outputHelp(); - else this.globalCommand.outputHelp(); - } - outputVersion() { - this.globalCommand.outputVersion(); - } - setParsedInfo({ args, options }, matchedCommand, matchedCommandName) { - this.args = args; - this.options = options; - if (matchedCommand) this.matchedCommand = matchedCommand; - if (matchedCommandName) this.matchedCommandName = matchedCommandName; - return this; - } - unsetMatchedCommand() { - this.matchedCommand = void 0; - this.matchedCommandName = void 0; - } - parse(argv = processArgs, { run = true } = {}) { - this.rawArgs = argv; - if (!this.name) this.name = argv[1] ? 
getFileName(argv[1]) : "cli"; - let shouldParse = true; - for (const command of this.commands) { - const parsed = this.mri(argv.slice(2), command); - const commandName = parsed.args[0]; - if (command.isMatched(commandName)) { - shouldParse = false; - const parsedInfo = __assign(__assign({}, parsed), { args: parsed.args.slice(1) }); - this.setParsedInfo(parsedInfo, command, commandName); - this.emit(`command:${commandName}`, command); - } - } - if (shouldParse) { - for (const command of this.commands) if (command.name === "") { - shouldParse = false; - const parsed = this.mri(argv.slice(2), command); - this.setParsedInfo(parsed, command); - this.emit(`command:!`, command); - } - } - if (shouldParse) { - const parsed = this.mri(argv.slice(2)); - this.setParsedInfo(parsed); - } - if (this.options.help && this.showHelpOnExit) { - this.outputHelp(); - run = false; - this.unsetMatchedCommand(); - } - if (this.options.version && this.showVersionOnExit && this.matchedCommandName == null) { - this.outputVersion(); - run = false; - this.unsetMatchedCommand(); - } - const parsedArgv = { - args: this.args, - options: this.options - }; - if (run) this.runMatchedCommand(); - if (!this.matchedCommand && this.args[0]) this.emit("command:*"); - return parsedArgv; - } - mri(argv, command) { - const cliOptions = [...this.globalCommand.options, ...command ? command.options : []]; - const mriOptions = getMriOptions(cliOptions); - let argsAfterDoubleDashes = []; - const doubleDashesIndex = argv.indexOf("--"); - if (doubleDashesIndex > -1) { - argsAfterDoubleDashes = argv.slice(doubleDashesIndex + 1); - argv = argv.slice(0, doubleDashesIndex); - } - let parsed = mri2(argv, mriOptions); - parsed = Object.keys(parsed).reduce((res, name) => { - return __assign(__assign({}, res), { [camelcaseOptionName(name)]: parsed[name] }); - }, { _: [] }); - const args = parsed._; - const options = { "--": argsAfterDoubleDashes }; - const ignoreDefault = command && command.config.ignoreOptionDefaultValue ? command.config.ignoreOptionDefaultValue : this.globalCommand.config.ignoreOptionDefaultValue; - let transforms = Object.create(null); - for (const cliOption of cliOptions) { - if (!ignoreDefault && cliOption.config.default !== void 0) for (const name of cliOption.names) options[name] = cliOption.config.default; - if (Array.isArray(cliOption.config.type)) { - if (transforms[cliOption.name] === void 0) { - transforms[cliOption.name] = Object.create(null); - transforms[cliOption.name]["shouldTransform"] = true; - transforms[cliOption.name]["transformFunction"] = cliOption.config.type[0]; - } - } - } - for (const key of Object.keys(parsed)) if (key !== "_") { - const keys = key.split("."); - setDotProp(options, keys, parsed[key]); - setByType(options, transforms); - } - return { - args, - options - }; - } - runMatchedCommand() { - const { args, options, matchedCommand: command } = this; - if (!command || !command.commandAction) return; - command.checkUnknownOptions(); - command.checkOptionValue(); - command.checkRequiredArgs(); - const actionArgs = []; - command.args.forEach((arg, index) => { - if (arg.variadic) actionArgs.push(args.slice(index)); - else actionArgs.push(args[index]); - }); - actionArgs.push(options); - return command.commandAction.apply(this, actionArgs); - } -}; -const cac = (name = "") => new CAC(name); - -//#endregion -//#region node_modules/.pnpm/find-up-simple@1.0.1/node_modules/find-up-simple/index.js -const toPath = (urlOrPath) => urlOrPath instanceof URL ? 
fileURLToPath(urlOrPath) : urlOrPath; -async function findUp(name, { cwd = process$1.cwd(), type = "file", stopAt } = {}) { - let directory = path.resolve(toPath(cwd) ?? ""); - const { root } = path.parse(directory); - stopAt = path.resolve(directory, toPath(stopAt ?? root)); - const isAbsoluteName = path.isAbsolute(name); - while (directory) { - const filePath = isAbsoluteName ? name : path.join(directory, name); - try { - const stats = await fsPromises.stat(filePath); - if (type === "file" && stats.isFile() || type === "directory" && stats.isDirectory()) return filePath; - } catch {} - if (directory === stopAt || directory === root) break; - directory = path.dirname(directory); - } -} - -//#endregion -//#region src/build.ts -async function build() { - const result = await readPackageUp(); - if (!result) { - console.error("No package.json found"); - process.exit(1); - } - console.dir(result); -} -async function readPackageUp() { - const filePath = await findUp("package.json"); - if (!filePath) return; - const data = await readFile(filePath, "utf-8"); - try { - return JSON.parse(data); - } catch {} -} - -//#endregion -//#region src/constants.ts -const { version } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url)).toString()); -const VERSION = version; - -//#endregion -//#region src/cli.ts -const cli = cac("mpw"); -cli.command("build", "Build the project").action(build); -cli.help(); -cli.version(VERSION); -if (process.argv.length === 2) { - cli.outputHelp(); - process.exit(1); -} -cli.on("command:*", () => { - console.error(`Unknown command: "%s"`, cli.args.join(" ")); - console.error("See 'mpw --help' for a list of available commands."); - process.exit(1); -}); -cli.parse(); - -//#endregion \ No newline at end of file diff --git a/packages/mpx/bin/mpx.js b/packages/mpx/bin/mpx.js new file mode 100644 index 00000000..e1aead94 --- /dev/null +++ b/packages/mpx/bin/mpx.js @@ -0,0 +1,7896 @@ +#!/usr/bin/env node +import { EventEmitter } from "events"; +import process$1 from "node:process"; +import fsPromises, { readFile } from "node:fs/promises"; +import { fileURLToPath } from "node:url"; +import { readFileSync } from "node:fs"; +import path from "node:path"; + +//#region node_modules/.pnpm/cac@6.7.14/node_modules/cac/dist/index.mjs +function toArr(any) { + return any == null ? [] : Array.isArray(any) ? any : [any]; +} +function toVal(out, key, val, opts) { + var x, old = out[key], nxt = !!~opts.string.indexOf(key) ? val == null || val === true ? "" : String(val) : typeof val === "boolean" ? val : !!~opts.boolean.indexOf(key) ? val === "false" ? false : val === "true" || (out._.push((x = +val, x * 0 === 0) ? x : val), !!val) : (x = +val, x * 0 === 0) ? x : val; + out[key] = old == null ? nxt : Array.isArray(old) ? 
old.concat(nxt) : [old, nxt]; +} +function mri2(args$1, opts) { + args$1 = args$1 || []; + opts = opts || {}; + var k, arr, arg, name, val, out = { _: [] }; + var i = 0, j = 0, idx = 0, len = args$1.length; + const alibi = opts.alias !== void 0; + const strict = opts.unknown !== void 0; + const defaults = opts.default !== void 0; + opts.alias = opts.alias || {}; + opts.string = toArr(opts.string); + opts.boolean = toArr(opts.boolean); + if (alibi) for (k in opts.alias) { + arr = opts.alias[k] = toArr(opts.alias[k]); + for (i = 0; i < arr.length; i++) (opts.alias[arr[i]] = arr.concat(k)).splice(i, 1); + } + for (i = opts.boolean.length; i-- > 0;) { + arr = opts.alias[opts.boolean[i]] || []; + for (j = arr.length; j-- > 0;) opts.boolean.push(arr[j]); + } + for (i = opts.string.length; i-- > 0;) { + arr = opts.alias[opts.string[i]] || []; + for (j = arr.length; j-- > 0;) opts.string.push(arr[j]); + } + if (defaults) for (k in opts.default) { + name = typeof opts.default[k]; + arr = opts.alias[k] = opts.alias[k] || []; + if (opts[name] !== void 0) { + opts[name].push(k); + for (i = 0; i < arr.length; i++) opts[name].push(arr[i]); + } + } + const keys = strict ? Object.keys(opts.alias) : []; + for (i = 0; i < len; i++) { + arg = args$1[i]; + if (arg === "--") { + out._ = out._.concat(args$1.slice(++i)); + break; + } + for (j = 0; j < arg.length; j++) if (arg.charCodeAt(j) !== 45) break; + if (j === 0) out._.push(arg); + else if (arg.substring(j, j + 3) === "no-") { + name = arg.substring(j + 3); + if (strict && !~keys.indexOf(name)) return opts.unknown(arg); + out[name] = false; + } else { + for (idx = j + 1; idx < arg.length; idx++) if (arg.charCodeAt(idx) === 61) break; + name = arg.substring(j, idx); + val = arg.substring(++idx) || i + 1 === len || ("" + args$1[i + 1]).charCodeAt(0) === 45 || args$1[++i]; + arr = j === 2 ? 
[name] : name; + for (idx = 0; idx < arr.length; idx++) { + name = arr[idx]; + if (strict && !~keys.indexOf(name)) return opts.unknown("-".repeat(j) + name); + toVal(out, name, idx + 1 < arr.length || val, opts); + } + } + } + if (defaults) { + for (k in opts.default) if (out[k] === void 0) out[k] = opts.default[k]; + } + if (alibi) for (k in out) { + arr = opts.alias[k] || []; + while (arr.length > 0) out[arr.shift()] = out[k]; + } + return out; +} +const removeBrackets = (v) => v.replace(/[<[].+/, "").trim(); +const findAllBrackets = (v) => { + const ANGLED_BRACKET_RE_GLOBAL = /<([^>]+)>/g; + const SQUARE_BRACKET_RE_GLOBAL = /\[([^\]]+)\]/g; + const res = []; + const parse = (match$1) => { + let variadic = false; + let value$1 = match$1[1]; + if (value$1.startsWith("...")) { + value$1 = value$1.slice(3); + variadic = true; + } + return { + required: match$1[0].startsWith("<"), + value: value$1, + variadic + }; + }; + let angledMatch; + while (angledMatch = ANGLED_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(angledMatch)); + let squareMatch; + while (squareMatch = SQUARE_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(squareMatch)); + return res; +}; +const getMriOptions = (options) => { + const result = { + alias: {}, + boolean: [] + }; + for (const [index, option] of options.entries()) { + if (option.names.length > 1) result.alias[option.names[0]] = option.names.slice(1); + if (option.isBoolean) if (option.negated) { + const hasStringTypeOption = options.some((o, i) => { + return i !== index && o.names.some((name) => option.names.includes(name)) && typeof o.required === "boolean"; + }); + if (!hasStringTypeOption) result.boolean.push(option.names[0]); + } else result.boolean.push(option.names[0]); + } + return result; +}; +const findLongest = (arr) => { + return arr.sort((a, b) => { + return a.length > b.length ? -1 : 1; + })[0]; +}; +const padRight = (str, length) => { + return str.length >= length ? str : `${str}${" ".repeat(length - str.length)}`; +}; +const camelcase = (input) => { + return input.replace(/([a-z])-([a-z])/g, (_, p1, p2) => { + return p1 + p2.toUpperCase(); + }); +}; +const setDotProp = (obj, keys, val) => { + let i = 0; + let length = keys.length; + let t = obj; + let x; + for (; i < length; ++i) { + x = t[keys[i]]; + t = t[keys[i]] = i === length - 1 ? val : x != null ? x : !!~keys[i + 1].indexOf(".") || !(+keys[i + 1] > -1) ? {} : []; + } +}; +const setByType = (obj, transforms) => { + for (const key of Object.keys(transforms)) { + const transform = transforms[key]; + if (transform.shouldTransform) { + obj[key] = Array.prototype.concat.call([], obj[key]); + if (typeof transform.transformFunction === "function") obj[key] = obj[key].map(transform.transformFunction); + } + } +}; +const getFileName = (input) => { + const m = /([^\\\/]+)$/.exec(input); + return m ? m[1] : ""; +}; +const camelcaseOptionName = (name) => { + return name.split(".").map((v, i) => { + return i === 0 ? 
camelcase(v) : v; + }).join("."); +}; +var CACError = class extends Error { + constructor(message) { + super(message); + this.name = this.constructor.name; + if (typeof Error.captureStackTrace === "function") Error.captureStackTrace(this, this.constructor); + else this.stack = new Error(message).stack; + } +}; +var Option = class { + constructor(rawName, description, config) { + this.rawName = rawName; + this.description = description; + this.config = Object.assign({}, config); + rawName = rawName.replace(/\.\*/g, ""); + this.negated = false; + this.names = removeBrackets(rawName).split(",").map((v) => { + let name = v.trim().replace(/^-{1,2}/, ""); + if (name.startsWith("no-")) { + this.negated = true; + name = name.replace(/^no-/, ""); + } + return camelcaseOptionName(name); + }).sort((a, b) => a.length > b.length ? 1 : -1); + this.name = this.names[this.names.length - 1]; + if (this.negated && this.config.default == null) this.config.default = true; + if (rawName.includes("<")) this.required = true; + else if (rawName.includes("[")) this.required = false; + else this.isBoolean = true; + } +}; +const processArgs = process.argv; +const platformInfo = `${process.platform}-${process.arch} node-${process.version}`; +var Command = class { + constructor(rawName, description, config = {}, cli$1) { + this.rawName = rawName; + this.description = description; + this.config = config; + this.cli = cli$1; + this.options = []; + this.aliasNames = []; + this.name = removeBrackets(rawName); + this.args = findAllBrackets(rawName); + this.examples = []; + } + usage(text) { + this.usageText = text; + return this; + } + allowUnknownOptions() { + this.config.allowUnknownOptions = true; + return this; + } + ignoreOptionDefaultValue() { + this.config.ignoreOptionDefaultValue = true; + return this; + } + version(version$1, customFlags = "-v, --version") { + this.versionNumber = version$1; + this.option(customFlags, "Display version number"); + return this; + } + example(example) { + this.examples.push(example); + return this; + } + option(rawName, description, config) { + const option = new Option(rawName, description, config); + this.options.push(option); + return this; + } + alias(name) { + this.aliasNames.push(name); + return this; + } + action(callback) { + this.commandAction = callback; + return this; + } + isMatched(name) { + return this.name === name || this.aliasNames.includes(name); + } + get isDefaultCommand() { + return this.name === "" || this.aliasNames.includes("!"); + } + get isGlobalCommand() { + return this instanceof GlobalCommand; + } + hasOption(name) { + name = name.split(".")[0]; + return this.options.find((option) => { + return option.names.includes(name); + }); + } + outputHelp() { + const { name, commands } = this.cli; + const { versionNumber, options: globalOptions, helpCallback } = this.cli.globalCommand; + let sections = [{ body: `${name}${versionNumber ? 
`/${versionNumber}` : ""}` }]; + sections.push({ + title: "Usage", + body: ` $ ${name} ${this.usageText || this.rawName}` + }); + const showCommands = (this.isGlobalCommand || this.isDefaultCommand) && commands.length > 0; + if (showCommands) { + const longestCommandName = findLongest(commands.map((command) => command.rawName)); + sections.push({ + title: "Commands", + body: commands.map((command) => { + return ` ${padRight(command.rawName, longestCommandName.length)} ${command.description}`; + }).join("\n") + }); + sections.push({ + title: `For more info, run any command with the \`--help\` flag`, + body: commands.map((command) => ` $ ${name}${command.name === "" ? "" : ` ${command.name}`} --help`).join("\n") + }); + } + let options = this.isGlobalCommand ? globalOptions : [...this.options, ...globalOptions || []]; + if (!this.isGlobalCommand && !this.isDefaultCommand) options = options.filter((option) => option.name !== "version"); + if (options.length > 0) { + const longestOptionName = findLongest(options.map((option) => option.rawName)); + sections.push({ + title: "Options", + body: options.map((option) => { + return ` ${padRight(option.rawName, longestOptionName.length)} ${option.description} ${option.config.default === void 0 ? "" : `(default: ${option.config.default})`}`; + }).join("\n") + }); + } + if (this.examples.length > 0) sections.push({ + title: "Examples", + body: this.examples.map((example) => { + if (typeof example === "function") return example(name); + return example; + }).join("\n") + }); + if (helpCallback) sections = helpCallback(sections) || sections; + console.log(sections.map((section) => { + return section.title ? `${section.title}: +${section.body}` : section.body; + }).join("\n\n")); + } + outputVersion() { + const { name } = this.cli; + const { versionNumber } = this.cli.globalCommand; + if (versionNumber) console.log(`${name}/${versionNumber} ${platformInfo}`); + } + checkRequiredArgs() { + const minimalArgsCount = this.args.filter((arg) => arg.required).length; + if (this.cli.args.length < minimalArgsCount) throw new CACError(`missing required args for command \`${this.rawName}\``); + } + checkUnknownOptions() { + const { options, globalCommand } = this.cli; + if (!this.config.allowUnknownOptions) { + for (const name of Object.keys(options)) if (name !== "--" && !this.hasOption(name) && !globalCommand.hasOption(name)) throw new CACError(`Unknown option \`${name.length > 1 ? 
`--${name}` : `-${name}`}\``); + } + } + checkOptionValue() { + const { options: parsedOptions, globalCommand } = this.cli; + const options = [...globalCommand.options, ...this.options]; + for (const option of options) { + const value$1 = parsedOptions[option.name.split(".")[0]]; + if (option.required) { + const hasNegated = options.some((o) => o.negated && o.names.includes(option.name)); + if (value$1 === true || value$1 === false && !hasNegated) throw new CACError(`option \`${option.rawName}\` value is missing`); + } + } + } +}; +var GlobalCommand = class extends Command { + constructor(cli$1) { + super("@@global@@", "", {}, cli$1); + } +}; +var __assign = Object.assign; +var CAC = class extends EventEmitter { + constructor(name = "") { + super(); + this.name = name; + this.commands = []; + this.rawArgs = []; + this.args = []; + this.options = {}; + this.globalCommand = new GlobalCommand(this); + this.globalCommand.usage(" [options]"); + } + usage(text) { + this.globalCommand.usage(text); + return this; + } + command(rawName, description, config) { + const command = new Command(rawName, description || "", config, this); + command.globalCommand = this.globalCommand; + this.commands.push(command); + return command; + } + option(rawName, description, config) { + this.globalCommand.option(rawName, description, config); + return this; + } + help(callback) { + this.globalCommand.option("-h, --help", "Display this message"); + this.globalCommand.helpCallback = callback; + this.showHelpOnExit = true; + return this; + } + version(version$1, customFlags = "-v, --version") { + this.globalCommand.version(version$1, customFlags); + this.showVersionOnExit = true; + return this; + } + example(example) { + this.globalCommand.example(example); + return this; + } + outputHelp() { + if (this.matchedCommand) this.matchedCommand.outputHelp(); + else this.globalCommand.outputHelp(); + } + outputVersion() { + this.globalCommand.outputVersion(); + } + setParsedInfo({ args: args$1, options }, matchedCommand, matchedCommandName) { + this.args = args$1; + this.options = options; + if (matchedCommand) this.matchedCommand = matchedCommand; + if (matchedCommandName) this.matchedCommandName = matchedCommandName; + return this; + } + unsetMatchedCommand() { + this.matchedCommand = void 0; + this.matchedCommandName = void 0; + } + parse(argv = processArgs, { run = true } = {}) { + this.rawArgs = argv; + if (!this.name) this.name = argv[1] ? 
getFileName(argv[1]) : "cli"; + let shouldParse = true; + for (const command of this.commands) { + const parsed$1 = this.mri(argv.slice(2), command); + const commandName = parsed$1.args[0]; + if (command.isMatched(commandName)) { + shouldParse = false; + const parsedInfo = __assign(__assign({}, parsed$1), { args: parsed$1.args.slice(1) }); + this.setParsedInfo(parsedInfo, command, commandName); + this.emit(`command:${commandName}`, command); + } + } + if (shouldParse) { + for (const command of this.commands) if (command.name === "") { + shouldParse = false; + const parsed$1 = this.mri(argv.slice(2), command); + this.setParsedInfo(parsed$1, command); + this.emit(`command:!`, command); + } + } + if (shouldParse) { + const parsed$1 = this.mri(argv.slice(2)); + this.setParsedInfo(parsed$1); + } + if (this.options.help && this.showHelpOnExit) { + this.outputHelp(); + run = false; + this.unsetMatchedCommand(); + } + if (this.options.version && this.showVersionOnExit && this.matchedCommandName == null) { + this.outputVersion(); + run = false; + this.unsetMatchedCommand(); + } + const parsedArgv = { + args: this.args, + options: this.options + }; + if (run) this.runMatchedCommand(); + if (!this.matchedCommand && this.args[0]) this.emit("command:*"); + return parsedArgv; + } + mri(argv, command) { + const cliOptions = [...this.globalCommand.options, ...command ? command.options : []]; + const mriOptions = getMriOptions(cliOptions); + let argsAfterDoubleDashes = []; + const doubleDashesIndex = argv.indexOf("--"); + if (doubleDashesIndex > -1) { + argsAfterDoubleDashes = argv.slice(doubleDashesIndex + 1); + argv = argv.slice(0, doubleDashesIndex); + } + let parsed$1 = mri2(argv, mriOptions); + parsed$1 = Object.keys(parsed$1).reduce((res, name) => { + return __assign(__assign({}, res), { [camelcaseOptionName(name)]: parsed$1[name] }); + }, { _: [] }); + const args$1 = parsed$1._; + const options = { "--": argsAfterDoubleDashes }; + const ignoreDefault = command && command.config.ignoreOptionDefaultValue ? command.config.ignoreOptionDefaultValue : this.globalCommand.config.ignoreOptionDefaultValue; + let transforms = Object.create(null); + for (const cliOption of cliOptions) { + if (!ignoreDefault && cliOption.config.default !== void 0) for (const name of cliOption.names) options[name] = cliOption.config.default; + if (Array.isArray(cliOption.config.type)) { + if (transforms[cliOption.name] === void 0) { + transforms[cliOption.name] = Object.create(null); + transforms[cliOption.name]["shouldTransform"] = true; + transforms[cliOption.name]["transformFunction"] = cliOption.config.type[0]; + } + } + } + for (const key of Object.keys(parsed$1)) if (key !== "_") { + const keys = key.split("."); + setDotProp(options, keys, parsed$1[key]); + setByType(options, transforms); + } + return { + args: args$1, + options + }; + } + runMatchedCommand() { + const { args: args$1, options, matchedCommand: command } = this; + if (!command || !command.commandAction) return; + command.checkUnknownOptions(); + command.checkOptionValue(); + command.checkRequiredArgs(); + const actionArgs = []; + command.args.forEach((arg, index) => { + if (arg.variadic) actionArgs.push(args$1.slice(index)); + else actionArgs.push(args$1[index]); + }); + actionArgs.push(options); + return command.commandAction.apply(this, actionArgs); + } +}; +const cac = (name = "") => new CAC(name); + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/arrays.js +const liftArray = (data) => Array.isArray(data) ? 
data : [data]; +/** +* Splits an array into two arrays based on the result of a predicate +* +* @param predicate - The guard function used to determine which items to include. +* @returns A tuple containing two arrays: +* - the first includes items for which `predicate` returns true +* - the second includes items for which `predicate` returns false +* +* @example +* const list = [1, "2", "3", 4, 5]; +* const [numbers, strings] = spliterate(list, (x) => typeof x === "number"); +* // Type: number[] +* // Output: [1, 4, 5] +* console.log(evens); +* // Type: string[] +* // Output: ["2", "3"] +* console.log(odds); +*/ +const spliterate = (arr, predicate) => { + const result = [[], []]; + for (const item of arr) if (predicate(item)) result[0].push(item); + else result[1].push(item); + return result; +}; +const ReadonlyArray = Array; +const includes = (array, element) => array.includes(element); +const range = (length, offset = 0) => [...new Array(length)].map((_, i) => i + offset); +/** +* Adds a value or array to an array, returning the concatenated result +*/ +const append = (to, value$1, opts) => { + if (to === void 0) return value$1 === void 0 ? [] : Array.isArray(value$1) ? value$1 : [value$1]; + if (opts?.prepend) if (Array.isArray(value$1)) to.unshift(...value$1); + else to.unshift(value$1); + else if (Array.isArray(value$1)) to.push(...value$1); + else to.push(value$1); + return to; +}; +/** +* Concatenates an element or list with a readonly list +*/ +const conflatenate = (to, elementOrList) => { + if (elementOrList === void 0 || elementOrList === null) return to ?? []; + if (to === void 0 || to === null) return liftArray(elementOrList); + return to.concat(elementOrList); +}; +/** +* Concatenates a variadic list of elements or lists with a readonly list +*/ +const conflatenateAll = (...elementsOrLists) => elementsOrLists.reduce(conflatenate, []); +/** +* Appends a value or concatenates an array to an array if it is not already included, returning the array +*/ +const appendUnique = (to, value$1, opts) => { + if (to === void 0) return Array.isArray(value$1) ? value$1 : [value$1]; + const isEqual = opts?.isEqual ?? ((l, r) => l === r); + for (const v of liftArray(value$1)) if (!to.some((existing) => isEqual(existing, v))) to.push(v); + return to; +}; +const groupBy = (array, discriminant) => array.reduce((result, item) => { + const key = item[discriminant]; + result[key] = append(result[key], item); + return result; +}, {}); +const arrayEquals = (l, r, opts) => l.length === r.length && l.every(opts?.isEqual ? (lItem, i) => opts.isEqual(lItem, r[i]) : (lItem, i) => lItem === r[i]); + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/domain.js +const hasDomain = (data, kind) => domainOf(data) === kind; +const domainOf = (data) => { + const builtinType = typeof data; + return builtinType === "object" ? data === null ? "null" : "object" : builtinType === "function" ? 
"object" : builtinType; +}; +/** Each domain's completion for the phrase "must be _____" */ +const domainDescriptions = { + boolean: "boolean", + null: "null", + undefined: "undefined", + bigint: "a bigint", + number: "a number", + object: "an object", + string: "a string", + symbol: "a symbol" +}; +const jsTypeOfDescriptions = { + ...domainDescriptions, + function: "a function" +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/errors.js +var InternalArktypeError = class extends Error {}; +const throwInternalError = (message) => throwError(message, InternalArktypeError); +const throwError = (message, ctor = Error) => { + throw new ctor(message); +}; +var ParseError = class extends Error { + name = "ParseError"; +}; +const throwParseError = (message) => throwError(message, ParseError); +/** +* TypeScript won't suggest strings beginning with a space as properties. +* Useful for symbol-like string properties. +*/ +const noSuggest = (s) => ` ${s}`; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/flatMorph.js +const flatMorph = (o, flatMapEntry) => { + const result = {}; + const inputIsArray = Array.isArray(o); + let outputShouldBeArray = false; + for (const [i, entry] of Object.entries(o).entries()) { + const mapped = inputIsArray ? flatMapEntry(i, entry[1]) : flatMapEntry(...entry, i); + outputShouldBeArray ||= typeof mapped[0] === "number"; + const flattenedEntries = Array.isArray(mapped[0]) || mapped.length === 0 ? mapped : [mapped]; + for (const [k, v] of flattenedEntries) if (typeof k === "object") result[k.group] = append(result[k.group], v); + else result[k] = v; + } + return outputShouldBeArray ? Object.values(result) : result; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/records.js +/** +* Object.entries wrapper providing narrowed types for objects with known sets +* of keys, e.g. those defined internally as configs +*/ +const entriesOf = Object.entries; +const isKeyOf = (k, o) => k in o; +const hasKey = (o, k) => k in o; +var DynamicBase = class { + constructor(properties) { + Object.assign(this, properties); + } +}; +const NoopBase = class {}; +/** @ts-ignore (needed to extend `t`) **/ +var CastableBase = class extends NoopBase {}; +const splitByKeys = (o, leftKeys) => { + const l = {}; + const r = {}; + let k; + for (k in o) if (k in leftKeys) l[k] = o[k]; + else r[k] = o[k]; + return [l, r]; +}; +const omit = (o, keys) => splitByKeys(o, keys)[1]; +const isEmptyObject = (o) => Object.keys(o).length === 0; +const stringAndSymbolicEntriesOf = (o) => [...Object.entries(o), ...Object.getOwnPropertySymbols(o).map((k) => [k, o[k]])]; +/** Like Object.assign, but it will preserve getters instead of evaluating them. 
*/ +const defineProperties = (base, merged) => Object.defineProperties(base, Object.getOwnPropertyDescriptors(merged)); +/** Copies enumerable keys of o to a new object in alphabetical order */ +const withAlphabetizedKeys = (o) => { + const keys = Object.keys(o).sort(); + const result = {}; + for (let i = 0; i < keys.length; i++) result[keys[i]] = o[keys[i]]; + return result; +}; +const unset = noSuggest("represents an uninitialized value"); +const enumValues = (tsEnum) => Object.values(tsEnum).filter((v) => { + if (typeof v === "number") return true; + return typeof tsEnum[v] !== "number"; +}); + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/objectKinds.js +const ecmascriptConstructors = { + Array, + Boolean, + Date, + Error, + Function, + Map, + Number, + Promise, + RegExp, + Set, + String, + WeakMap, + WeakSet +}; +/** Node18 */ +const FileConstructor = globalThis.File ?? Blob; +const platformConstructors = { + ArrayBuffer, + Blob, + File: FileConstructor, + FormData, + Headers, + Request, + Response, + URL +}; +const typedArrayConstructors = { + Int8Array, + Uint8Array, + Uint8ClampedArray, + Int16Array, + Uint16Array, + Int32Array, + Uint32Array, + Float32Array, + Float64Array, + BigInt64Array, + BigUint64Array +}; +const builtinConstructors = { + ...ecmascriptConstructors, + ...platformConstructors, + ...typedArrayConstructors, + String, + Number, + Boolean +}; +const objectKindOf = (data) => { + let prototype = Object.getPrototypeOf(data); + while (prototype?.constructor && (!isKeyOf(prototype.constructor.name, builtinConstructors) || !(data instanceof builtinConstructors[prototype.constructor.name]))) prototype = Object.getPrototypeOf(prototype); + const name = prototype?.constructor?.name; + if (name === void 0 || name === "Object") return void 0; + return name; +}; +const objectKindOrDomainOf = (data) => typeof data === "object" && data !== null ? objectKindOf(data) ?? "object" : domainOf(data); +const isArray = Array.isArray; +const ecmascriptDescriptions = { + Array: "an array", + Function: "a function", + Date: "a Date", + RegExp: "a RegExp", + Error: "an Error", + Map: "a Map", + Set: "a Set", + String: "a String object", + Number: "a Number object", + Boolean: "a Boolean object", + Promise: "a Promise", + WeakMap: "a WeakMap", + WeakSet: "a WeakSet" +}; +const platformDescriptions = { + ArrayBuffer: "an ArrayBuffer instance", + Blob: "a Blob instance", + File: "a File instance", + FormData: "a FormData instance", + Headers: "a Headers instance", + Request: "a Request instance", + Response: "a Response instance", + URL: "a URL instance" +}; +const typedArrayDescriptions = { + Int8Array: "an Int8Array", + Uint8Array: "a Uint8Array", + Uint8ClampedArray: "a Uint8ClampedArray", + Int16Array: "an Int16Array", + Uint16Array: "a Uint16Array", + Int32Array: "an Int32Array", + Uint32Array: "a Uint32Array", + Float32Array: "a Float32Array", + Float64Array: "a Float64Array", + BigInt64Array: "a BigInt64Array", + BigUint64Array: "a BigUint64Array" +}; +/** Each defaultObjectKind's completion for the phrase "must be _____" */ +const objectKindDescriptions = { + ...ecmascriptDescriptions, + ...platformDescriptions, + ...typedArrayDescriptions +}; +/** +* this will only return an object kind if it's the root constructor +* example TypeError would return null not 'Error' +**/ +const getBuiltinNameOfConstructor = (ctor) => { + const constructorName = Object(ctor).name ?? 
null; + return constructorName && isKeyOf(constructorName, builtinConstructors) && builtinConstructors[constructorName] === ctor ? constructorName : null; +}; +/** +* Returns an array of constructors for all ancestors (i.e., prototypes) of a given object. +*/ +const ancestorsOf = (o) => { + let proto = Object.getPrototypeOf(o); + const result = []; + while (proto !== null) { + result.push(proto.constructor); + proto = Object.getPrototypeOf(proto); + } + return result; +}; +const constructorExtends = (ctor, base) => { + let current = ctor.prototype; + while (current !== null) { + if (current === base.prototype) return true; + current = Object.getPrototypeOf(current); + } + return false; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/clone.js +/** Deeply copy the properties of the a non-subclassed Object, Array or Date.*/ +const deepClone = (input) => _clone(input, /* @__PURE__ */ new Map()); +const _clone = (input, seen) => { + if (typeof input !== "object" || input === null) return input; + if (seen?.has(input)) return seen.get(input); + const builtinConstructorName = getBuiltinNameOfConstructor(input.constructor); + if (builtinConstructorName === "Date") return new Date(input.getTime()); + if (builtinConstructorName && builtinConstructorName !== "Array") return input; + const cloned = Array.isArray(input) ? input.slice() : Object.create(Object.getPrototypeOf(input)); + const propertyDescriptors = Object.getOwnPropertyDescriptors(input); + if (seen) { + seen.set(input, cloned); + for (const k in propertyDescriptors) { + const desc = propertyDescriptors[k]; + if ("get" in desc || "set" in desc) continue; + desc.value = _clone(desc.value, seen); + } + } + Object.defineProperties(cloned, propertyDescriptors); + return cloned; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/functions.js +const cached = (thunk) => { + let result = unset; + return () => result === unset ? result = thunk() : result; +}; +const isThunk = (value$1) => typeof value$1 === "function" && value$1.length === 0; +const DynamicFunction = class extends Function { + constructor(...args$1) { + const params = args$1.slice(0, -1); + const body = args$1.at(-1); + try { + super(...params, body); + } catch (e) { + return throwInternalError(`Encountered an unexpected error while compiling your definition: + Message: ${e} + Source: (${args$1.slice(0, -1)}) => { + ${args$1.at(-1)} + }`); + } + } +}; +var Callable = class { + constructor(fn, ...[opts]) { + return Object.assign(Object.setPrototypeOf(fn.bind(opts?.bind ?? this), this.constructor.prototype), opts?.attach); + } +}; +/** +* Checks if the environment has Content Security Policy (CSP) enabled, +* preventing JIT-optimized code from being compiled via new Function(). +* +* @returns `true` if a function created using new Function() can be +* successfully invoked in the environment, `false` otherwise. +* +* The result is cached for subsequent invocations. 
+*/ +const envHasCsp = cached(() => { + try { + return new Function("return false")(); + } catch { + return true; + } +}); + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/generics.js +const brand = noSuggest("brand"); +/** primitive key used to represent an inferred type at compile-time */ +const inferred = noSuggest("arkInferred"); + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/hkt.js +const args = noSuggest("args"); +var Hkt = class { + constructor() {} +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/isomorphic.js +/** get a CJS/ESM compatible string representing the current file */ +const fileName = () => { + try { + const error = /* @__PURE__ */ new Error(); + const stackLine = error.stack?.split("\n")[2]?.trim() || ""; + const filePath = stackLine.match(/\(?(.+?)(?::\d+:\d+)?\)?$/)?.[1] || "unknown"; + return filePath.replace(/^file:\/\//, ""); + } catch { + return "unknown"; + } +}; +const env = globalThis.process?.env ?? {}; +const isomorphic = { + fileName, + env +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/strings.js +const capitalize$1 = (s) => s[0].toUpperCase() + s.slice(1); +const anchoredRegex = (regex$1) => new RegExp(anchoredSource(regex$1), typeof regex$1 === "string" ? "" : regex$1.flags); +const anchoredSource = (regex$1) => { + const source = typeof regex$1 === "string" ? regex$1 : regex$1.source; + return `^(?:${source})$`; +}; +const RegexPatterns = { + negativeLookahead: (pattern) => `(?!${pattern})`, + nonCapturingGroup: (pattern) => `(?:${pattern})` +}; +const escapeChar = "\\"; +const whitespaceChars = { + " ": 1, + "\n": 1, + " ": 1 +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/numbers.js +const anchoredNegativeZeroPattern = /^-0\.?0*$/.source; +const positiveIntegerPattern = /[1-9]\d*/.source; +const looseDecimalPattern = /\.\d+/.source; +const strictDecimalPattern = /\.\d*[1-9]/.source; +const createNumberMatcher = (opts) => anchoredRegex(RegexPatterns.negativeLookahead(anchoredNegativeZeroPattern) + RegexPatterns.nonCapturingGroup("-?" + RegexPatterns.nonCapturingGroup(RegexPatterns.nonCapturingGroup("0|" + positiveIntegerPattern) + RegexPatterns.nonCapturingGroup(opts.decimalPattern) + "?") + (opts.allowDecimalOnly ? "|" + opts.decimalPattern : "") + "?")); +/** +* Matches a well-formatted numeric expression according to the following rules: +* 1. Must include an integer portion (i.e. '.321' must be written as '0.321') +* 2. The first digit of the value must not be 0, unless the entire integer portion is 0 +* 3. If the value includes a decimal, its last digit may not be 0 +* 4. 
The value may not be "-0" +*/ +const wellFormedNumberMatcher = createNumberMatcher({ + decimalPattern: strictDecimalPattern, + allowDecimalOnly: false +}); +const isWellFormedNumber = wellFormedNumberMatcher.test.bind(wellFormedNumberMatcher); +/** +* Similar to wellFormedNumber but more permissive in the following ways: +* +* - Allows numbers without an integer portion like ".5" (well-formed equivalent is "0.5") +* - Allows decimals with trailing zeroes like "0.10" (well-formed equivalent is "0.1") +*/ +const numericStringMatcher = createNumberMatcher({ + decimalPattern: looseDecimalPattern, + allowDecimalOnly: true +}); +const isNumericString = numericStringMatcher.test.bind(numericStringMatcher); +const numberLikeMatcher = /^-?\d*\.?\d*$/; +const isNumberLike = (s) => s.length !== 0 && numberLikeMatcher.test(s); +/** +* Matches a well-formatted integer according to the following rules: +* 1. must begin with an integer, the first digit of which cannot be 0 unless the entire value is 0 +* 2. The value may not be "-0" +*/ +const wellFormedIntegerMatcher = anchoredRegex(RegexPatterns.negativeLookahead("^-0$") + "-?" + RegexPatterns.nonCapturingGroup(RegexPatterns.nonCapturingGroup("0|" + positiveIntegerPattern))); +const isWellFormedInteger = wellFormedIntegerMatcher.test.bind(wellFormedIntegerMatcher); +const integerLikeMatcher = /^-?\d+$/; +const isIntegerLike = integerLikeMatcher.test.bind(integerLikeMatcher); +const numericLiteralDescriptions = { + number: "a number", + bigint: "a bigint", + integer: "an integer" +}; +const writeMalformedNumericLiteralMessage = (def, kind) => `'${def}' was parsed as ${numericLiteralDescriptions[kind]} but could not be narrowed to a literal value. Avoid unnecessary leading or trailing zeros and other abnormal notation`; +const isWellFormed = (def, kind) => kind === "number" ? isWellFormedNumber(def) : isWellFormedInteger(def); +const parseKind = (def, kind) => kind === "number" ? Number(def) : Number.parseInt(def); +const isKindLike = (def, kind) => kind === "number" ? isNumberLike(def) : isIntegerLike(def); +const tryParseNumber = (token, options) => parseNumeric(token, "number", options); +const tryParseWellFormedNumber = (token, options) => parseNumeric(token, "number", { + ...options, + strict: true +}); +const tryParseInteger = (token, options) => parseNumeric(token, "integer", options); +const parseNumeric = (token, kind, options) => { + const value$1 = parseKind(token, kind); + if (!Number.isNaN(value$1)) { + if (isKindLike(token, kind)) { + if (options?.strict) return isWellFormed(token, kind) ? value$1 : throwParseError(writeMalformedNumericLiteralMessage(token, kind)); + return value$1; + } + } + return options?.errorOnFail ? throwParseError(options?.errorOnFail === true ? 
`Failed to parse ${numericLiteralDescriptions[kind]} from '${token}'` : options?.errorOnFail) : void 0; +}; +const tryParseWellFormedBigint = (def) => { + if (def[def.length - 1] !== "n") return; + const maybeIntegerLiteral = def.slice(0, -1); + let value$1; + try { + value$1 = BigInt(maybeIntegerLiteral); + } catch { + return; + } + if (wellFormedIntegerMatcher.test(maybeIntegerLiteral)) return value$1; + if (integerLikeMatcher.test(maybeIntegerLiteral)) return throwParseError(writeMalformedNumericLiteralMessage(def, "bigint")); +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/registry.js +const arkUtilVersion = "0.46.0"; +const initialRegistryContents = { + version: arkUtilVersion, + filename: isomorphic.fileName(), + FileConstructor +}; +const registry = initialRegistryContents; +const namesByResolution = /* @__PURE__ */ new Map(); +const nameCounts = Object.create(null); +const register = (value$1) => { + const existingName = namesByResolution.get(value$1); + if (existingName) return existingName; + let name = baseNameFor(value$1); + if (nameCounts[name]) name = `${name}${nameCounts[name]++}`; + else nameCounts[name] = 1; + registry[name] = value$1; + namesByResolution.set(value$1, name); + return name; +}; +const isDotAccessible = (keyName) => /^[$A-Z_a-z][\w$]*$/.test(keyName); +const baseNameFor = (value$1) => { + switch (typeof value$1) { + case "object": { + if (value$1 === null) break; + const prefix = objectKindOf(value$1) ?? "object"; + return prefix[0].toLowerCase() + prefix.slice(1); + } + case "function": return isDotAccessible(value$1.name) ? value$1.name : "fn"; + case "symbol": return value$1.description && isDotAccessible(value$1.description) ? value$1.description : "symbol"; + } + return throwInternalError(`Unexpected attempt to register serializable value of type ${domainOf(value$1)}`); +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/primitive.js +const serializePrimitive = (value$1) => typeof value$1 === "string" ? JSON.stringify(value$1) : typeof value$1 === "bigint" ? `${value$1}n` : `${value$1}`; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/serialize.js +const snapshot = (data, opts = {}) => _serialize(data, { + onUndefined: `$ark.undefined`, + onBigInt: (n) => `$ark.bigint-${n}`, + ...opts +}, []); +const printable = (data, opts) => { + switch (domainOf(data)) { + case "object": + const o = data; + const ctorName = o.constructor.name; + return ctorName === "Object" || ctorName === "Array" ? opts?.quoteKeys === false ? stringifyUnquoted(o, opts?.indent ?? 0, "") : JSON.stringify(_serialize(o, printableOpts, []), null, opts?.indent) : stringifyUnquoted(o, opts?.indent ?? 0, ""); + case "symbol": return printableOpts.onSymbol(data); + default: return serializePrimitive(data); + } +}; +const stringifyUnquoted = (value$1, indent$1, currentIndent) => { + if (typeof value$1 === "function") return printableOpts.onFunction(value$1); + if (typeof value$1 !== "object" || value$1 === null) return serializePrimitive(value$1); + const nextIndent = currentIndent + " ".repeat(indent$1); + if (Array.isArray(value$1)) { + if (value$1.length === 0) return "[]"; + const items = value$1.map((item) => stringifyUnquoted(item, indent$1, nextIndent)).join(",\n" + nextIndent); + return indent$1 ? 
`[\n${nextIndent}${items}\n${currentIndent}]` : `[${items}]`; + } + const ctorName = value$1.constructor.name; + if (ctorName === "Object") { + const keyValues = stringAndSymbolicEntriesOf(value$1).map(([key, val]) => { + const stringifiedKey = typeof key === "symbol" ? printableOpts.onSymbol(key) : isDotAccessible(key) ? key : JSON.stringify(key); + const stringifiedValue = stringifyUnquoted(val, indent$1, nextIndent); + return `${nextIndent}${stringifiedKey}: ${stringifiedValue}`; + }); + if (keyValues.length === 0) return "{}"; + return indent$1 ? `{\n${keyValues.join(",\n")}\n${currentIndent}}` : `{${keyValues.join(", ")}}`; + } + if (value$1 instanceof Date) return describeCollapsibleDate(value$1); + if ("expression" in value$1 && typeof value$1.expression === "string") return value$1.expression; + return ctorName; +}; +const printableOpts = { + onCycle: () => "(cycle)", + onSymbol: (v) => `Symbol(${register(v)})`, + onFunction: (v) => `Function(${register(v)})` +}; +const _serialize = (data, opts, seen) => { + switch (domainOf(data)) { + case "object": { + const o = data; + if ("toJSON" in o && typeof o.toJSON === "function") return o.toJSON(); + if (typeof o === "function") return printableOpts.onFunction(o); + if (seen.includes(o)) return "(cycle)"; + const nextSeen = [...seen, o]; + if (Array.isArray(o)) return o.map((item) => _serialize(item, opts, nextSeen)); + if (o instanceof Date) return o.toDateString(); + const result = {}; + for (const k in o) result[k] = _serialize(o[k], opts, nextSeen); + for (const s of Object.getOwnPropertySymbols(o)) result[opts.onSymbol?.(s) ?? s.toString()] = _serialize(o[s], opts, nextSeen); + return result; + } + case "symbol": return printableOpts.onSymbol(data); + case "bigint": return opts.onBigInt?.(data) ?? `${data}n`; + case "undefined": return opts.onUndefined ?? "undefined"; + case "string": return data.replaceAll("\\", "\\\\"); + default: return data; + } +}; +/** +* Converts a Date instance to a human-readable description relative to its precision +*/ +const describeCollapsibleDate = (date) => { + const year = date.getFullYear(); + const month = date.getMonth(); + const dayOfMonth = date.getDate(); + const hours = date.getHours(); + const minutes = date.getMinutes(); + const seconds = date.getSeconds(); + const milliseconds = date.getMilliseconds(); + if (month === 0 && dayOfMonth === 1 && hours === 0 && minutes === 0 && seconds === 0 && milliseconds === 0) return `${year}`; + const datePortion = `${months[month]} ${dayOfMonth}, ${year}`; + if (hours === 0 && minutes === 0 && seconds === 0 && milliseconds === 0) return datePortion; + let timePortion = date.toLocaleTimeString(); + const suffix$1 = timePortion.endsWith(" AM") || timePortion.endsWith(" PM") ? 
timePortion.slice(-3) : ""; + if (suffix$1) timePortion = timePortion.slice(0, -suffix$1.length); + if (milliseconds) timePortion += `.${pad(milliseconds, 3)}`; + else if (timeWithUnnecessarySeconds.test(timePortion)) timePortion = timePortion.slice(0, -3); + return `${timePortion + suffix$1}, ${datePortion}`; +}; +const months = [ + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December" +]; +const timeWithUnnecessarySeconds = /:\d\d:00$/; +const pad = (value$1, length) => String(value$1).padStart(length, "0"); + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/path.js +const appendStringifiedKey = (path$1, prop, ...[opts]) => { + const stringifySymbol = opts?.stringifySymbol ?? printable; + let propAccessChain = path$1; + switch (typeof prop) { + case "string": + propAccessChain = isDotAccessible(prop) ? path$1 === "" ? prop : `${path$1}.${prop}` : `${path$1}[${JSON.stringify(prop)}]`; + break; + case "number": + propAccessChain = `${path$1}[${prop}]`; + break; + case "symbol": + propAccessChain = `${path$1}[${stringifySymbol(prop)}]`; + break; + default: if (opts?.stringifyNonKey) propAccessChain = `${path$1}[${opts.stringifyNonKey(prop)}]`; + else throwParseError(`${printable(prop)} must be a PropertyKey or stringifyNonKey must be passed to options`); + } + return propAccessChain; +}; +const stringifyPath = (path$1, ...opts) => path$1.reduce((s, k) => appendStringifiedKey(s, k, ...opts), ""); +var ReadonlyPath = class extends ReadonlyArray { + cache = {}; + constructor(...items) { + super(); + this.push(...items); + } + toJSON() { + if (this.cache.json) return this.cache.json; + this.cache.json = []; + for (let i = 0; i < this.length; i++) this.cache.json.push(typeof this[i] === "symbol" ? printable(this[i]) : this[i]); + return this.cache.json; + } + stringify() { + if (this.cache.stringify) return this.cache.stringify; + return this.cache.stringify = stringifyPath(this); + } + stringifyAncestors() { + if (this.cache.stringifyAncestors) return this.cache.stringifyAncestors; + let propString = ""; + const result = [propString]; + for (const path$1 of this) { + propString = appendStringifiedKey(propString, path$1); + result.push(propString); + } + return this.cache.stringifyAncestors = result; + } +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/scanner.js +var Scanner = class { + chars; + i; + def; + constructor(def) { + this.def = def; + this.chars = [...def]; + this.i = 0; + } + /** Get lookahead and advance scanner by one */ + shift() { + return this.chars[this.i++] ?? ""; + } + get lookahead() { + return this.chars[this.i] ?? ""; + } + get nextLookahead() { + return this.chars[this.i + 1] ?? ""; + } + get length() { + return this.chars.length; + } + shiftUntil(condition) { + let shifted = ""; + while (this.lookahead) { + if (condition(this, shifted)) if (shifted[shifted.length - 1] === escapeChar) shifted = shifted.slice(0, -1); + else break; + shifted += this.shift(); + } + return shifted; + } + shiftUntilLookahead(charOrSet) { + return typeof charOrSet === "string" ? this.shiftUntil((s) => s.lookahead === charOrSet) : this.shiftUntil((s) => s.lookahead in charOrSet); + } + shiftUntilNonWhitespace() { + return this.shiftUntil(() => !(this.lookahead in whitespaceChars)); + } + jumpToIndex(i) { + this.i = i < 0 ? 
this.length + i : i; + } + jumpForward(count) { + this.i += count; + } + get location() { + return this.i; + } + get unscanned() { + return this.chars.slice(this.i, this.length).join(""); + } + get scanned() { + return this.chars.slice(0, this.i).join(""); + } + sliceChars(start, end) { + return this.chars.slice(start, end).join(""); + } + lookaheadIs(char) { + return this.lookahead === char; + } + lookaheadIsIn(tokens) { + return this.lookahead in tokens; + } +}; + +//#endregion +//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/traits.js +const implementedTraits = noSuggest("implementedTraits"); +const hasTrait = (traitClass) => (o) => { + if (!hasDomain(o, "object")) return false; + if (implementedTraits in o.constructor && o.constructor[implementedTraits].includes(traitClass)) return true; + return ancestorsOf(o).includes(traitClass); +}; +/** @ts-ignore required to extend NoopBase */ +var Trait = class extends NoopBase { + static get [Symbol.hasInstance]() { + return hasTrait(this); + } + traitsOf() { + return implementedTraits in this.constructor ? this.constructor[implementedTraits] : []; + } +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/registry.js +let _registryName = "$ark"; +let suffix = 2; +while (_registryName in globalThis) _registryName = `$ark${suffix++}`; +const registryName = _registryName; +globalThis[registryName] = registry; +const $ark = registry; +const reference = (name) => `${registryName}.${name}`; +const registeredReference = (value$1) => reference(register(value$1)); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/compile.js +var CompiledFunction = class extends CastableBase { + argNames; + body = ""; + constructor(...args$1) { + super(); + this.argNames = args$1; + for (const arg of args$1) { + if (arg in this) throw new Error(`Arg name '${arg}' would overwrite an existing property on FunctionBody`); + this[arg] = arg; + } + } + indentation = 0; + indent() { + this.indentation += 4; + return this; + } + dedent() { + this.indentation -= 4; + return this; + } + prop(key, optional = false) { + return compileLiteralPropAccess(key, optional); + } + index(key, optional = false) { + return indexPropAccess(`${key}`, optional); + } + line(statement) { + this.body += `${" ".repeat(this.indentation)}${statement}\n`; + return this; + } + const(identifier, expression) { + this.line(`const ${identifier} = ${expression}`); + return this; + } + let(identifier, expression) { + return this.line(`let ${identifier} = ${expression}`); + } + set(identifier, expression) { + return this.line(`${identifier} = ${expression}`); + } + if(condition, then) { + return this.block(`if (${condition})`, then); + } + elseIf(condition, then) { + return this.block(`else if (${condition})`, then); + } + else(then) { + return this.block("else", then); + } + /** Current index is "i" */ + for(until, body, initialValue = 0) { + return this.block(`for (let i = ${initialValue}; ${until}; i++)`, body); + } + /** Current key is "k" */ + forIn(object$1, body) { + return this.block(`for (const k in ${object$1})`, body); + } + block(prefix, contents, suffix$1 = "") { + this.line(`${prefix} {`); + this.indent(); + contents(this); + this.dedent(); + return this.line(`}${suffix$1}`); + } + return(expression = "") { + return this.line(`return ${expression}`); + } + write(name = "anonymous", indent$1 = 0) { + return `${name}(${this.argNames.join(", ")}) { ${indent$1 ? 
this.body.split("\n").map((l) => " ".repeat(indent$1) + `${l}`).join("\n") : this.body} }`; + } + compile() { + return new DynamicFunction(...this.argNames, this.body); + } +}; +const compileSerializedValue = (value$1) => hasDomain(value$1, "object") || typeof value$1 === "symbol" ? registeredReference(value$1) : serializePrimitive(value$1); +const compileLiteralPropAccess = (key, optional = false) => { + if (typeof key === "string" && isDotAccessible(key)) return `${optional ? "?" : ""}.${key}`; + return indexPropAccess(serializeLiteralKey(key), optional); +}; +const serializeLiteralKey = (key) => typeof key === "symbol" ? registeredReference(key) : JSON.stringify(key); +const indexPropAccess = (key, optional = false) => `${optional ? "?." : ""}[${key}]`; +var NodeCompiler = class extends CompiledFunction { + traversalKind; + optimistic; + constructor(ctx) { + super("data", "ctx"); + this.traversalKind = ctx.kind; + this.optimistic = ctx.optimistic === true; + } + invoke(node$1, opts) { + const arg = opts?.arg ?? this.data; + const requiresContext = typeof node$1 === "string" ? true : this.requiresContextFor(node$1); + const id = typeof node$1 === "string" ? node$1 : node$1.id; + if (requiresContext) return `${this.referenceToId(id, opts)}(${arg}, ${this.ctx})`; + return `${this.referenceToId(id, opts)}(${arg})`; + } + referenceToId(id, opts) { + const invokedKind = opts?.kind ?? this.traversalKind; + const base = `this.${id}${invokedKind}`; + return opts?.bind ? `${base}.bind(${opts?.bind})` : base; + } + requiresContextFor(node$1) { + return this.traversalKind === "Apply" || node$1.allowsRequiresContext; + } + initializeErrorCount() { + return this.const("errorCount", "ctx.currentErrorCount"); + } + returnIfFail() { + return this.if("ctx.currentErrorCount > errorCount", () => this.return()); + } + returnIfFailFast() { + return this.if("ctx.failFast && ctx.currentErrorCount > errorCount", () => this.return()); + } + traverseKey(keyExpression, accessExpression, node$1) { + const requiresContext = this.requiresContextFor(node$1); + if (requiresContext) this.line(`${this.ctx}.path.push(${keyExpression})`); + this.check(node$1, { arg: accessExpression }); + if (requiresContext) this.line(`${this.ctx}.path.pop()`); + return this; + } + check(node$1, opts) { + return this.traversalKind === "Allows" ? this.if(`!${this.invoke(node$1, opts)}`, () => this.return(false)) : this.line(this.invoke(node$1, opts)); + } +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/utils.js +const makeRootAndArrayPropertiesMutable = (o) => flatMorph(o, (k, v) => [k, isArray(v) ? 
[...v] : v]); +const arkKind = noSuggest("arkKind"); +const hasArkKind = (value$1, kind) => value$1?.[arkKind] === kind; +const isNode = (value$1) => hasArkKind(value$1, "root") || hasArkKind(value$1, "constraint"); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/implement.js +const basisKinds = [ + "unit", + "proto", + "domain" +]; +const structuralKinds = [ + "required", + "optional", + "index", + "sequence" +]; +const refinementKinds = [ + "pattern", + "divisor", + "exactLength", + "max", + "min", + "maxLength", + "minLength", + "before", + "after" +]; +const constraintKinds = [ + ...refinementKinds, + ...structuralKinds, + "structure", + "predicate" +]; +const rootKinds = [ + "alias", + "union", + "morph", + "unit", + "intersection", + "proto", + "domain" +]; +const nodeKinds = [...rootKinds, ...constraintKinds]; +const constraintKeys = flatMorph(constraintKinds, (i, kind) => [kind, 1]); +const structureKeys = flatMorph([...structuralKinds, "undeclared"], (i, k) => [k, 1]); +const precedenceByKind = flatMorph(nodeKinds, (i, kind) => [kind, i]); +const isNodeKind = (value$1) => typeof value$1 === "string" && value$1 in precedenceByKind; +const precedenceOfKind = (kind) => precedenceByKind[kind]; +const schemaKindsRightOf = (kind) => rootKinds.slice(precedenceOfKind(kind) + 1); +const unionChildKinds = [...schemaKindsRightOf("union"), "alias"]; +const morphChildKinds = [...schemaKindsRightOf("morph"), "alias"]; +const defaultValueSerializer = (v) => { + if (typeof v === "string" || typeof v === "boolean" || v === null) return v; + if (typeof v === "number") { + if (Number.isNaN(v)) return "NaN"; + if (v === Number.POSITIVE_INFINITY) return "Infinity"; + if (v === Number.NEGATIVE_INFINITY) return "-Infinity"; + return v; + } + return compileSerializedValue(v); +}; +const compileObjectLiteral = (ctx) => { + let result = "{ "; + for (const [k, v] of Object.entries(ctx)) result += `${k}: ${compileSerializedValue(v)}, `; + return result + " }"; +}; +const implementNode = (_) => { + const implementation$22 = _; + if (implementation$22.hasAssociatedError) { + implementation$22.defaults.expected ??= (ctx) => "description" in ctx ? ctx.description : implementation$22.defaults.description(ctx); + implementation$22.defaults.actual ??= (data) => printable(data); + implementation$22.defaults.problem ??= (ctx) => `must be ${ctx.expected}${ctx.actual ? 
` (was ${ctx.actual})` : ""}`; + implementation$22.defaults.message ??= (ctx) => { + if (ctx.path.length === 0) return ctx.problem; + const problemWithLocation = `${ctx.propString} ${ctx.problem}`; + if (problemWithLocation[0] === "[") return `value at ${problemWithLocation}`; + return problemWithLocation; + }; + } + return implementation$22; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/toJsonSchema.js +var ToJsonSchemaError = class extends Error { + name = "ToJsonSchemaError"; + code; + context; + constructor(code, context) { + super(printable(context, { + quoteKeys: false, + indent: 4 + })); + this.code = code; + this.context = context; + } + hasCode(code) { + return this.code === code; + } +}; +const defaultConfig = { + dialect: "https://json-schema.org/draft/2020-12/schema", + useRefs: false, + fallback: { + arrayObject: (ctx) => ToJsonSchema.throw("arrayObject", ctx), + arrayPostfix: (ctx) => ToJsonSchema.throw("arrayPostfix", ctx), + defaultValue: (ctx) => ToJsonSchema.throw("defaultValue", ctx), + domain: (ctx) => ToJsonSchema.throw("domain", ctx), + morph: (ctx) => ToJsonSchema.throw("morph", ctx), + patternIntersection: (ctx) => ToJsonSchema.throw("patternIntersection", ctx), + predicate: (ctx) => ToJsonSchema.throw("predicate", ctx), + proto: (ctx) => ToJsonSchema.throw("proto", ctx), + symbolKey: (ctx) => ToJsonSchema.throw("symbolKey", ctx), + unit: (ctx) => ToJsonSchema.throw("unit", ctx), + date: (ctx) => ToJsonSchema.throw("date", ctx) + } +}; +const ToJsonSchema = { + Error: ToJsonSchemaError, + throw: (...args$1) => { + throw new ToJsonSchema.Error(...args$1); + }, + throwInternalOperandError: (kind, schema$1) => throwInternalError(`Unexpected JSON Schema input for ${kind}: ${printable(schema$1)}`), + defaultConfig +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/config.js +$ark.config ??= {}; +const mergeConfigs = (base, merged) => { + if (!merged) return base; + const result = { ...base }; + let k; + for (k in merged) { + const keywords$1 = { ...base.keywords }; + if (k === "keywords") { + for (const flatAlias in merged[k]) { + const v = merged.keywords[flatAlias]; + if (v === void 0) continue; + keywords$1[flatAlias] = typeof v === "string" ? { description: v } : v; + } + result.keywords = keywords$1; + } else if (k === "toJsonSchema") result[k] = mergeToJsonSchemaConfigs(base.toJsonSchema, merged.toJsonSchema); + else if (isNodeKind(k)) result[k] = { + ...base[k], + ...merged[k] + }; + else result[k] = merged[k]; + } + return result; +}; +const mergeToJsonSchemaConfigs = (baseConfig, mergedConfig) => { + if (!baseConfig) return mergedConfig ?? {}; + if (!mergedConfig) return baseConfig; + const result = { ...baseConfig }; + let k; + for (k in mergedConfig) if (k === "fallback") result.fallback = mergeFallbacks(baseConfig.fallback, mergedConfig.fallback); + else result[k] = mergedConfig[k]; + return result; +}; +const mergeFallbacks = (base, merged) => { + base = normalizeFallback(base); + merged = normalizeFallback(merged); + const result = {}; + let code; + for (code in ToJsonSchema.defaultConfig.fallback) result[code] = merged[code] ?? merged.default ?? base[code] ?? base.default ?? ToJsonSchema.defaultConfig.fallback[code]; + return result; +}; +const normalizeFallback = (fallback) => typeof fallback === "function" ? { default: fallback } : fallback ?? 
{}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/errors.js +var ArkError = class ArkError extends CastableBase { + [arkKind] = "error"; + path; + data; + nodeConfig; + input; + ctx; + constructor({ prefixPath, relativePath,...input }, ctx) { + super(); + this.input = input; + this.ctx = ctx; + defineProperties(this, input); + const data = ctx.data; + if (input.code === "union") input.errors = input.errors.flatMap((innerError) => { + const flat = innerError.hasCode("union") ? innerError.errors : [innerError]; + if (!prefixPath && !relativePath) return flat; + return flat.map((e) => e.transform((e$1) => ({ + ...e$1, + path: conflatenateAll(prefixPath, e$1.path, relativePath) + }))); + }); + this.nodeConfig = ctx.config[this.code]; + const basePath = [...input.path ?? ctx.path]; + if (relativePath) basePath.push(...relativePath); + if (prefixPath) basePath.unshift(...prefixPath); + this.path = new ReadonlyPath(...basePath); + this.data = "data" in input ? input.data : data; + } + transform(f) { + return new ArkError(f({ + data: this.data, + path: this.path, + ...this.input + }), this.ctx); + } + hasCode(code) { + return this.code === code; + } + get propString() { + return stringifyPath(this.path); + } + get expected() { + if (this.input.expected) return this.input.expected; + const config = this.meta?.expected ?? this.nodeConfig.expected; + return typeof config === "function" ? config(this.input) : config; + } + get actual() { + if (this.input.actual) return this.input.actual; + const config = this.meta?.actual ?? this.nodeConfig.actual; + return typeof config === "function" ? config(this.data) : config; + } + get problem() { + if (this.input.problem) return this.input.problem; + const config = this.meta?.problem ?? this.nodeConfig.problem; + return typeof config === "function" ? config(this) : config; + } + get message() { + if (this.input.message) return this.input.message; + const config = this.meta?.message ?? this.nodeConfig.message; + return typeof config === "function" ? config(this) : config; + } + get flat() { + return this.hasCode("intersection") ? [...this.errors] : [this]; + } + toJSON() { + return { + data: this.data, + path: this.path, + ...this.input, + expected: this.expected, + actual: this.actual, + problem: this.problem, + message: this.message + }; + } + toString() { + return this.message; + } + throw() { + throw this; + } +}; +/** +* A ReadonlyArray of `ArkError`s returned by a Type on invalid input. +* +* Subsequent errors added at an existing path are merged into an +* ArkError intersection. +*/ +var ArkErrors = class ArkErrors extends ReadonlyArray { + [arkKind] = "errors"; + ctx; + constructor(ctx) { + super(); + this.ctx = ctx; + } + /** + * Errors by a pathString representing their location. + */ + byPath = Object.create(null); + /** + * {@link byPath} flattened so that each value is an array of ArkError instances at that path. + * + * ✅ Since "intersection" errors will be flattened to their constituent `.errors`, + * they will never be directly present in this representation. + */ + get flatByPath() { + return flatMorph(this.byPath, (k, v) => [k, v.flat]); + } + /** + * {@link byPath} flattened so that each value is an array of problem strings at that path. + */ + get flatProblemsByPath() { + return flatMorph(this.byPath, (k, v) => [k, v.flat.map((e) => e.problem)]); + } + /** + * All pathStrings at which errors are present mapped to the errors occuring + * at that path or any nested path within it. 
+ */ + byAncestorPath = Object.create(null); + count = 0; + mutable = this; + /** + * Throw a TraversalError based on these errors. + */ + throw() { + throw this.toTraversalError(); + } + /** + * Converts ArkErrors to TraversalError, a subclass of `Error` suitable for throwing with nice + * formatting. + */ + toTraversalError() { + return new TraversalError(this); + } + /** + * Append an ArkError to this array, ignoring duplicates. + */ + add(error) { + if (this.includes(error)) return; + this._add(error); + } + transform(f) { + const result = new ArkErrors(this.ctx); + for (const e of this) result.add(f(e)); + return result; + } + /** + * Add all errors from an ArkErrors instance, ignoring duplicates and + * prefixing their paths with that of the current Traversal. + */ + merge(errors) { + for (const e of errors) { + if (this.includes(e)) continue; + this._add(new ArkError({ + ...e, + path: [...this.ctx.path, ...e.path] + }, this.ctx)); + } + } + /** + * @internal + */ + affectsPath(path$1) { + if (this.length === 0) return false; + return path$1.stringifyAncestors().some((s) => s in this.byPath) || path$1.stringify() in this.byAncestorPath; + } + /** + * A human-readable summary of all errors. + */ + get summary() { + return this.toString(); + } + /** + * Alias of this ArkErrors instance for StandardSchema compatibility. + */ + get issues() { + return this; + } + toJSON() { + return [...this.map((e) => e.toJSON())]; + } + toString() { + return this.join("\n"); + } + _add(error) { + const existing = this.byPath[error.propString]; + if (existing) { + if (existing.hasCode("union") && existing.errors.length === 0) return; + const errorIntersection = error.hasCode("union") && error.errors.length === 0 ? error : new ArkError({ + code: "intersection", + errors: existing.hasCode("intersection") ? [...existing.errors, error] : [existing, error] + }, this.ctx); + const existingIndex = this.indexOf(existing); + this.mutable[existingIndex === -1 ? 
this.length : existingIndex] = errorIntersection; + this.byPath[error.propString] = errorIntersection; + this.addAncestorPaths(error); + } else { + this.byPath[error.propString] = error; + this.addAncestorPaths(error); + this.mutable.push(error); + } + this.count++; + } + addAncestorPaths(error) { + for (const propString of error.path.stringifyAncestors()) this.byAncestorPath[propString] = append(this.byAncestorPath[propString], error); + } +}; +var TraversalError = class extends Error { + name = "TraversalError"; + constructor(errors) { + if (errors.length === 1) super(errors.summary); + else super("\n" + errors.map((error) => ` • ${indent(error)}`).join("\n")); + Object.defineProperty(this, "arkErrors", { + value: errors, + enumerable: false + }); + } +}; +const indent = (error) => error.toString().split("\n").join("\n "); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/traversal.js +var Traversal = class { + /** + * #### the path being validated or morphed + * + * ✅ array indices represented as numbers + * ⚠️ mutated during traversal - use `path.slice(0)` to snapshot + * 🔗 use {@link propString} for a stringified version + */ + path = []; + /** + * #### {@link ArkErrors} that will be part of this traversal's finalized result + * + * ✅ will always be an empty array for a valid traversal + */ + errors = new ArkErrors(this); + /** + * #### the original value being traversed + */ + root; + /** + * #### configuration for this traversal + * + * ✅ options can affect traversal results and error messages + * ✅ defaults < global config < scope config + * ✅ does not include options configured on individual types + */ + config; + queuedMorphs = []; + branches = []; + seen = {}; + constructor(root, config) { + this.root = root; + this.config = config; + } + /** + * #### the data being validated or morphed + * + * ✅ extracted from {@link root} at {@link path} + */ + get data() { + let result = this.root; + for (const segment of this.path) result = result?.[segment]; + return result; + } + /** + * #### a string representing {@link path} + * + * @propString + */ + get propString() { + return stringifyPath(this.path); + } + /** + * #### add an {@link ArkError} and return `false` + * + * ✅ useful for predicates like `.narrow` + */ + reject(input) { + this.error(input); + return false; + } + /** + * #### add an {@link ArkError} from a description and return `false` + * + * ✅ useful for predicates like `.narrow` + * 🔗 equivalent to {@link reject}({ expected }) + */ + mustBe(expected) { + this.error(expected); + return false; + } + error(input) { + const errCtx = typeof input === "object" ? input.code ? input : { + ...input, + code: "predicate" + } : { + code: "predicate", + expected: input + }; + return this.errorFromContext(errCtx); + } + /** + * #### whether {@link currentBranch} (or the traversal root, outside a union) has one or more errors + */ + hasError() { + return this.currentErrorCount !== 0; + } + get currentBranch() { + return this.branches.at(-1); + } + queueMorphs(morphs) { + const input = { + path: new ReadonlyPath(...this.path), + morphs + }; + if (this.currentBranch) this.currentBranch.queuedMorphs.push(input); + else this.queuedMorphs.push(input); + } + finalize(onFail) { + if (this.queuedMorphs.length) { + if (typeof this.root === "object" && this.root !== null && this.config.clone) this.root = this.config.clone(this.root); + this.applyQueuedMorphs(); + } + if (this.hasError()) return onFail ? 
onFail(this.errors) : this.errors; + return this.root; + } + get currentErrorCount() { + return this.currentBranch ? this.currentBranch.error ? 1 : 0 : this.errors.count; + } + get failFast() { + return this.branches.length !== 0; + } + pushBranch() { + this.branches.push({ + error: void 0, + queuedMorphs: [] + }); + } + popBranch() { + return this.branches.pop(); + } + /** + * @internal + * Convenience for casting from InternalTraversal to Traversal + * for cases where the extra methods on the external type are expected, e.g. + * a morph or predicate. + */ + get external() { + return this; + } + errorFromNodeContext(input) { + return this.errorFromContext(input); + } + errorFromContext(errCtx) { + const error = new ArkError(errCtx, this); + if (this.currentBranch) this.currentBranch.error = error; + else this.errors.add(error); + return error; + } + applyQueuedMorphs() { + while (this.queuedMorphs.length) { + const queuedMorphs = this.queuedMorphs; + this.queuedMorphs = []; + for (const { path: path$1, morphs } of queuedMorphs) { + if (this.errors.affectsPath(path$1)) continue; + this.applyMorphsAtPath(path$1, morphs); + } + } + } + applyMorphsAtPath(path$1, morphs) { + const key = path$1.at(-1); + let parent; + if (key !== void 0) { + parent = this.root; + for (let pathIndex = 0; pathIndex < path$1.length - 1; pathIndex++) parent = parent[path$1[pathIndex]]; + } + this.path = [...path$1]; + for (const morph of morphs) { + const morphIsNode = isNode(morph); + const result = morph(parent === void 0 ? this.root : parent[key], this); + if (result instanceof ArkError) { + this.errors.add(result); + break; + } + if (result instanceof ArkErrors) { + if (!morphIsNode) this.errors.merge(result); + break; + } + if (parent === void 0) this.root = result; + else parent[key] = result; + this.applyQueuedMorphs(); + } + } +}; +const traverseKey = (key, fn, ctx) => { + if (!ctx) return fn(); + ctx.path.push(key); + const result = fn(); + ctx.path.pop(); + return result; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/node.js +var BaseNode = class extends Callable { + attachments; + $; + onFail; + includesTransform; + includesContextualPredicate; + isCyclic; + allowsRequiresContext; + rootApplyStrategy; + contextFreeMorph; + rootApply; + referencesById; + shallowReferences; + flatRefs; + flatMorphs; + allows; + get shallowMorphs() { + return []; + } + constructor(attachments, $) { + super((data, pipedFromCtx, onFail = this.onFail) => { + if (pipedFromCtx) { + this.traverseApply(data, pipedFromCtx); + return pipedFromCtx.hasError() ? pipedFromCtx.errors : pipedFromCtx.data; + } + return this.rootApply(data, onFail); + }, { attach: attachments }); + this.attachments = attachments; + this.$ = $; + this.onFail = this.meta.onFail ?? this.$.resolvedConfig.onFail; + this.includesTransform = this.hasKind("morph") || this.hasKind("structure") && this.structuralMorph !== void 0; + this.includesContextualPredicate = this.hasKind("predicate") && this.inner.predicate.length !== 1; + this.isCyclic = this.kind === "alias"; + this.referencesById = { [this.id]: this }; + this.shallowReferences = this.hasKind("structure") ? 
[this, ...this.children] : this.children.reduce((acc, child) => appendUniqueNodes(acc, child.shallowReferences), [this]); + const isStructural = this.isStructural(); + this.flatRefs = []; + this.flatMorphs = []; + for (let i = 0; i < this.children.length; i++) { + this.includesTransform ||= this.children[i].includesTransform; + this.includesContextualPredicate ||= this.children[i].includesContextualPredicate; + this.isCyclic ||= this.children[i].isCyclic; + if (!isStructural) { + const childFlatRefs = this.children[i].flatRefs; + for (let j = 0; j < childFlatRefs.length; j++) { + const childRef = childFlatRefs[j]; + if (!this.flatRefs.some((existing) => flatRefsAreEqual(existing, childRef))) { + this.flatRefs.push(childRef); + for (const branch of childRef.node.branches) if (branch.hasKind("morph") || branch.hasKind("intersection") && branch.structure?.structuralMorph !== void 0) this.flatMorphs.push({ + path: childRef.path, + propString: childRef.propString, + node: branch + }); + } + } + } + Object.assign(this.referencesById, this.children[i].referencesById); + } + this.flatRefs.sort((l, r) => l.path.length > r.path.length ? 1 : l.path.length < r.path.length ? -1 : l.propString > r.propString ? 1 : l.propString < r.propString ? -1 : l.node.expression < r.node.expression ? -1 : 1); + this.allowsRequiresContext = this.includesContextualPredicate || this.isCyclic; + this.rootApplyStrategy = !this.allowsRequiresContext && this.flatMorphs.length === 0 ? this.shallowMorphs.length === 0 ? "allows" : this.shallowMorphs.every((morph) => morph.length === 1 || morph.name === "$arkStructuralMorph") ? this.hasKind("union") ? this.branches.some((branch) => branch.shallowMorphs.length > 1) ? "contextual" : "branchedOptimistic" : this.shallowMorphs.length > 1 ? "contextual" : "optimistic" : "contextual" : "contextual"; + this.rootApply = this.createRootApply(); + this.allows = this.allowsRequiresContext ? (data) => this.traverseAllows(data, new Traversal(data, this.$.resolvedConfig)) : (data) => this.traverseAllows(data); + } + createRootApply() { + switch (this.rootApplyStrategy) { + case "allows": return (data, onFail) => { + if (this.allows(data)) return data; + const ctx = new Traversal(data, this.$.resolvedConfig); + this.traverseApply(data, ctx); + return ctx.finalize(onFail); + }; + case "contextual": return (data, onFail) => { + const ctx = new Traversal(data, this.$.resolvedConfig); + this.traverseApply(data, ctx); + return ctx.finalize(onFail); + }; + case "optimistic": + this.contextFreeMorph = this.shallowMorphs[0]; + const clone = this.$.resolvedConfig.clone; + return (data, onFail) => { + if (this.allows(data)) return this.contextFreeMorph(clone && (typeof data === "object" && data !== null || typeof data === "function") ? clone(data) : data); + const ctx = new Traversal(data, this.$.resolvedConfig); + this.traverseApply(data, ctx); + return ctx.finalize(onFail); + }; + case "branchedOptimistic": return this.createBranchedOptimisticRootApply(); + default: + this.rootApplyStrategy; + return throwInternalError(`Unexpected rootApplyStrategy ${this.rootApplyStrategy}`); + } + } + compiledMeta = compileMeta(this.metaJson); + cacheGetter(name, value$1) { + Object.defineProperty(this, name, { value: value$1 }); + return value$1; + } + get description() { + return this.cacheGetter("description", this.meta?.description ?? 
this.$.resolvedConfig[this.kind].description(this)); + } + get references() { + return Object.values(this.referencesById); + } + precedence = precedenceOfKind(this.kind); + precompilation; + assert = (data, pipedFromCtx) => this(data, pipedFromCtx, (errors) => errors.throw()); + traverse(data, pipedFromCtx) { + return this(data, pipedFromCtx, null); + } + get in() { + return this.cacheGetter("in", this.getIo("in")); + } + get out() { + return this.cacheGetter("out", this.getIo("out")); + } + getIo(ioKind) { + if (!this.includesTransform) return this; + const ioInner = {}; + for (const [k, v] of this.innerEntries) { + const keySchemaImplementation = this.impl.keys[k]; + if (keySchemaImplementation.reduceIo) keySchemaImplementation.reduceIo(ioKind, ioInner, v); + else if (keySchemaImplementation.child) { + const childValue = v; + ioInner[k] = isArray(childValue) ? childValue.map((child) => child[ioKind]) : childValue[ioKind]; + } else ioInner[k] = v; + } + return this.$.node(this.kind, ioInner); + } + toJSON() { + return this.json; + } + toString() { + return `Type<${this.expression}>`; + } + equals(r) { + const rNode = isNode(r) ? r : this.$.parseDefinition(r); + return this.innerHash === rNode.innerHash; + } + ifEquals(r) { + return this.equals(r) ? this : void 0; + } + hasKind(kind) { + return this.kind === kind; + } + assertHasKind(kind) { + if (this.kind !== kind) throwError(`${this.kind} node was not of asserted kind ${kind}`); + return this; + } + hasKindIn(...kinds) { + return kinds.includes(this.kind); + } + assertHasKindIn(...kinds) { + if (!includes(kinds, this.kind)) throwError(`${this.kind} node was not one of asserted kinds ${kinds}`); + return this; + } + isBasis() { + return includes(basisKinds, this.kind); + } + isConstraint() { + return includes(constraintKinds, this.kind); + } + isStructural() { + return includes(structuralKinds, this.kind); + } + isRefinement() { + return includes(refinementKinds, this.kind); + } + isRoot() { + return includes(rootKinds, this.kind); + } + isUnknown() { + return this.hasKind("intersection") && this.children.length === 0; + } + isNever() { + return this.hasKind("union") && this.children.length === 0; + } + hasUnit(value$1) { + return this.hasKind("unit") && this.allows(value$1); + } + hasOpenIntersection() { + return this.impl.intersectionIsOpen; + } + get nestableExpression() { + return this.expression; + } + select(selector) { + const normalized = NodeSelector.normalize(selector); + return this._select(normalized); + } + _select(selector) { + let nodes = NodeSelector.applyBoundary[selector.boundary ?? "references"](this); + if (selector.kind) nodes = nodes.filter((n) => n.kind === selector.kind); + if (selector.where) nodes = nodes.filter(selector.where); + return NodeSelector.applyMethod[selector.method ?? "filter"](nodes, this, selector); + } + transform(mapper, opts) { + return this._transform(mapper, this._createTransformContext(opts)); + } + _createTransformContext(opts) { + return { + root: this, + selected: void 0, + seen: {}, + path: [], + parseOptions: { prereduced: opts?.prereduced ?? false }, + undeclaredKeyHandling: void 0, + ...opts + }; + } + _transform(mapper, ctx) { + const $ = ctx.bindScope ?? 
this.$; + if (ctx.seen[this.id]) return this.$.lazilyResolve(ctx.seen[this.id]); + if (ctx.shouldTransform?.(this, ctx) === false) return this; + let transformedNode; + ctx.seen[this.id] = () => transformedNode; + if (this.hasKind("structure") && this.undeclared !== ctx.undeclaredKeyHandling) ctx = { + ...ctx, + undeclaredKeyHandling: this.undeclared + }; + const innerWithTransformedChildren = flatMorph(this.inner, (k, v) => { + if (!this.impl.keys[k].child) return [k, v]; + const children = v; + if (!isArray(children)) { + const transformed$1 = children._transform(mapper, ctx); + return transformed$1 ? [k, transformed$1] : []; + } + if (children.length === 0) return [k, v]; + const transformed = children.flatMap((n) => { + const transformedChild = n._transform(mapper, ctx); + return transformedChild ?? []; + }); + return transformed.length ? [k, transformed] : []; + }); + delete ctx.seen[this.id]; + const innerWithMeta = Object.assign(innerWithTransformedChildren, { meta: this.meta }); + const transformedInner = ctx.selected && !ctx.selected.includes(this) ? innerWithMeta : mapper(this.kind, innerWithMeta, ctx); + if (transformedInner === null) return null; + if (isNode(transformedInner)) return transformedNode = transformedInner; + const transformedKeys = Object.keys(transformedInner); + const hasNoTypedKeys = transformedKeys.length === 0 || transformedKeys.length === 1 && transformedKeys[0] === "meta"; + if (hasNoTypedKeys && !isEmptyObject(this.inner)) return null; + if ((this.kind === "required" || this.kind === "optional" || this.kind === "index") && !("value" in transformedInner)) return ctx.undeclaredKeyHandling ? { + ...transformedInner, + value: $ark.intrinsic.unknown + } : null; + if (this.kind === "morph") transformedInner.in ??= $ark.intrinsic.unknown; + return transformedNode = $.node(this.kind, transformedInner, ctx.parseOptions); + } + configureReferences(meta, selector = "references") { + const normalized = NodeSelector.normalize(selector); + const mapper = typeof meta === "string" ? (kind, inner) => ({ + ...inner, + meta: { + ...inner.meta, + description: meta + } + }) : typeof meta === "function" ? (kind, inner) => ({ + ...inner, + meta: meta(inner.meta) + }) : (kind, inner) => ({ + ...inner, + meta: { + ...inner.meta, + ...meta + } + }); + if (normalized.boundary === "self") return this.$.node(this.kind, mapper(this.kind, { + ...this.inner, + meta: this.meta + })); + const rawSelected = this._select(normalized); + const selected = rawSelected && liftArray(rawSelected); + const shouldTransform = normalized.boundary === "child" ? (node$1, ctx) => ctx.root.children.includes(node$1) : normalized.boundary === "shallow" ? (node$1) => node$1.kind !== "structure" : () => true; + return this.$.finalize(this.transform(mapper, { + shouldTransform, + selected + })); + } +}; +const NodeSelector = { + applyBoundary: { + self: (node$1) => [node$1], + child: (node$1) => [...node$1.children], + shallow: (node$1) => [...node$1.shallowReferences], + references: (node$1) => [...node$1.references] + }, + applyMethod: { + filter: (nodes) => nodes, + assertFilter: (nodes, from, selector) => { + if (nodes.length === 0) throwError(writeSelectAssertionMessage(from, selector)); + return nodes; + }, + find: (nodes) => nodes[0], + assertFind: (nodes, from, selector) => { + if (nodes.length === 0) throwError(writeSelectAssertionMessage(from, selector)); + return nodes[0]; + } + }, + normalize: (selector) => typeof selector === "function" ? 
{ + boundary: "references", + method: "filter", + where: selector + } : typeof selector === "string" ? isKeyOf(selector, NodeSelector.applyBoundary) ? { + method: "filter", + boundary: selector + } : { + boundary: "references", + method: "filter", + kind: selector + } : { + boundary: "references", + method: "filter", + ...selector + } +}; +const writeSelectAssertionMessage = (from, selector) => `${from} had no references matching ${printable(selector)}.`; +const typePathToPropString = (path$1) => stringifyPath(path$1, { stringifyNonKey: (node$1) => node$1.expression }); +const referenceMatcher = /"(\$ark\.[^"]+)"/g; +const compileMeta = (metaJson) => JSON.stringify(metaJson).replaceAll(referenceMatcher, "$1"); +const flatRef = (path$1, node$1) => ({ + path: path$1, + node: node$1, + propString: typePathToPropString(path$1) +}); +const flatRefsAreEqual = (l, r) => l.propString === r.propString && l.node.equals(r.node); +const appendUniqueFlatRefs = (existing, refs) => appendUnique(existing, refs, { isEqual: flatRefsAreEqual }); +const appendUniqueNodes = (existing, refs) => appendUnique(existing, refs, { isEqual: (l, r) => l.equals(r) }); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/disjoint.js +var Disjoint = class Disjoint extends Array { + static init(kind, l, r, ctx) { + return new Disjoint({ + kind, + l, + r, + path: ctx?.path ?? [], + optional: ctx?.optional ?? false + }); + } + add(kind, l, r, ctx) { + this.push({ + kind, + l, + r, + path: ctx?.path ?? [], + optional: ctx?.optional ?? false + }); + return this; + } + get summary() { + return this.describeReasons(); + } + describeReasons() { + if (this.length === 1) { + const { path: path$1, l, r } = this[0]; + const pathString = stringifyPath(path$1); + return writeUnsatisfiableExpressionError(`Intersection${pathString && ` at ${pathString}`} of ${describeReasons(l, r)}`); + } + return `The following intersections result in unsatisfiable types:\n• ${this.map(({ path: path$1, l, r }) => `${path$1}: ${describeReasons(l, r)}`).join("\n• ")}`; + } + throw() { + return throwParseError(this.describeReasons()); + } + invert() { + const result = this.map((entry) => ({ + ...entry, + l: entry.r, + r: entry.l + })); + if (!(result instanceof Disjoint)) return new Disjoint(...result); + return result; + } + withPrefixKey(key, kind) { + return this.map((entry) => ({ + ...entry, + path: [key, ...entry.path], + optional: entry.optional || kind === "optional" + })); + } + toNeverIfDisjoint() { + return $ark.intrinsic.never; + } +}; +const describeReasons = (l, r) => `${describeReason(l)} and ${describeReason(r)}`; +const describeReason = (value$1) => isNode(value$1) ? value$1.expression : isArray(value$1) ? value$1.map(describeReason).join(" | ") || "never" : String(value$1); +const writeUnsatisfiableExpressionError = (expression) => `${expression} results in an unsatisfiable type`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/intersections.js +const intersectionCache = {}; +const intersectNodesRoot = (l, r, $) => intersectOrPipeNodes(l, r, { + $, + invert: false, + pipe: false +}); +const pipeNodesRoot = (l, r, $) => intersectOrPipeNodes(l, r, { + $, + invert: false, + pipe: true +}); +const intersectOrPipeNodes = (l, r, ctx) => { + const operator = ctx.pipe ? 
"|>" : "&"; + const lrCacheKey = `${l.hash}${operator}${r.hash}`; + if (intersectionCache[lrCacheKey] !== void 0) return intersectionCache[lrCacheKey]; + if (!ctx.pipe) { + const rlCacheKey = `${r.hash}${operator}${l.hash}`; + if (intersectionCache[rlCacheKey] !== void 0) { + const rlResult = intersectionCache[rlCacheKey]; + const lrResult = rlResult instanceof Disjoint ? rlResult.invert() : rlResult; + intersectionCache[lrCacheKey] = lrResult; + return lrResult; + } + } + const isPureIntersection = !ctx.pipe || !l.includesTransform && !r.includesTransform; + if (isPureIntersection && l.equals(r)) return l; + let result = isPureIntersection ? _intersectNodes(l, r, ctx) : l.hasKindIn(...rootKinds) ? _pipeNodes(l, r, ctx) : _intersectNodes(l, r, ctx); + if (isNode(result)) { + if (l.equals(result)) result = l; + else if (r.equals(result)) result = r; + } + intersectionCache[lrCacheKey] = result; + return result; +}; +const _intersectNodes = (l, r, ctx) => { + const leftmostKind = l.precedence < r.precedence ? l.kind : r.kind; + const implementation$22 = l.impl.intersections[r.kind] ?? r.impl.intersections[l.kind]; + if (implementation$22 === void 0) return null; + else if (leftmostKind === l.kind) return implementation$22(l, r, ctx); + else { + let result = implementation$22(r, l, { + ...ctx, + invert: !ctx.invert + }); + if (result instanceof Disjoint) result = result.invert(); + return result; + } +}; +const _pipeNodes = (l, r, ctx) => l.includesTransform || r.includesTransform ? ctx.invert ? pipeMorphed(r, l, ctx) : pipeMorphed(l, r, ctx) : _intersectNodes(l, r, ctx); +const pipeMorphed = (from, to, ctx) => from.distribute((fromBranch) => _pipeMorphed(fromBranch, to, ctx), (results) => { + const viableBranches = results.filter(isNode); + if (viableBranches.length === 0) return Disjoint.init("union", from.branches, to.branches); + if (viableBranches.length < from.branches.length || !from.branches.every((branch, i) => branch.in.equals(viableBranches[i].in))) return ctx.$.parseSchema(viableBranches); + let meta; + if (viableBranches.length === 1) { + const onlyBranch = viableBranches[0]; + if (!meta) return onlyBranch; + return ctx.$.node("morph", { + ...onlyBranch.inner, + in: onlyBranch.in.configure(meta, "self") + }); + } + const schema$1 = { branches: viableBranches }; + if (meta) schema$1.meta = meta; + return ctx.$.parseSchema(schema$1); +}); +const _pipeMorphed = (from, to, ctx) => { + const fromIsMorph = from.hasKind("morph"); + if (fromIsMorph) { + const morphs = [...from.morphs]; + if (from.lastMorphIfNode) { + const outIntersection = intersectOrPipeNodes(from.lastMorphIfNode, to, ctx); + if (outIntersection instanceof Disjoint) return outIntersection; + morphs[morphs.length - 1] = outIntersection; + } else morphs.push(to); + return ctx.$.node("morph", { + morphs, + in: from.inner.in + }); + } + if (to.hasKind("morph")) { + const inTersection = intersectOrPipeNodes(from, to.in, ctx); + if (inTersection instanceof Disjoint) return inTersection; + return ctx.$.node("morph", { + morphs: [to], + in: inTersection + }); + } + return ctx.$.node("morph", { + morphs: [to], + in: from + }); +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/constraint.js +var BaseConstraint = class extends BaseNode { + constructor(attachments, $) { + super(attachments, $); + Object.defineProperty(this, arkKind, { + value: "constraint", + enumerable: false + }); + } + impliedSiblings; + intersect(r) { + return intersectNodesRoot(this, r, this.$); + } +}; +var 
InternalPrimitiveConstraint = class extends BaseConstraint { + traverseApply = (data, ctx) => { + if (!this.traverseAllows(data, ctx)) ctx.errorFromNodeContext(this.errorContext); + }; + compile(js) { + if (js.traversalKind === "Allows") js.return(this.compiledCondition); + else js.if(this.compiledNegation, () => js.line(`${js.ctx}.errorFromNodeContext(${this.compiledErrorContext})`)); + } + get errorContext() { + return { + code: this.kind, + description: this.description, + meta: this.meta, + ...this.inner + }; + } + get compiledErrorContext() { + return compileObjectLiteral(this.errorContext); + } +}; +const constraintKeyParser = (kind) => (schema$1, ctx) => { + if (isArray(schema$1)) { + if (schema$1.length === 0) return; + const nodes = schema$1.map((schema$2) => ctx.$.node(kind, schema$2)); + if (kind === "predicate") return nodes; + return nodes.sort((l, r) => l.hash < r.hash ? -1 : 1); + } + const child = ctx.$.node(kind, schema$1); + return child.hasOpenIntersection() ? [child] : child; +}; +const intersectConstraints = (s) => { + const head = s.r.shift(); + if (!head) { + let result = s.l.length === 0 && s.kind === "structure" ? $ark.intrinsic.unknown.internal : s.ctx.$.node(s.kind, Object.assign(s.baseInner, unflattenConstraints(s.l)), { prereduced: true }); + for (const root of s.roots) { + if (result instanceof Disjoint) return result; + result = intersectOrPipeNodes(root, result, s.ctx); + } + return result; + } + let matched = false; + for (let i = 0; i < s.l.length; i++) { + const result = intersectOrPipeNodes(s.l[i], head, s.ctx); + if (result === null) continue; + if (result instanceof Disjoint) return result; + if (!matched) { + if (result.isRoot()) { + s.roots.push(result); + s.l.splice(i); + return intersectConstraints(s); + } + s.l[i] = result; + matched = true; + } else if (!s.l.includes(result)) return throwInternalError(`Unexpectedly encountered multiple distinct intersection results for refinement ${result}`); + } + if (!matched) s.l.push(head); + if (s.kind === "intersection") { + if (head.impliedSiblings) for (const node$1 of head.impliedSiblings) appendUnique(s.r, node$1); + } + return intersectConstraints(s); +}; +const flattenConstraints = (inner) => { + const result = Object.entries(inner).flatMap(([k, v]) => k in constraintKeys ? v : []).sort((l, r) => l.precedence < r.precedence ? -1 : l.precedence > r.precedence ? 1 : l.kind === "predicate" && r.kind === "predicate" ? 0 : l.hash < r.hash ? -1 : 1); + return result; +}; +const unflattenConstraints = (constraints) => { + const inner = {}; + for (const constraint of constraints) if (constraint.hasOpenIntersection()) inner[constraint.kind] = append(inner[constraint.kind], constraint); + else { + if (inner[constraint.kind]) return throwInternalError(`Unexpected intersection of closed refinements of kind ${constraint.kind}`); + inner[constraint.kind] = constraint; + } + return inner; +}; +const throwInvalidOperandError = (...args$1) => throwParseError(writeInvalidOperandMessage(...args$1)); +const writeInvalidOperandMessage = (kind, expected, actual) => { + const actualDescription = actual.hasKind("morph") ? "a morph" : actual.isUnknown() ? 
"unknown" : actual.exclude(expected).defaultShortDescription; + return `${capitalize$1(kind)} operand must be ${expected.description} (was ${actualDescription})`; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/generic.js +const parseGeneric = (paramDefs, bodyDef, $) => new GenericRoot(paramDefs, bodyDef, $, $, null); +var LazyGenericBody = class extends Callable {}; +var GenericRoot = class extends Callable { + [arkKind] = "generic"; + paramDefs; + bodyDef; + $; + arg$; + baseInstantiation; + hkt; + description; + constructor(paramDefs, bodyDef, $, arg$, hkt) { + super((...args$1) => { + const argNodes = flatMorph(this.names, (i, name) => { + const arg = this.arg$.parse(args$1[i]); + if (!arg.extends(this.constraints[i])) throwParseError(writeUnsatisfiedParameterConstraintMessage(name, this.constraints[i].expression, arg.expression)); + return [name, arg]; + }); + if (this.defIsLazy()) { + const def = this.bodyDef(argNodes); + return this.$.parse(def); + } + return this.$.parse(bodyDef, { args: argNodes }); + }); + this.paramDefs = paramDefs; + this.bodyDef = bodyDef; + this.$ = $; + this.arg$ = arg$; + this.hkt = hkt; + this.description = hkt ? new hkt().description ?? `a generic type for ${hkt.constructor.name}` : "a generic type"; + this.baseInstantiation = this(...this.constraints); + } + defIsLazy() { + return this.bodyDef instanceof LazyGenericBody; + } + cacheGetter(name, value$1) { + Object.defineProperty(this, name, { value: value$1 }); + return value$1; + } + get json() { + return this.cacheGetter("json", { + params: this.params.map((param) => param[1].isUnknown() ? param[0] : [param[0], param[1].json]), + body: snapshot(this.bodyDef) + }); + } + get params() { + return this.cacheGetter("params", this.paramDefs.map((param) => typeof param === "string" ? [param, $ark.intrinsic.unknown] : [param[0], this.$.parse(param[1])])); + } + get names() { + return this.cacheGetter("names", this.params.map((e) => e[0])); + } + get constraints() { + return this.cacheGetter("constraints", this.params.map((e) => e[1])); + } + get internal() { + return this; + } + get referencesById() { + return this.baseInstantiation.internal.referencesById; + } + get references() { + return this.baseInstantiation.internal.references; + } +}; +const writeUnsatisfiedParameterConstraintMessage = (name, constraint, arg) => `${name} must be assignable to ${constraint} (was ${arg})`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/predicate.js +const implementation$21 = implementNode({ + kind: "predicate", + hasAssociatedError: true, + collapsibleKey: "predicate", + keys: { predicate: {} }, + normalize: (schema$1) => typeof schema$1 === "function" ? 
{ predicate: schema$1 } : schema$1, + defaults: { description: (node$1) => `valid according to ${node$1.predicate.name || "an anonymous predicate"}` }, + intersectionIsOpen: true, + intersections: { predicate: () => null } +}); +var PredicateNode = class extends BaseConstraint { + serializedPredicate = registeredReference(this.predicate); + compiledCondition = `${this.serializedPredicate}(data, ctx)`; + compiledNegation = `!${this.compiledCondition}`; + impliedBasis = null; + expression = this.serializedPredicate; + traverseAllows = this.predicate; + errorContext = { + code: "predicate", + description: this.description, + meta: this.meta + }; + compiledErrorContext = compileObjectLiteral(this.errorContext); + traverseApply = (data, ctx) => { + if (!this.predicate(data, ctx.external) && !ctx.hasError()) ctx.errorFromNodeContext(this.errorContext); + }; + compile(js) { + if (js.traversalKind === "Allows") { + js.return(this.compiledCondition); + return; + } + js.if(`${this.compiledNegation} && !ctx.hasError()`, () => js.line(`ctx.errorFromNodeContext(${this.compiledErrorContext})`)); + } + reduceJsonSchema(base, ctx) { + return ctx.fallback.predicate({ + code: "predicate", + base, + predicate: this.predicate + }); + } +}; +const Predicate = { + implementation: implementation$21, + Node: PredicateNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/divisor.js +const implementation$20 = implementNode({ + kind: "divisor", + collapsibleKey: "rule", + keys: { rule: { parse: (divisor) => Number.isInteger(divisor) ? divisor : throwParseError(writeNonIntegerDivisorMessage(divisor)) } }, + normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, + hasAssociatedError: true, + defaults: { description: (node$1) => node$1.rule === 1 ? "an integer" : node$1.rule === 2 ? "even" : `a multiple of ${node$1.rule}` }, + intersections: { divisor: (l, r, ctx) => ctx.$.node("divisor", { rule: Math.abs(l.rule * r.rule / greatestCommonDivisor(l.rule, r.rule)) }) }, + obviatesBasisDescription: true +}); +var DivisorNode = class extends InternalPrimitiveConstraint { + traverseAllows = (data) => data % this.rule === 0; + compiledCondition = `data % ${this.rule} === 0`; + compiledNegation = `data % ${this.rule} !== 0`; + impliedBasis = $ark.intrinsic.number.internal; + expression = `% ${this.rule}`; + reduceJsonSchema(schema$1) { + schema$1.type = "integer"; + if (this.rule === 1) return schema$1; + schema$1.multipleOf = this.rule; + return schema$1; + } +}; +const Divisor = { + implementation: implementation$20, + Node: DivisorNode +}; +const writeNonIntegerDivisorMessage = (divisor) => `divisor must be an integer (was ${divisor})`; +const greatestCommonDivisor = (l, r) => { + let previous; + let greatestCommonDivisor$1 = l; + let current = r; + while (current !== 0) { + previous = current; + current = greatestCommonDivisor$1 % current; + greatestCommonDivisor$1 = previous; + } + return greatestCommonDivisor$1; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/range.js +var BaseRange = class extends InternalPrimitiveConstraint { + boundOperandKind = operandKindsByBoundKind[this.kind]; + compiledActual = this.boundOperandKind === "value" ? `data` : this.boundOperandKind === "length" ? 
`data.length` : `data.valueOf()`; + comparator = compileComparator(this.kind, this.exclusive); + numericLimit = this.rule.valueOf(); + expression = `${this.comparator} ${this.rule}`; + compiledCondition = `${this.compiledActual} ${this.comparator} ${this.numericLimit}`; + compiledNegation = `${this.compiledActual} ${negatedComparators[this.comparator]} ${this.numericLimit}`; + stringLimit = this.boundOperandKind === "date" ? dateLimitToString(this.numericLimit) : `${this.numericLimit}`; + limitKind = this.comparator["0"] === "<" ? "upper" : "lower"; + isStricterThan(r) { + const thisLimitIsStricter = this.limitKind === "upper" ? this.numericLimit < r.numericLimit : this.numericLimit > r.numericLimit; + return thisLimitIsStricter || this.numericLimit === r.numericLimit && this.exclusive === true && !r.exclusive; + } + overlapsRange(r) { + if (this.isStricterThan(r)) return false; + if (this.numericLimit === r.numericLimit && (this.exclusive || r.exclusive)) return false; + return true; + } + overlapIsUnit(r) { + return this.numericLimit === r.numericLimit && !this.exclusive && !r.exclusive; + } +}; +const negatedComparators = { + "<": ">=", + "<=": ">", + ">": "<=", + ">=": "<" +}; +const boundKindPairsByLower = { + min: "max", + minLength: "maxLength", + after: "before" +}; +const parseExclusiveKey = { parse: (flag) => flag || void 0 }; +const createLengthSchemaNormalizer = (kind) => (schema$1) => { + if (typeof schema$1 === "number") return { rule: schema$1 }; + const { exclusive,...normalized } = schema$1; + return exclusive ? { + ...normalized, + rule: kind === "minLength" ? normalized.rule + 1 : normalized.rule - 1 + } : normalized; +}; +const createDateSchemaNormalizer = (kind) => (schema$1) => { + if (typeof schema$1 === "number" || typeof schema$1 === "string" || schema$1 instanceof Date) return { rule: schema$1 }; + const { exclusive,...normalized } = schema$1; + if (!exclusive) return normalized; + const numericLimit = typeof normalized.rule === "number" ? normalized.rule : typeof normalized.rule === "string" ? new Date(normalized.rule).valueOf() : normalized.rule.valueOf(); + return exclusive ? { + ...normalized, + rule: kind === "after" ? numericLimit + 1 : numericLimit - 1 + } : normalized; +}; +const parseDateLimit = (limit) => typeof limit === "string" || typeof limit === "number" ? new Date(limit) : limit; +const writeInvalidLengthBoundMessage = (kind, limit) => `${kind} bound must be a positive integer (was ${limit})`; +const createLengthRuleParser = (kind) => (limit) => { + if (!Number.isInteger(limit) || limit < 0) throwParseError(writeInvalidLengthBoundMessage(kind, limit)); + return limit; +}; +const operandKindsByBoundKind = { + min: "value", + max: "value", + minLength: "length", + maxLength: "length", + after: "date", + before: "date" +}; +const compileComparator = (kind, exclusive) => `${isKeyOf(kind, boundKindPairsByLower) ? ">" : "<"}${exclusive ? "" : "="}`; +const dateLimitToString = (limit) => typeof limit === "string" ? 
limit : new Date(limit).toLocaleString(); +const writeUnboundableMessage = (root) => `Bounded expression ${root} must be exactly one of number, string, Array, or Date`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/after.js +const implementation$19 = implementNode({ + kind: "after", + collapsibleKey: "rule", + hasAssociatedError: true, + keys: { rule: { + parse: parseDateLimit, + serialize: (schema$1) => schema$1.toISOString() + } }, + normalize: createDateSchemaNormalizer("after"), + defaults: { + description: (node$1) => `${node$1.collapsibleLimitString} or later`, + actual: describeCollapsibleDate + }, + intersections: { after: (l, r) => l.isStricterThan(r) ? l : r } +}); +var AfterNode = class extends BaseRange { + impliedBasis = $ark.intrinsic.Date.internal; + collapsibleLimitString = describeCollapsibleDate(this.rule); + traverseAllows = (data) => data >= this.rule; + reduceJsonSchema(base, ctx) { + return ctx.fallback.date({ + code: "date", + base, + after: this.rule + }); + } +}; +const After = { + implementation: implementation$19, + Node: AfterNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/before.js +const implementation$18 = implementNode({ + kind: "before", + collapsibleKey: "rule", + hasAssociatedError: true, + keys: { rule: { + parse: parseDateLimit, + serialize: (schema$1) => schema$1.toISOString() + } }, + normalize: createDateSchemaNormalizer("before"), + defaults: { + description: (node$1) => `${node$1.collapsibleLimitString} or earlier`, + actual: describeCollapsibleDate + }, + intersections: { + before: (l, r) => l.isStricterThan(r) ? l : r, + after: (before, after, ctx) => before.overlapsRange(after) ? before.overlapIsUnit(after) ? ctx.$.node("unit", { unit: before.rule }) : null : Disjoint.init("range", before, after) + } +}); +var BeforeNode = class extends BaseRange { + collapsibleLimitString = describeCollapsibleDate(this.rule); + traverseAllows = (data) => data <= this.rule; + impliedBasis = $ark.intrinsic.Date.internal; + reduceJsonSchema(base, ctx) { + return ctx.fallback.date({ + code: "date", + base, + before: this.rule + }); + } +}; +const Before = { + implementation: implementation$18, + Node: BeforeNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/exactLength.js +const implementation$17 = implementNode({ + kind: "exactLength", + collapsibleKey: "rule", + keys: { rule: { parse: createLengthRuleParser("exactLength") } }, + normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, + hasAssociatedError: true, + defaults: { + description: (node$1) => `exactly length ${node$1.rule}`, + actual: (data) => `${data.length}` + }, + intersections: { + exactLength: (l, r, ctx) => Disjoint.init("unit", ctx.$.node("unit", { unit: l.rule }), ctx.$.node("unit", { unit: r.rule }), { path: ["length"] }), + minLength: (exactLength, minLength) => exactLength.rule >= minLength.rule ? exactLength : Disjoint.init("range", exactLength, minLength), + maxLength: (exactLength, maxLength) => exactLength.rule <= maxLength.rule ? 
exactLength : Disjoint.init("range", exactLength, maxLength) + } +}); +var ExactLengthNode = class extends InternalPrimitiveConstraint { + traverseAllows = (data) => data.length === this.rule; + compiledCondition = `data.length === ${this.rule}`; + compiledNegation = `data.length !== ${this.rule}`; + impliedBasis = $ark.intrinsic.lengthBoundable.internal; + expression = `== ${this.rule}`; + reduceJsonSchema(schema$1) { + switch (schema$1.type) { + case "string": + schema$1.minLength = this.rule; + schema$1.maxLength = this.rule; + return schema$1; + case "array": + schema$1.minItems = this.rule; + schema$1.maxItems = this.rule; + return schema$1; + default: return ToJsonSchema.throwInternalOperandError("exactLength", schema$1); + } + } +}; +const ExactLength = { + implementation: implementation$17, + Node: ExactLengthNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/max.js +const implementation$16 = implementNode({ + kind: "max", + collapsibleKey: "rule", + hasAssociatedError: true, + keys: { + rule: {}, + exclusive: parseExclusiveKey + }, + normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, + defaults: { description: (node$1) => { + if (node$1.rule === 0) return node$1.exclusive ? "negative" : "non-positive"; + return `${node$1.exclusive ? "less than" : "at most"} ${node$1.rule}`; + } }, + intersections: { + max: (l, r) => l.isStricterThan(r) ? l : r, + min: (max, min, ctx) => max.overlapsRange(min) ? max.overlapIsUnit(min) ? ctx.$.node("unit", { unit: max.rule }) : null : Disjoint.init("range", max, min) + }, + obviatesBasisDescription: true +}); +var MaxNode = class extends BaseRange { + impliedBasis = $ark.intrinsic.number.internal; + traverseAllows = this.exclusive ? (data) => data < this.rule : (data) => data <= this.rule; + reduceJsonSchema(schema$1) { + if (this.exclusive) schema$1.exclusiveMaximum = this.rule; + else schema$1.maximum = this.rule; + return schema$1; + } +}; +const Max = { + implementation: implementation$16, + Node: MaxNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/maxLength.js +const implementation$15 = implementNode({ + kind: "maxLength", + collapsibleKey: "rule", + hasAssociatedError: true, + keys: { rule: { parse: createLengthRuleParser("maxLength") } }, + reduce: (inner, $) => inner.rule === 0 ? $.node("exactLength", inner) : void 0, + normalize: createLengthSchemaNormalizer("maxLength"), + defaults: { + description: (node$1) => `at most length ${node$1.rule}`, + actual: (data) => `${data.length}` + }, + intersections: { + maxLength: (l, r) => l.isStricterThan(r) ? l : r, + minLength: (max, min, ctx) => max.overlapsRange(min) ? max.overlapIsUnit(min) ? 
ctx.$.node("exactLength", { rule: max.rule }) : null : Disjoint.init("range", max, min) + } +}); +var MaxLengthNode = class extends BaseRange { + impliedBasis = $ark.intrinsic.lengthBoundable.internal; + traverseAllows = (data) => data.length <= this.rule; + reduceJsonSchema(schema$1) { + switch (schema$1.type) { + case "string": + schema$1.maxLength = this.rule; + return schema$1; + case "array": + schema$1.maxItems = this.rule; + return schema$1; + default: return ToJsonSchema.throwInternalOperandError("maxLength", schema$1); + } + } +}; +const MaxLength = { + implementation: implementation$15, + Node: MaxLengthNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/min.js +const implementation$14 = implementNode({ + kind: "min", + collapsibleKey: "rule", + hasAssociatedError: true, + keys: { + rule: {}, + exclusive: parseExclusiveKey + }, + normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, + defaults: { description: (node$1) => { + if (node$1.rule === 0) return node$1.exclusive ? "positive" : "non-negative"; + return `${node$1.exclusive ? "more than" : "at least"} ${node$1.rule}`; + } }, + intersections: { min: (l, r) => l.isStricterThan(r) ? l : r }, + obviatesBasisDescription: true +}); +var MinNode = class extends BaseRange { + impliedBasis = $ark.intrinsic.number.internal; + traverseAllows = this.exclusive ? (data) => data > this.rule : (data) => data >= this.rule; + reduceJsonSchema(schema$1) { + if (this.exclusive) schema$1.exclusiveMinimum = this.rule; + else schema$1.minimum = this.rule; + return schema$1; + } +}; +const Min = { + implementation: implementation$14, + Node: MinNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/minLength.js +const implementation$13 = implementNode({ + kind: "minLength", + collapsibleKey: "rule", + hasAssociatedError: true, + keys: { rule: { parse: createLengthRuleParser("minLength") } }, + reduce: (inner) => inner.rule === 0 ? $ark.intrinsic.unknown : void 0, + normalize: createLengthSchemaNormalizer("minLength"), + defaults: { + description: (node$1) => node$1.rule === 1 ? "non-empty" : `at least length ${node$1.rule}`, + actual: (data) => data.length === 0 ? "" : `${data.length}` + }, + intersections: { minLength: (l, r) => l.isStricterThan(r) ? 
l : r } +}); +var MinLengthNode = class extends BaseRange { + impliedBasis = $ark.intrinsic.lengthBoundable.internal; + traverseAllows = (data) => data.length >= this.rule; + reduceJsonSchema(schema$1) { + switch (schema$1.type) { + case "string": + schema$1.minLength = this.rule; + return schema$1; + case "array": + schema$1.minItems = this.rule; + return schema$1; + default: return ToJsonSchema.throwInternalOperandError("minLength", schema$1); + } + } +}; +const MinLength = { + implementation: implementation$13, + Node: MinLengthNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/kinds.js +const boundImplementationsByKind = { + min: Min.implementation, + max: Max.implementation, + minLength: MinLength.implementation, + maxLength: MaxLength.implementation, + exactLength: ExactLength.implementation, + after: After.implementation, + before: Before.implementation +}; +const boundClassesByKind = { + min: Min.Node, + max: Max.Node, + minLength: MinLength.Node, + maxLength: MaxLength.Node, + exactLength: ExactLength.Node, + after: After.Node, + before: Before.Node +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/pattern.js +const implementation$12 = implementNode({ + kind: "pattern", + collapsibleKey: "rule", + keys: { + rule: {}, + flags: {} + }, + normalize: (schema$1) => typeof schema$1 === "string" ? { rule: schema$1 } : schema$1 instanceof RegExp ? schema$1.flags ? { + rule: schema$1.source, + flags: schema$1.flags + } : { rule: schema$1.source } : schema$1, + obviatesBasisDescription: true, + obviatesBasisExpression: true, + hasAssociatedError: true, + intersectionIsOpen: true, + defaults: { description: (node$1) => `matched by ${node$1.rule}` }, + intersections: { pattern: () => null } +}); +var PatternNode = class extends InternalPrimitiveConstraint { + instance = new RegExp(this.rule, this.flags); + expression = `${this.instance}`; + traverseAllows = this.instance.test.bind(this.instance); + compiledCondition = `${this.expression}.test(data)`; + compiledNegation = `!${this.compiledCondition}`; + impliedBasis = $ark.intrinsic.string.internal; + reduceJsonSchema(base, ctx) { + if (base.pattern) return ctx.fallback.patternIntersection({ + code: "patternIntersection", + base, + pattern: this.rule + }); + base.pattern = this.rule; + return base; + } +}; +const Pattern = { + implementation: implementation$12, + Node: PatternNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/parse.js +const schemaKindOf = (schema$1, allowedKinds) => { + const kind = discriminateRootKind(schema$1); + if (allowedKinds && !allowedKinds.includes(kind)) return throwParseError(`Root of kind ${kind} should be one of ${allowedKinds}`); + return kind; +}; +const discriminateRootKind = (schema$1) => { + if (hasArkKind(schema$1, "root")) return schema$1.kind; + if (typeof schema$1 === "string") return schema$1[0] === "$" ? "alias" : schema$1 in domainDescriptions ? 
"domain" : "proto"; + if (typeof schema$1 === "function") return "proto"; + if (typeof schema$1 !== "object" || schema$1 === null) return throwParseError(writeInvalidSchemaMessage(schema$1)); + if ("morphs" in schema$1) return "morph"; + if ("branches" in schema$1 || isArray(schema$1)) return "union"; + if ("unit" in schema$1) return "unit"; + if ("reference" in schema$1) return "alias"; + const schemaKeys = Object.keys(schema$1); + if (schemaKeys.length === 0 || schemaKeys.some((k) => k in constraintKeys)) return "intersection"; + if ("proto" in schema$1) return "proto"; + if ("domain" in schema$1) return "domain"; + return throwParseError(writeInvalidSchemaMessage(schema$1)); +}; +const writeInvalidSchemaMessage = (schema$1) => `${printable(schema$1)} is not a valid type schema`; +const nodeCountsByPrefix = {}; +const serializeListableChild = (listableNode) => isArray(listableNode) ? listableNode.map((node$1) => node$1.collapsibleJson) : listableNode.collapsibleJson; +const nodesByRegisteredId = {}; +$ark.nodesByRegisteredId = nodesByRegisteredId; +const registerNodeId = (prefix) => { + nodeCountsByPrefix[prefix] ??= 0; + return `${prefix}${++nodeCountsByPrefix[prefix]}`; +}; +const parseNode = (ctx) => { + const impl = nodeImplementationsByKind[ctx.kind]; + const configuredSchema = impl.applyConfig?.(ctx.def, ctx.$.resolvedConfig) ?? ctx.def; + const inner = {}; + const { meta: metaSchema,...innerSchema } = configuredSchema; + const meta = metaSchema === void 0 ? {} : typeof metaSchema === "string" ? { description: metaSchema } : metaSchema; + const innerSchemaEntries = entriesOf(innerSchema).sort(([lKey], [rKey]) => isNodeKind(lKey) ? isNodeKind(rKey) ? precedenceOfKind(lKey) - precedenceOfKind(rKey) : 1 : isNodeKind(rKey) ? -1 : lKey < rKey ? -1 : 1).filter(([k, v]) => { + if (k.startsWith("meta.")) { + const metaKey = k.slice(5); + meta[metaKey] = v; + return false; + } + return true; + }); + for (const entry of innerSchemaEntries) { + const k = entry[0]; + const keyImpl = impl.keys[k]; + if (!keyImpl) return throwParseError(`Key ${k} is not valid on ${ctx.kind} schema`); + const v = keyImpl.parse ? keyImpl.parse(entry[1], ctx) : entry[1]; + if (v !== unset && (v !== void 0 || keyImpl.preserveUndefined)) inner[k] = v; + } + if (impl.reduce && !ctx.prereduced) { + const reduced = impl.reduce(inner, ctx.$); + if (reduced) { + if (reduced instanceof Disjoint) return reduced.throw(); + return withMeta(reduced, meta); + } + } + const node$1 = createNode({ + id: ctx.id, + kind: ctx.kind, + inner, + meta, + $: ctx.$ + }); + return node$1; +}; +const createNode = ({ id, kind, inner, meta, $, ignoreCache }) => { + const impl = nodeImplementationsByKind[kind]; + const innerEntries = entriesOf(inner); + const children = []; + let innerJson = {}; + for (const [k, v] of innerEntries) { + const keyImpl = impl.keys[k]; + const serialize = keyImpl.serialize ?? (keyImpl.child ? serializeListableChild : defaultValueSerializer); + innerJson[k] = serialize(v); + if (keyImpl.child === true) { + const listableNode = v; + if (isArray(listableNode)) children.push(...listableNode); + else children.push(listableNode); + } else if (typeof keyImpl.child === "function") children.push(...keyImpl.child(v)); + } + if (impl.finalizeInnerJson) innerJson = impl.finalizeInnerJson(innerJson); + let json$2 = { ...innerJson }; + let metaJson = {}; + if (!isEmptyObject(meta)) { + metaJson = flatMorph(meta, (k, v) => [k, k === "examples" ? 
v : defaultValueSerializer(v)]); + json$2.meta = possiblyCollapse(metaJson, "description", true); + } + innerJson = possiblyCollapse(innerJson, impl.collapsibleKey, false); + const innerHash = JSON.stringify({ + kind, + ...innerJson + }); + json$2 = possiblyCollapse(json$2, impl.collapsibleKey, false); + const collapsibleJson = possiblyCollapse(json$2, impl.collapsibleKey, true); + const hash = JSON.stringify({ + kind, + ...json$2 + }); + if ($.nodesByHash[hash] && !ignoreCache) return $.nodesByHash[hash]; + const attachments = { + id, + kind, + impl, + inner, + innerEntries, + innerJson, + innerHash, + meta, + metaJson, + json: json$2, + hash, + collapsibleJson, + children + }; + if (kind !== "intersection") { + for (const k in inner) if (k !== "in" && k !== "out") attachments[k] = inner[k]; + } + const node$1 = new nodeClassesByKind[kind](attachments, $); + return $.nodesByHash[hash] = node$1; +}; +const withId = (node$1, id) => { + if (node$1.id === id) return node$1; + if (isNode(nodesByRegisteredId[id])) throwInternalError(`Unexpected attempt to overwrite node id ${id}`); + return createNode({ + id, + kind: node$1.kind, + inner: node$1.inner, + meta: node$1.meta, + $: node$1.$, + ignoreCache: true + }); +}; +const withMeta = (node$1, meta, id) => { + if (id && isNode(nodesByRegisteredId[id])) throwInternalError(`Unexpected attempt to overwrite node id ${id}`); + return createNode({ + id: id ?? registerNodeId(meta.alias ?? node$1.kind), + kind: node$1.kind, + inner: node$1.inner, + meta, + $: node$1.$ + }); +}; +const possiblyCollapse = (json$2, toKey, allowPrimitive) => { + const collapsibleKeys = Object.keys(json$2); + if (collapsibleKeys.length === 1 && collapsibleKeys[0] === toKey) { + const collapsed = json$2[toKey]; + if (allowPrimitive) return collapsed; + if (hasDomain(collapsed, "object") && (Object.keys(collapsed).length === 1 || Array.isArray(collapsed))) return collapsed; + } + return json$2; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/prop.js +const intersectProps = (l, r, ctx) => { + if (l.key !== r.key) return null; + const key = l.key; + let value$1 = intersectOrPipeNodes(l.value, r.value, ctx); + const kind = l.required || r.required ? "required" : "optional"; + if (value$1 instanceof Disjoint) if (kind === "optional") value$1 = $ark.intrinsic.never.internal; + else return value$1.withPrefixKey(l.key, l.required && r.required ? "required" : "optional"); + if (kind === "required") return ctx.$.node("required", { + key, + value: value$1 + }); + const defaultIntersection = l.hasDefault() ? r.hasDefault() ? l.default === r.default ? l.default : throwParseError(writeDefaultIntersectionMessage(l.default, r.default)) : l.default : r.hasDefault() ? r.default : unset; + return ctx.$.node("optional", { + key, + value: value$1, + default: defaultIntersection + }); +}; +var BaseProp = class extends BaseConstraint { + required = this.kind === "required"; + optional = this.kind === "optional"; + impliedBasis = $ark.intrinsic.object.internal; + serializedKey = compileSerializedValue(this.key); + compiledKey = typeof this.key === "string" ? 
this.key : this.serializedKey; + flatRefs = append(this.value.flatRefs.map((ref) => flatRef([this.key, ...ref.path], ref.node)), flatRef([this.key], this.value)); + _transform(mapper, ctx) { + ctx.path.push(this.key); + const result = super._transform(mapper, ctx); + ctx.path.pop(); + return result; + } + hasDefault() { + return "default" in this.inner; + } + traverseAllows = (data, ctx) => { + if (this.key in data) return traverseKey(this.key, () => this.value.traverseAllows(data[this.key], ctx), ctx); + return this.optional; + }; + traverseApply = (data, ctx) => { + if (this.key in data) traverseKey(this.key, () => this.value.traverseApply(data[this.key], ctx), ctx); + else if (this.hasKind("required")) ctx.errorFromNodeContext(this.errorContext); + }; + compile(js) { + js.if(`${this.serializedKey} in data`, () => js.traverseKey(this.serializedKey, `data${js.prop(this.key)}`, this.value)); + if (this.hasKind("required")) js.else(() => js.traversalKind === "Apply" ? js.line(`ctx.errorFromNodeContext(${this.compiledErrorContext})`) : js.return(false)); + if (js.traversalKind === "Allows") js.return(true); + } +}; +const writeDefaultIntersectionMessage = (lValue, rValue) => `Invalid intersection of default values ${printable(lValue)} & ${printable(rValue)}`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/optional.js +const implementation$11 = implementNode({ + kind: "optional", + hasAssociatedError: false, + intersectionIsOpen: true, + keys: { + key: {}, + value: { + child: true, + parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) + }, + default: { preserveUndefined: true } + }, + normalize: (schema$1) => schema$1, + reduce: (inner, $) => { + if ($.resolvedConfig.exactOptionalPropertyTypes === false) { + if (!inner.value.allows(void 0)) return $.node("optional", { + ...inner, + value: inner.value.or(intrinsic.undefined) + }, { prereduced: true }); + } + }, + defaults: { description: (node$1) => `${node$1.compiledKey}?: ${node$1.value.description}` }, + intersections: { optional: intersectProps } +}); +var OptionalNode = class extends BaseProp { + constructor(...args$1) { + super(...args$1); + if ("default" in this.inner) assertDefaultValueAssignability(this.value, this.inner.default, this.key); + } + get outProp() { + if (!this.hasDefault()) return this; + const { default: defaultValue,...requiredInner } = this.inner; + return this.cacheGetter("outProp", this.$.node("required", requiredInner, { prereduced: true })); + } + expression = this.hasDefault() ? `${this.compiledKey}: ${this.value.expression} = ${printable(this.inner.default)}` : `${this.compiledKey}?: ${this.value.expression}`; + defaultValueMorph = getDefaultableMorph(this); + defaultValueMorphRef = this.defaultValueMorph && registeredReference(this.defaultValueMorph); +}; +const Optional = { + implementation: implementation$11, + Node: OptionalNode +}; +const defaultableMorphCache = {}; +const getDefaultableMorph = (node$1) => { + if (!node$1.hasDefault()) return; + const cacheKey = `{${node$1.compiledKey}: ${node$1.value.id} = ${defaultValueSerializer(node$1.default)}}`; + return defaultableMorphCache[cacheKey] ??= computeDefaultValueMorph(node$1.key, node$1.value, node$1.default); +}; +const computeDefaultValueMorph = (key, value$1, defaultInput) => { + if (typeof defaultInput === "function") return value$1.includesTransform ? 
(data, ctx) => { + traverseKey(key, () => value$1(data[key] = defaultInput(), ctx), ctx); + return data; + } : (data) => { + data[key] = defaultInput(); + return data; + }; + const precomputedMorphedDefault = value$1.includesTransform ? value$1.assert(defaultInput) : defaultInput; + return hasDomain(precomputedMorphedDefault, "object") ? (data, ctx) => { + traverseKey(key, () => value$1(data[key] = defaultInput, ctx), ctx); + return data; + } : (data) => { + data[key] = precomputedMorphedDefault; + return data; + }; +}; +const assertDefaultValueAssignability = (node$1, value$1, key) => { + const wrapped = isThunk(value$1); + if (hasDomain(value$1, "object") && !wrapped) throwParseError(writeNonPrimitiveNonFunctionDefaultValueMessage(key)); + const out = node$1.in(wrapped ? value$1() : value$1); + if (out instanceof ArkErrors) { + if (key === null) throwParseError(`Default ${out.summary}`); + const atPath = out.transform((e) => e.transform((input) => ({ + ...input, + prefixPath: [key] + }))); + throwParseError(`Default for ${atPath.summary}`); + } + return value$1; +}; +const writeNonPrimitiveNonFunctionDefaultValueMessage = (key) => { + const keyDescription = key === null ? "" : typeof key === "number" ? `for value at [${key}] ` : `for ${compileSerializedValue(key)} `; + return `Non-primitive default ${keyDescription}must be specified as a function like () => ({my: 'object'})`; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/root.js +var BaseRoot = class extends BaseNode { + constructor(attachments, $) { + super(attachments, $); + Object.defineProperty(this, arkKind, { + value: "root", + enumerable: false + }); + } + get internal() { + return this; + } + get "~standard"() { + return { + vendor: "arktype", + version: 1, + validate: (input) => { + const out = this(input); + if (out instanceof ArkErrors) return out; + return { value: out }; + } + }; + } + as() { + return this; + } + brand(name) { + if (name === "") return throwParseError(emptyBrandNameMessage); + return this; + } + readonly() { + return this; + } + branches = this.hasKind("union") ? this.inner.branches : [this]; + distribute(mapBranch, reduceMapped) { + const mappedBranches = this.branches.map(mapBranch); + return reduceMapped?.(mappedBranches) ?? mappedBranches; + } + get shortDescription() { + return this.meta.description ?? this.defaultShortDescription; + } + toJsonSchema(opts = {}) { + const ctx = mergeToJsonSchemaConfigs(this.$.resolvedConfig.toJsonSchema, opts); + ctx.useRefs ||= this.isCyclic; + const schema$1 = typeof ctx.dialect === "string" ? { $schema: ctx.dialect } : {}; + Object.assign(schema$1, this.toJsonSchemaRecurse(ctx)); + if (ctx.useRefs) schema$1.$defs = flatMorph(this.references, (i, ref) => ref.isRoot() && !ref.alwaysExpandJsonSchema ? 
[ref.id, ref.toResolvedJsonSchema(ctx)] : []); + return schema$1; + } + toJsonSchemaRecurse(ctx) { + if (ctx.useRefs && !this.alwaysExpandJsonSchema) return { $ref: `#/$defs/${this.id}` }; + return this.toResolvedJsonSchema(ctx); + } + get alwaysExpandJsonSchema() { + return this.isBasis() || this.kind === "alias" || this.hasKind("union") && this.isBoolean; + } + toResolvedJsonSchema(ctx) { + const result = this.innerToJsonSchema(ctx); + return Object.assign(result, this.metaJson); + } + intersect(r) { + const rNode = this.$.parseDefinition(r); + const result = this.rawIntersect(rNode); + if (result instanceof Disjoint) return result; + return this.$.finalize(result); + } + rawIntersect(r) { + return intersectNodesRoot(this, r, this.$); + } + toNeverIfDisjoint() { + return this; + } + and(r) { + const result = this.intersect(r); + return result instanceof Disjoint ? result.throw() : result; + } + rawAnd(r) { + const result = this.rawIntersect(r); + return result instanceof Disjoint ? result.throw() : result; + } + or(r) { + const rNode = this.$.parseDefinition(r); + return this.$.finalize(this.rawOr(rNode)); + } + rawOr(r) { + const branches = [...this.branches, ...r.branches]; + return this.$.node("union", branches); + } + map(flatMapEntry) { + return this.$.schema(this.applyStructuralOperation("map", [flatMapEntry])); + } + pick(...keys) { + return this.$.schema(this.applyStructuralOperation("pick", keys)); + } + omit(...keys) { + return this.$.schema(this.applyStructuralOperation("omit", keys)); + } + required() { + return this.$.schema(this.applyStructuralOperation("required", [])); + } + partial() { + return this.$.schema(this.applyStructuralOperation("partial", [])); + } + _keyof; + keyof() { + if (this._keyof) return this._keyof; + const result = this.applyStructuralOperation("keyof", []).reduce((result$1, branch) => result$1.intersect(branch).toNeverIfDisjoint(), $ark.intrinsic.unknown.internal); + if (result.branches.length === 0) throwParseError(writeUnsatisfiableExpressionError(`keyof ${this.expression}`)); + return this._keyof = this.$.finalize(result); + } + get props() { + if (this.branches.length !== 1) return throwParseError(writeLiteralUnionEntriesMessage(this.expression)); + return [...this.applyStructuralOperation("props", [])[0]]; + } + merge(r) { + const rNode = this.$.parseDefinition(r); + return this.$.schema(rNode.distribute((branch) => this.applyStructuralOperation("merge", [structureOf(branch) ?? throwParseError(writeNonStructuralOperandMessage("merge", branch.expression))]))); + } + applyStructuralOperation(operation, args$1) { + return this.distribute((branch) => { + if (branch.equals($ark.intrinsic.object) && operation !== "merge") return branch; + const structure = structureOf(branch); + if (!structure) throwParseError(writeNonStructuralOperandMessage(operation, branch.expression)); + if (operation === "keyof") return structure.keyof(); + if (operation === "get") return structure.get(...args$1); + if (operation === "props") return structure.props; + const structuralMethodName = operation === "required" ? "require" : operation === "partial" ? 
"optionalize" : operation; + return this.$.node("intersection", { + ...branch.inner, + structure: structure[structuralMethodName](...args$1) + }); + }); + } + get(...path$1) { + if (path$1[0] === void 0) return this; + return this.$.schema(this.applyStructuralOperation("get", path$1)); + } + extract(r) { + const rNode = this.$.parseDefinition(r); + return this.$.schema(this.branches.filter((branch) => branch.extends(rNode))); + } + exclude(r) { + const rNode = this.$.parseDefinition(r); + return this.$.schema(this.branches.filter((branch) => !branch.extends(rNode))); + } + array() { + return this.$.schema(this.isUnknown() ? { proto: Array } : { + proto: Array, + sequence: this + }, { prereduced: true }); + } + overlaps(r) { + const intersection = this.intersect(r); + return !(intersection instanceof Disjoint); + } + extends(r) { + const intersection = this.intersect(r); + return !(intersection instanceof Disjoint) && this.equals(intersection); + } + ifExtends(r) { + return this.extends(r) ? this : void 0; + } + subsumes(r) { + const rNode = this.$.parseDefinition(r); + return rNode.extends(this); + } + configure(meta, selector = "shallow") { + return this.configureReferences(meta, selector); + } + describe(description, selector = "shallow") { + return this.configure({ description }, selector); + } + optional() { + return [this, "?"]; + } + default(thunkableValue) { + assertDefaultValueAssignability(this, thunkableValue, null); + return [ + this, + "=", + thunkableValue + ]; + } + from(input) { + return this.assert(input); + } + _pipe(...morphs) { + const result = morphs.reduce((acc, morph) => acc.rawPipeOnce(morph), this); + return this.$.finalize(result); + } + tryPipe(...morphs) { + const result = morphs.reduce((acc, morph) => acc.rawPipeOnce(hasArkKind(morph, "root") ? morph : (In, ctx) => { + try { + return morph(In, ctx); + } catch (e) { + return ctx.error({ + code: "predicate", + predicate: morph, + actual: `aborted due to error:\n ${e}\n` + }); + } + }), this); + return this.$.finalize(result); + } + pipe = Object.assign(this._pipe.bind(this), { try: this.tryPipe.bind(this) }); + to(def) { + return this.$.finalize(this.toNode(this.$.parseDefinition(def))); + } + toNode(root) { + const result = pipeNodesRoot(this, root, this.$); + if (result instanceof Disjoint) return result.throw(); + return result; + } + rawPipeOnce(morph) { + if (hasArkKind(morph, "root")) return this.toNode(morph); + return this.distribute((branch) => branch.hasKind("morph") ? this.$.node("morph", { + in: branch.inner.in, + morphs: [...branch.morphs, morph] + }) : this.$.node("morph", { + in: branch, + morphs: [morph] + }), this.$.parseSchema); + } + narrow(predicate) { + return this.constrainOut("predicate", predicate); + } + constrain(kind, schema$1) { + return this._constrain("root", kind, schema$1); + } + constrainIn(kind, schema$1) { + return this._constrain("in", kind, schema$1); + } + constrainOut(kind, schema$1) { + return this._constrain("out", kind, schema$1); + } + _constrain(io, kind, schema$1) { + const constraint = this.$.node(kind, schema$1); + if (constraint.isRoot()) return constraint.isUnknown() ? this : throwInternalError(`Unexpected constraint node ${constraint}`); + const operand = io === "root" ? 
this : this[io]; + if (operand.hasKind("morph") || constraint.impliedBasis && !operand.extends(constraint.impliedBasis)) return throwInvalidOperandError(kind, constraint.impliedBasis, this); + const partialIntersection = this.$.node("intersection", { [constraint.kind]: constraint }); + const result = io === "out" ? pipeNodesRoot(this, partialIntersection, this.$) : intersectNodesRoot(this, partialIntersection, this.$); + if (result instanceof Disjoint) result.throw(); + return this.$.finalize(result); + } + onUndeclaredKey(cfg) { + const rule = typeof cfg === "string" ? cfg : cfg.rule; + const deep = typeof cfg === "string" ? false : cfg.deep; + return this.$.finalize(this.transform((kind, inner) => kind === "structure" ? rule === "ignore" ? omit(inner, { undeclared: 1 }) : { + ...inner, + undeclared: rule + } : inner, deep ? void 0 : { shouldTransform: (node$1) => !includes(structuralKinds, node$1.kind) })); + } + hasEqualMorphs(r) { + if (!this.includesTransform && !r.includesTransform) return true; + if (!arrayEquals(this.shallowMorphs, r.shallowMorphs)) return false; + if (!arrayEquals(this.flatMorphs, r.flatMorphs, { isEqual: (l, r$1) => l.propString === r$1.propString && (l.node.hasKind("morph") && r$1.node.hasKind("morph") ? l.node.hasEqualMorphs(r$1.node) : l.node.hasKind("intersection") && r$1.node.hasKind("intersection") ? l.node.structure?.structuralMorphRef === r$1.node.structure?.structuralMorphRef : false) })) return false; + return true; + } + onDeepUndeclaredKey(behavior) { + return this.onUndeclaredKey({ + rule: behavior, + deep: true + }); + } + filter(predicate) { + return this.constrainIn("predicate", predicate); + } + divisibleBy(schema$1) { + return this.constrain("divisor", schema$1); + } + matching(schema$1) { + return this.constrain("pattern", schema$1); + } + atLeast(schema$1) { + return this.constrain("min", schema$1); + } + atMost(schema$1) { + return this.constrain("max", schema$1); + } + moreThan(schema$1) { + return this.constrain("min", exclusivizeRangeSchema(schema$1)); + } + lessThan(schema$1) { + return this.constrain("max", exclusivizeRangeSchema(schema$1)); + } + atLeastLength(schema$1) { + return this.constrain("minLength", schema$1); + } + atMostLength(schema$1) { + return this.constrain("maxLength", schema$1); + } + moreThanLength(schema$1) { + return this.constrain("minLength", exclusivizeRangeSchema(schema$1)); + } + lessThanLength(schema$1) { + return this.constrain("maxLength", exclusivizeRangeSchema(schema$1)); + } + exactlyLength(schema$1) { + return this.constrain("exactLength", schema$1); + } + atOrAfter(schema$1) { + return this.constrain("after", schema$1); + } + atOrBefore(schema$1) { + return this.constrain("before", schema$1); + } + laterThan(schema$1) { + return this.constrain("after", exclusivizeRangeSchema(schema$1)); + } + earlierThan(schema$1) { + return this.constrain("before", exclusivizeRangeSchema(schema$1)); + } +}; +const emptyBrandNameMessage = `Expected a non-empty brand name after #`; +const exclusivizeRangeSchema = (schema$1) => typeof schema$1 === "object" && !(schema$1 instanceof Date) ? { + ...schema$1, + exclusive: true +} : { + rule: schema$1, + exclusive: true +}; +const typeOrTermExtends = (t, base) => hasArkKind(base, "root") ? hasArkKind(t, "root") ? t.extends(base) : base.allows(t) : hasArkKind(t, "root") ? t.hasUnit(base) : base === t; +const structureOf = (branch) => { + if (branch.hasKind("morph")) return null; + if (branch.hasKind("intersection")) return branch.inner.structure ?? 
(branch.basis?.domain === "object" ? branch.$.bindReference($ark.intrinsic.emptyStructure) : null); + if (branch.isBasis() && branch.domain === "object") return branch.$.bindReference($ark.intrinsic.emptyStructure); + return null; +}; +const writeLiteralUnionEntriesMessage = (expression) => `Props cannot be extracted from a union. Use .distribute to extract props from each branch instead. Received: +${expression}`; +const writeNonStructuralOperandMessage = (operation, operand) => `${operation} operand must be an object (was ${operand})`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/utils.js +const defineRightwardIntersections = (kind, implementation$22) => flatMorph(schemaKindsRightOf(kind), (i, kind$1) => [kind$1, implementation$22]); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/alias.js +const normalizeAliasSchema = (schema$1) => typeof schema$1 === "string" ? { reference: schema$1 } : schema$1; +const neverIfDisjoint = (result) => result instanceof Disjoint ? $ark.intrinsic.never.internal : result; +const implementation$10 = implementNode({ + kind: "alias", + hasAssociatedError: false, + collapsibleKey: "reference", + keys: { + reference: { serialize: (s) => s.startsWith("$") ? s : `$ark.${s}` }, + resolve: {} + }, + normalize: normalizeAliasSchema, + defaults: { description: (node$1) => node$1.reference }, + intersections: { + alias: (l, r, ctx) => ctx.$.lazilyResolve(() => neverIfDisjoint(intersectOrPipeNodes(l.resolution, r.resolution, ctx)), `${l.reference}${ctx.pipe ? "=>" : "&"}${r.reference}`), + ...defineRightwardIntersections("alias", (l, r, ctx) => { + if (r.isUnknown()) return l; + if (r.isNever()) return r; + if (r.isBasis() && !r.overlaps($ark.intrinsic.object)) return Disjoint.init("assignability", $ark.intrinsic.object, r); + return ctx.$.lazilyResolve(() => neverIfDisjoint(intersectOrPipeNodes(l.resolution, r, ctx)), `${l.reference}${ctx.pipe ? 
"=>" : "&"}${r.id}`); + }) + } +}); +var AliasNode = class extends BaseRoot { + expression = this.reference; + structure = void 0; + get resolution() { + const result = this._resolve(); + return nodesByRegisteredId[this.id] = result; + } + _resolve() { + if (this.resolve) return this.resolve(); + if (this.reference[0] === "$") return this.$.resolveRoot(this.reference.slice(1)); + const id = this.reference; + let resolution = nodesByRegisteredId[id]; + const seen = []; + while (hasArkKind(resolution, "context")) { + if (seen.includes(resolution.id)) return throwParseError(writeShallowCycleErrorMessage(resolution.id, seen)); + seen.push(resolution.id); + resolution = nodesByRegisteredId[resolution.id]; + } + if (!hasArkKind(resolution, "root")) return throwInternalError(`Unexpected resolution for reference ${this.reference} +Seen: [${seen.join("->")}] +Resolution: ${printable(resolution)}`); + return resolution; + } + get resolutionId() { + if (this.reference.includes("&") || this.reference.includes("=>")) return this.resolution.id; + if (this.reference[0] !== "$") return this.reference; + const alias = this.reference.slice(1); + const resolution = this.$.resolutions[alias]; + if (typeof resolution === "string") return resolution; + if (hasArkKind(resolution, "root")) return resolution.id; + return throwInternalError(`Unexpected resolution for reference ${this.reference}: ${printable(resolution)}`); + } + get defaultShortDescription() { + return domainDescriptions.object; + } + innerToJsonSchema(ctx) { + return this.resolution.toJsonSchemaRecurse(ctx); + } + traverseAllows = (data, ctx) => { + const seen = ctx.seen[this.reference]; + if (seen?.includes(data)) return true; + ctx.seen[this.reference] = append(seen, data); + return this.resolution.traverseAllows(data, ctx); + }; + traverseApply = (data, ctx) => { + const seen = ctx.seen[this.reference]; + if (seen?.includes(data)) return; + ctx.seen[this.reference] = append(seen, data); + this.resolution.traverseApply(data, ctx); + }; + compile(js) { + const id = this.resolutionId; + js.if(`ctx.seen.${id} && ctx.seen.${id}.includes(data)`, () => js.return(true)); + js.if(`!ctx.seen.${id}`, () => js.line(`ctx.seen.${id} = []`)); + js.line(`ctx.seen.${id}.push(data)`); + js.return(js.invoke(id)); + } +}; +const writeShallowCycleErrorMessage = (name, seen) => `Alias '${name}' has a shallow resolution cycle: ${[...seen, name].join("->")}`; +const Alias = { + implementation: implementation$10, + Node: AliasNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/basis.js +var InternalBasis = class extends BaseRoot { + traverseApply = (data, ctx) => { + if (!this.traverseAllows(data, ctx)) ctx.errorFromNodeContext(this.errorContext); + }; + get errorContext() { + return { + code: this.kind, + description: this.description, + meta: this.meta, + ...this.inner + }; + } + get compiledErrorContext() { + return compileObjectLiteral(this.errorContext); + } + compile(js) { + if (js.traversalKind === "Allows") js.return(this.compiledCondition); + else js.if(this.compiledNegation, () => js.line(`${js.ctx}.errorFromNodeContext(${this.compiledErrorContext})`)); + } +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/domain.js +const implementation$9 = implementNode({ + kind: "domain", + hasAssociatedError: true, + collapsibleKey: "domain", + keys: { + domain: {}, + numberAllowsNaN: {} + }, + normalize: (schema$1) => typeof schema$1 === "string" ? 
{ domain: schema$1 } : hasKey(schema$1, "numberAllowsNaN") && schema$1.domain !== "number" ? throwParseError(Domain.writeBadAllowNanMessage(schema$1.domain)) : schema$1, + applyConfig: (schema$1, config) => schema$1.numberAllowsNaN === void 0 && schema$1.domain === "number" && config.numberAllowsNaN ? { + ...schema$1, + numberAllowsNaN: true + } : schema$1, + defaults: { + description: (node$1) => domainDescriptions[node$1.domain], + actual: (data) => Number.isNaN(data) ? "NaN" : domainDescriptions[domainOf(data)] + }, + intersections: { domain: (l, r) => l.domain === "number" && r.domain === "number" ? l.numberAllowsNaN ? r : l : Disjoint.init("domain", l, r) } +}); +var DomainNode = class extends InternalBasis { + requiresNaNCheck = this.domain === "number" && !this.numberAllowsNaN; + traverseAllows = this.requiresNaNCheck ? (data) => typeof data === "number" && !Number.isNaN(data) : (data) => domainOf(data) === this.domain; + compiledCondition = this.domain === "object" ? `((typeof data === "object" && data !== null) || typeof data === "function")` : `typeof data === "${this.domain}"${this.requiresNaNCheck ? " && !Number.isNaN(data)" : ""}`; + compiledNegation = this.domain === "object" ? `((typeof data !== "object" || data === null) && typeof data !== "function")` : `typeof data !== "${this.domain}"${this.requiresNaNCheck ? " || Number.isNaN(data)" : ""}`; + expression = this.numberAllowsNaN ? "number | NaN" : this.domain; + get nestableExpression() { + return this.numberAllowsNaN ? `(${this.expression})` : this.expression; + } + get defaultShortDescription() { + return domainDescriptions[this.domain]; + } + innerToJsonSchema(ctx) { + if (this.domain === "bigint" || this.domain === "symbol") return ctx.fallback.domain({ + code: "domain", + base: {}, + domain: this.domain + }); + return { type: this.domain }; + } +}; +const Domain = { + implementation: implementation$9, + Node: DomainNode, + writeBadAllowNanMessage: (actual) => `numberAllowsNaN may only be specified with domain "number" (was ${actual})` +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/intersection.js +const implementation$8 = implementNode({ + kind: "intersection", + hasAssociatedError: true, + normalize: (rawSchema) => { + if (isNode(rawSchema)) return rawSchema; + const { structure,...schema$1 } = rawSchema; + const hasRootStructureKey = !!structure; + const normalizedStructure = structure ?? {}; + const normalized = flatMorph(schema$1, (k, v) => { + if (isKeyOf(k, structureKeys)) { + if (hasRootStructureKey) throwParseError(`Flattened structure key ${k} cannot be specified alongside a root 'structure' key.`); + normalizedStructure[k] = v; + return []; + } + return [k, v]; + }); + if (hasArkKind(normalizedStructure, "constraint") || !isEmptyObject(normalizedStructure)) normalized.structure = normalizedStructure; + return normalized; + }, + finalizeInnerJson: ({ structure,...rest }) => hasDomain(structure, "object") ? 
{ + ...structure, + ...rest + } : rest, + keys: { + domain: { + child: true, + parse: (schema$1, ctx) => ctx.$.node("domain", schema$1) + }, + proto: { + child: true, + parse: (schema$1, ctx) => ctx.$.node("proto", schema$1) + }, + structure: { + child: true, + parse: (schema$1, ctx) => ctx.$.node("structure", schema$1), + serialize: (node$1) => { + if (!node$1.sequence?.minLength) return node$1.collapsibleJson; + const { sequence,...structureJson } = node$1.collapsibleJson; + const { minVariadicLength,...sequenceJson } = sequence; + const collapsibleSequenceJson = sequenceJson.variadic && Object.keys(sequenceJson).length === 1 ? sequenceJson.variadic : sequenceJson; + return { + ...structureJson, + sequence: collapsibleSequenceJson + }; + } + }, + divisor: { + child: true, + parse: constraintKeyParser("divisor") + }, + max: { + child: true, + parse: constraintKeyParser("max") + }, + min: { + child: true, + parse: constraintKeyParser("min") + }, + maxLength: { + child: true, + parse: constraintKeyParser("maxLength") + }, + minLength: { + child: true, + parse: constraintKeyParser("minLength") + }, + exactLength: { + child: true, + parse: constraintKeyParser("exactLength") + }, + before: { + child: true, + parse: constraintKeyParser("before") + }, + after: { + child: true, + parse: constraintKeyParser("after") + }, + pattern: { + child: true, + parse: constraintKeyParser("pattern") + }, + predicate: { + child: true, + parse: constraintKeyParser("predicate") + } + }, + reduce: (inner, $) => intersectIntersections({}, inner, { + $, + invert: false, + pipe: false + }), + defaults: { + description: (node$1) => { + if (node$1.children.length === 0) return "unknown"; + if (node$1.structure) return node$1.structure.description; + const childDescriptions = []; + if (node$1.basis && !node$1.refinements.some((r) => r.impl.obviatesBasisDescription)) childDescriptions.push(node$1.basis.description); + if (node$1.refinements.length) { + const sortedRefinementDescriptions = node$1.refinements.toSorted((l, r) => l.kind === "min" && r.kind === "max" ? -1 : 0).map((r) => r.description); + childDescriptions.push(...sortedRefinementDescriptions); + } + if (node$1.inner.predicate) childDescriptions.push(...node$1.inner.predicate.map((p) => p.description)); + return childDescriptions.join(" and "); + }, + expected: (source) => ` ◦ ${source.errors.map((e) => e.expected).join("\n ◦ ")}`, + problem: (ctx) => `(${ctx.actual}) must be...\n${ctx.expected}` + }, + intersections: { + intersection: (l, r, ctx) => intersectIntersections(l.inner, r.inner, ctx), + ...defineRightwardIntersections("intersection", (l, r, ctx) => { + if (l.children.length === 0) return r; + const { domain, proto,...lInnerConstraints } = l.inner; + const lBasis = proto ?? domain; + const basis = lBasis ? intersectOrPipeNodes(lBasis, r, ctx) : r; + return basis instanceof Disjoint ? basis : l?.basis?.equals(basis) ? l : l.$.node("intersection", { + ...lInnerConstraints, + [basis.kind]: basis + }, { prereduced: true }); + }) + } +}); +var IntersectionNode = class extends BaseRoot { + basis = this.inner.domain ?? this.inner.proto ?? null; + refinements = this.children.filter((node$1) => node$1.isRefinement()); + structure = this.inner.structure; + expression = writeIntersectionExpression(this); + get shallowMorphs() { + return this.inner.structure?.structuralMorph ? [this.inner.structure.structuralMorph] : []; + } + get defaultShortDescription() { + return this.basis?.defaultShortDescription ?? 
"present"; + } + innerToJsonSchema(ctx) { + return this.children.reduce((schema$1, child) => child.isBasis() ? child.toJsonSchemaRecurse(ctx) : child.reduceJsonSchema(schema$1, ctx), {}); + } + traverseAllows = (data, ctx) => this.children.every((child) => child.traverseAllows(data, ctx)); + traverseApply = (data, ctx) => { + const errorCount = ctx.currentErrorCount; + if (this.basis) { + this.basis.traverseApply(data, ctx); + if (ctx.currentErrorCount > errorCount) return; + } + if (this.refinements.length) { + for (let i = 0; i < this.refinements.length - 1; i++) { + this.refinements[i].traverseApply(data, ctx); + if (ctx.failFast && ctx.currentErrorCount > errorCount) return; + } + this.refinements.at(-1).traverseApply(data, ctx); + if (ctx.currentErrorCount > errorCount) return; + } + if (this.structure) { + this.structure.traverseApply(data, ctx); + if (ctx.currentErrorCount > errorCount) return; + } + if (this.inner.predicate) { + for (let i = 0; i < this.inner.predicate.length - 1; i++) { + this.inner.predicate[i].traverseApply(data, ctx); + if (ctx.failFast && ctx.currentErrorCount > errorCount) return; + } + this.inner.predicate.at(-1).traverseApply(data, ctx); + } + }; + compile(js) { + if (js.traversalKind === "Allows") { + for (const child of this.children) js.check(child); + js.return(true); + return; + } + js.initializeErrorCount(); + if (this.basis) { + js.check(this.basis); + if (this.children.length > 1) js.returnIfFail(); + } + if (this.refinements.length) { + for (let i = 0; i < this.refinements.length - 1; i++) { + js.check(this.refinements[i]); + js.returnIfFailFast(); + } + js.check(this.refinements.at(-1)); + if (this.structure || this.inner.predicate) js.returnIfFail(); + } + if (this.structure) { + js.check(this.structure); + if (this.inner.predicate) js.returnIfFail(); + } + if (this.inner.predicate) { + for (let i = 0; i < this.inner.predicate.length - 1; i++) { + js.check(this.inner.predicate[i]); + js.returnIfFail(); + } + js.check(this.inner.predicate.at(-1)); + } + } +}; +const Intersection = { + implementation: implementation$8, + Node: IntersectionNode +}; +const writeIntersectionExpression = (node$1) => { + let expression = node$1.structure?.expression || `${node$1.basis && !node$1.refinements.some((n) => n.impl.obviatesBasisExpression) ? node$1.basis.nestableExpression + " " : ""}${node$1.refinements.map((n) => n.expression).join(" & ")}` || "unknown"; + if (expression === "Array == 0") expression = "[]"; + return expression; +}; +const intersectIntersections = (l, r, ctx) => { + const baseInner = {}; + const lBasis = l.proto ?? l.domain; + const rBasis = r.proto ?? r.domain; + const basisResult = lBasis ? rBasis ? intersectOrPipeNodes(lBasis, rBasis, ctx) : lBasis : rBasis; + if (basisResult instanceof Disjoint) return basisResult; + if (basisResult) baseInner[basisResult.kind] = basisResult; + return intersectConstraints({ + kind: "intersection", + baseInner, + l: flattenConstraints(l), + r: flattenConstraints(r), + roots: [], + ctx + }); +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/morph.js +const implementation$7 = implementNode({ + kind: "morph", + hasAssociatedError: false, + keys: { + in: { + child: true, + parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) + }, + morphs: { + parse: liftArray, + serialize: (morphs) => morphs.map((m) => hasArkKind(m, "root") ? 
m.json : registeredReference(m)) + }, + declaredIn: { + child: false, + serialize: (node$1) => node$1.json + }, + declaredOut: { + child: false, + serialize: (node$1) => node$1.json + } + }, + normalize: (schema$1) => schema$1, + defaults: { description: (node$1) => `a morph from ${node$1.in.description} to ${node$1.out?.description ?? "unknown"}` }, + intersections: { + morph: (l, r, ctx) => { + if (!l.hasEqualMorphs(r)) return throwParseError(writeMorphIntersectionMessage(l.expression, r.expression)); + const inTersection = intersectOrPipeNodes(l.in, r.in, ctx); + if (inTersection instanceof Disjoint) return inTersection; + const baseInner = { morphs: l.morphs }; + if (l.declaredIn || r.declaredIn) { + const declaredIn = intersectOrPipeNodes(l.in, r.in, ctx); + if (declaredIn instanceof Disjoint) return declaredIn.throw(); + else baseInner.declaredIn = declaredIn; + } + if (l.declaredOut || r.declaredOut) { + const declaredOut = intersectOrPipeNodes(l.out, r.out, ctx); + if (declaredOut instanceof Disjoint) return declaredOut.throw(); + else baseInner.declaredOut = declaredOut; + } + return inTersection.distribute((inBranch) => ctx.$.node("morph", { + ...baseInner, + in: inBranch + }), ctx.$.parseSchema); + }, + ...defineRightwardIntersections("morph", (l, r, ctx) => { + const inTersection = l.inner.in ? intersectOrPipeNodes(l.inner.in, r, ctx) : r; + return inTersection instanceof Disjoint ? inTersection : inTersection.equals(l.inner.in) ? l : ctx.$.node("morph", { + ...l.inner, + in: inTersection + }); + }) + } +}); +var MorphNode = class extends BaseRoot { + serializedMorphs = this.morphs.map(registeredReference); + compiledMorphs = `[${this.serializedMorphs}]`; + lastMorph = this.inner.morphs.at(-1); + lastMorphIfNode = hasArkKind(this.lastMorph, "root") ? this.lastMorph : void 0; + introspectableIn = this.inner.in; + introspectableOut = this.lastMorphIfNode ? Object.assign(this.referencesById, this.lastMorphIfNode.referencesById) && this.lastMorphIfNode.out : void 0; + get shallowMorphs() { + return Array.isArray(this.inner.in?.shallowMorphs) ? [...this.inner.in.shallowMorphs, ...this.morphs] : this.morphs; + } + get in() { + return this.declaredIn ?? this.inner.in?.in ?? $ark.intrinsic.unknown.internal; + } + get out() { + return this.declaredOut ?? this.introspectableOut ?? $ark.intrinsic.unknown.internal; + } + declareIn(declaredIn) { + return this.$.node("morph", { + ...this.inner, + declaredIn + }); + } + declareOut(declaredOut) { + return this.$.node("morph", { + ...this.inner, + declaredOut + }); + } + expression = `(In: ${this.in.expression}) => ${this.lastMorphIfNode ? "To" : "Out"}<${this.out.expression}>`; + get defaultShortDescription() { + return this.in.meta.description ?? this.in.defaultShortDescription; + } + innerToJsonSchema(ctx) { + return ctx.fallback.morph({ + code: "morph", + base: this.in.toJsonSchemaRecurse(ctx), + out: this.introspectableOut?.toJsonSchemaRecurse(ctx) ?? 
null + }); + } + compile(js) { + if (js.traversalKind === "Allows") { + if (!this.introspectableIn) return; + js.return(js.invoke(this.introspectableIn)); + return; + } + if (this.introspectableIn) js.line(js.invoke(this.introspectableIn)); + js.line(`ctx.queueMorphs(${this.compiledMorphs})`); + } + traverseAllows = (data, ctx) => !this.introspectableIn || this.introspectableIn.traverseAllows(data, ctx); + traverseApply = (data, ctx) => { + if (this.introspectableIn) this.introspectableIn.traverseApply(data, ctx); + ctx.queueMorphs(this.morphs); + }; + /** Check if the morphs of r are equal to those of this node */ + hasEqualMorphs(r) { + return arrayEquals(this.morphs, r.morphs, { isEqual: (lMorph, rMorph) => lMorph === rMorph || hasArkKind(lMorph, "root") && hasArkKind(rMorph, "root") && lMorph.equals(rMorph) }); + } +}; +const Morph = { + implementation: implementation$7, + Node: MorphNode +}; +const writeMorphIntersectionMessage = (lDescription, rDescription) => `The intersection of distinct morphs at a single path is indeterminate: +Left: ${lDescription} +Right: ${rDescription}`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/proto.js +const implementation$6 = implementNode({ + kind: "proto", + hasAssociatedError: true, + collapsibleKey: "proto", + keys: { + proto: { serialize: (ctor) => getBuiltinNameOfConstructor(ctor) ?? defaultValueSerializer(ctor) }, + dateAllowsInvalid: {} + }, + normalize: (schema$1) => { + const normalized = typeof schema$1 === "string" ? { proto: builtinConstructors[schema$1] } : typeof schema$1 === "function" ? isNode(schema$1) ? schema$1 : { proto: schema$1 } : typeof schema$1.proto === "string" ? { + ...schema$1, + proto: builtinConstructors[schema$1.proto] + } : schema$1; + if (typeof normalized.proto !== "function") throwParseError(Proto.writeInvalidSchemaMessage(normalized.proto)); + if (hasKey(normalized, "dateAllowsInvalid") && normalized.proto !== Date) throwParseError(Proto.writeBadInvalidDateMessage(normalized.proto)); + return normalized; + }, + applyConfig: (schema$1, config) => { + if (schema$1.dateAllowsInvalid === void 0 && schema$1.proto === Date && config.dateAllowsInvalid) return { + ...schema$1, + dateAllowsInvalid: true + }; + return schema$1; + }, + defaults: { + description: (node$1) => node$1.builtinName ? objectKindDescriptions[node$1.builtinName] : `an instance of ${node$1.proto.name}`, + actual: (data) => data instanceof Date && data.toString() === "Invalid Date" ? "an invalid Date" : objectKindOrDomainOf(data) + }, + intersections: { + proto: (l, r) => l.proto === Date && r.proto === Date ? l.dateAllowsInvalid ? r : l : constructorExtends(l.proto, r.proto) ? l : constructorExtends(r.proto, l.proto) ? r : Disjoint.init("proto", l, r), + domain: (proto, domain) => domain.domain === "object" ? proto : Disjoint.init("domain", $ark.intrinsic.object.internal, domain) + } +}); +var ProtoNode = class extends InternalBasis { + builtinName = getBuiltinNameOfConstructor(this.proto); + serializedConstructor = this.json.proto; + requiresInvalidDateCheck = this.proto === Date && !this.dateAllowsInvalid; + traverseAllows = this.requiresInvalidDateCheck ? (data) => data instanceof Date && data.toString() !== "Invalid Date" : (data) => data instanceof this.proto; + compiledCondition = `data instanceof ${this.serializedConstructor}${this.requiresInvalidDateCheck ? 
` && data.toString() !== "Invalid Date"` : ""}`; + compiledNegation = `!(${this.compiledCondition})`; + innerToJsonSchema(ctx) { + switch (this.builtinName) { + case "Array": return { type: "array" }; + case "Date": return ctx.fallback.date?.({ + code: "date", + base: {} + }) ?? ctx.fallback.proto({ + code: "proto", + base: {}, + proto: this.proto + }); + default: return ctx.fallback.proto({ + code: "proto", + base: {}, + proto: this.proto + }); + } + } + expression = this.dateAllowsInvalid ? "Date | InvalidDate" : this.proto.name; + get nestableExpression() { + return this.dateAllowsInvalid ? `(${this.expression})` : this.expression; + } + domain = "object"; + get defaultShortDescription() { + return this.description; + } +}; +const Proto = { + implementation: implementation$6, + Node: ProtoNode, + writeBadInvalidDateMessage: (actual) => `dateAllowsInvalid may only be specified with constructor Date (was ${actual.name})`, + writeInvalidSchemaMessage: (actual) => `instanceOf operand must be a function (was ${domainOf(actual)})` +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/union.js +const implementation$5 = implementNode({ + kind: "union", + hasAssociatedError: true, + collapsibleKey: "branches", + keys: { + ordered: {}, + branches: { + child: true, + parse: (schema$1, ctx) => { + const branches = []; + for (const branchSchema of schema$1) { + const branchNodes = hasArkKind(branchSchema, "root") ? branchSchema.branches : ctx.$.parseSchema(branchSchema).branches; + for (const node$1 of branchNodes) if (node$1.hasKind("morph")) { + const matchingMorphIndex = branches.findIndex((matching) => matching.hasKind("morph") && matching.hasEqualMorphs(node$1)); + if (matchingMorphIndex === -1) branches.push(node$1); + else { + const matchingMorph = branches[matchingMorphIndex]; + branches[matchingMorphIndex] = ctx.$.node("morph", { + ...matchingMorph.inner, + in: matchingMorph.in.rawOr(node$1.in) + }); + } + } else branches.push(node$1); + } + if (!ctx.def.ordered) branches.sort((l, r) => l.hash < r.hash ? -1 : 1); + return branches; + } + } + }, + normalize: (schema$1) => isArray(schema$1) ? { branches: schema$1 } : schema$1, + reduce: (inner, $) => { + const reducedBranches = reduceBranches(inner); + if (reducedBranches.length === 1) return reducedBranches[0]; + if (reducedBranches.length === inner.branches.length) return; + return $.node("union", { + ...inner, + branches: reducedBranches + }, { prereduced: true }); + }, + defaults: { + description: (node$1) => node$1.distribute((branch) => branch.description, describeBranches), + expected: (ctx) => { + const byPath = groupBy(ctx.errors, "propString"); + const pathDescriptions = Object.entries(byPath).map(([path$1, errors]) => { + const branchesAtPath = []; + for (const errorAtPath of errors) appendUnique(branchesAtPath, errorAtPath.expected); + const expected = describeBranches(branchesAtPath); + const actual = errors.every((e) => e.actual === errors[0].actual) ? 
errors[0].actual : printable(errors[0].data); + return `${path$1 && `${path$1} `}must be ${expected}${actual && ` (was ${actual})`}`; + }); + return describeBranches(pathDescriptions); + }, + problem: (ctx) => ctx.expected, + message: (ctx) => ctx.problem + }, + intersections: { + union: (l, r, ctx) => { + if (l.isNever !== r.isNever) return Disjoint.init("presence", l, r); + let resultBranches; + if (l.ordered) { + if (r.ordered) throwParseError(writeOrderedIntersectionMessage(l.expression, r.expression)); + resultBranches = intersectBranches(r.branches, l.branches, ctx); + if (resultBranches instanceof Disjoint) resultBranches.invert(); + } else resultBranches = intersectBranches(l.branches, r.branches, ctx); + if (resultBranches instanceof Disjoint) return resultBranches; + return ctx.$.parseSchema(l.ordered || r.ordered ? { + branches: resultBranches, + ordered: true + } : { branches: resultBranches }); + }, + ...defineRightwardIntersections("union", (l, r, ctx) => { + const branches = intersectBranches(l.branches, [r], ctx); + if (branches instanceof Disjoint) return branches; + if (branches.length === 1) return branches[0]; + return ctx.$.parseSchema(l.ordered ? { + branches, + ordered: true + } : { branches }); + }) + } +}); +var UnionNode = class extends BaseRoot { + isBoolean = this.branches.length === 2 && this.branches[0].hasUnit(false) && this.branches[1].hasUnit(true); + get branchGroups() { + const branchGroups = []; + let firstBooleanIndex = -1; + for (const branch of this.branches) { + if (branch.hasKind("unit") && branch.domain === "boolean") { + if (firstBooleanIndex === -1) { + firstBooleanIndex = branchGroups.length; + branchGroups.push(branch); + } else branchGroups[firstBooleanIndex] = $ark.intrinsic.boolean; + continue; + } + branchGroups.push(branch); + } + return branchGroups; + } + unitBranches = this.branches.filter((n) => n.in.hasKind("unit")); + discriminant = this.discriminate(); + discriminantJson = this.discriminant ? 
discriminantToJson(this.discriminant) : null; + expression = this.distribute((n) => n.nestableExpression, expressBranches); + createBranchedOptimisticRootApply() { + return (data, onFail) => { + const optimisticResult = this.traverseOptimistic(data); + if (optimisticResult !== unset) return optimisticResult; + const ctx = new Traversal(data, this.$.resolvedConfig); + this.traverseApply(data, ctx); + return ctx.finalize(onFail); + }; + } + get shallowMorphs() { + return this.branches.reduce((morphs, branch) => appendUnique(morphs, branch.shallowMorphs), []); + } + get defaultShortDescription() { + return this.distribute((branch) => branch.defaultShortDescription, describeBranches); + } + innerToJsonSchema(ctx) { + if (this.branchGroups.length === 1 && this.branchGroups[0].equals($ark.intrinsic.boolean)) return { type: "boolean" }; + const jsonSchemaBranches = this.branchGroups.map((group) => group.toJsonSchemaRecurse(ctx)); + if (jsonSchemaBranches.every((branch) => Object.keys(branch).length === 1 && hasKey(branch, "const"))) return { enum: jsonSchemaBranches.map((branch) => branch.const) }; + return { anyOf: jsonSchemaBranches }; + } + traverseAllows = (data, ctx) => this.branches.some((b) => b.traverseAllows(data, ctx)); + traverseApply = (data, ctx) => { + const errors = []; + for (let i = 0; i < this.branches.length; i++) { + ctx.pushBranch(); + this.branches[i].traverseApply(data, ctx); + if (!ctx.hasError()) { + if (this.branches[i].includesTransform) return ctx.queuedMorphs.push(...ctx.popBranch().queuedMorphs); + return ctx.popBranch(); + } + errors.push(ctx.popBranch().error); + } + ctx.errorFromNodeContext({ + code: "union", + errors, + meta: this.meta + }); + }; + traverseOptimistic = (data) => { + for (let i = 0; i < this.branches.length; i++) { + const branch = this.branches[i]; + if (branch.traverseAllows(data)) { + if (branch.contextFreeMorph) return branch.contextFreeMorph(data); + return data; + } + } + return unset; + }; + compile(js) { + if (!this.discriminant || this.unitBranches.length === this.branches.length && this.branches.length === 2) return this.compileIndiscriminable(js); + let condition = this.discriminant.optionallyChainedPropString; + if (this.discriminant.kind === "domain") condition = `typeof ${condition} === "object" ? ${condition} === null ? "null" : "object" : typeof ${condition} === "function" ? "object" : typeof ${condition}`; + const cases = this.discriminant.cases; + const caseKeys = Object.keys(cases); + const { optimistic } = js; + js.optimistic = false; + js.block(`switch(${condition})`, () => { + for (const k in cases) { + const v = cases[k]; + const caseCondition = k === "default" ? k : `case ${k}`; + js.line(`${caseCondition}: return ${v === true ? optimistic ? js.data : v : optimistic ? `${js.invoke(v)} ? ${v.contextFreeMorph ? `${registeredReference(v.contextFreeMorph)}(${js.data})` : js.data} : "${unset}"` : js.invoke(v)}`); + } + return js; + }); + if (js.traversalKind === "Allows") { + js.return(optimistic ? `"${unset}"` : false); + return; + } + const expected = describeBranches(this.discriminant.kind === "domain" ? caseKeys.map((k) => { + const jsTypeOf = k.slice(1, -1); + return jsTypeOf === "function" ? domainDescriptions.object : domainDescriptions[jsTypeOf]; + }) : caseKeys); + const serializedPathSegments = this.discriminant.path.map((k) => typeof k === "symbol" ? registeredReference(k) : JSON.stringify(k)); + const serializedExpected = JSON.stringify(expected); + const serializedActual = this.discriminant.kind === "domain" ? 
`${serializedTypeOfDescriptions}[${condition}]` : `${serializedPrintable}(${condition})`; + js.line(`ctx.errorFromNodeContext({ + code: "predicate", + expected: ${serializedExpected}, + actual: ${serializedActual}, + relativePath: [${serializedPathSegments}], + meta: ${this.compiledMeta} +})`); + } + compileIndiscriminable(js) { + if (js.traversalKind === "Apply") { + js.const("errors", "[]"); + for (const branch of this.branches) js.line("ctx.pushBranch()").line(js.invoke(branch)).if("!ctx.hasError()", () => js.return(branch.includesTransform ? "ctx.queuedMorphs.push(...ctx.popBranch().queuedMorphs)" : "ctx.popBranch()")).line("errors.push(ctx.popBranch().error)"); + js.line(`ctx.errorFromNodeContext({ code: "union", errors, meta: ${this.compiledMeta} })`); + } else { + const { optimistic } = js; + js.optimistic = false; + for (const branch of this.branches) js.if(`${js.invoke(branch)}`, () => js.return(optimistic ? branch.contextFreeMorph ? `${registeredReference(branch.contextFreeMorph)}(${js.data})` : js.data : true)); + js.return(optimistic ? `"${unset}"` : false); + } + } + get nestableExpression() { + return this.isBoolean ? "boolean" : `(${this.expression})`; + } + discriminate() { + if (this.branches.length < 2 || this.isCyclic) return null; + if (this.unitBranches.length === this.branches.length) { + const cases$1 = flatMorph(this.unitBranches, (i, n) => [`${n.in.serializedValue}`, n.hasKind("morph") ? n : true]); + return { + kind: "unit", + path: [], + optionallyChainedPropString: "data", + cases: cases$1 + }; + } + const candidates = []; + for (let lIndex = 0; lIndex < this.branches.length - 1; lIndex++) { + const l = this.branches[lIndex]; + for (let rIndex = lIndex + 1; rIndex < this.branches.length; rIndex++) { + const r = this.branches[rIndex]; + const result = intersectNodesRoot(l.in, r.in, l.$); + if (!(result instanceof Disjoint)) continue; + for (const entry of result) { + if (!entry.kind || entry.optional) continue; + let lSerialized; + let rSerialized; + if (entry.kind === "domain") { + const lValue = entry.l; + const rValue = entry.r; + lSerialized = `"${typeof lValue === "string" ? lValue : lValue.domain}"`; + rSerialized = `"${typeof rValue === "string" ? rValue : rValue.domain}"`; + } else if (entry.kind === "unit") { + lSerialized = entry.l.serializedValue; + rSerialized = entry.r.serializedValue; + } else continue; + const matching = candidates.find((d) => arrayEquals(d.path, entry.path) && d.kind === entry.kind); + if (!matching) candidates.push({ + kind: entry.kind, + cases: { + [lSerialized]: { + branchIndices: [lIndex], + condition: entry.l + }, + [rSerialized]: { + branchIndices: [rIndex], + condition: entry.r + } + }, + path: entry.path + }); + else { + if (matching.cases[lSerialized]) matching.cases[lSerialized].branchIndices = appendUnique(matching.cases[lSerialized].branchIndices, lIndex); + else matching.cases[lSerialized] ??= { + branchIndices: [lIndex], + condition: entry.l + }; + if (matching.cases[rSerialized]) matching.cases[rSerialized].branchIndices = appendUnique(matching.cases[rSerialized].branchIndices, rIndex); + else matching.cases[rSerialized] ??= { + branchIndices: [rIndex], + condition: entry.r + }; + } + } + } + } + const orderedCandidates = this.ordered ? 
orderCandidates(candidates, this.branches) : candidates; + if (!orderedCandidates.length) return null; + const ctx = createCaseResolutionContext(orderedCandidates, this); + const cases = {}; + for (const k in ctx.best.cases) { + const resolution = resolveCase(ctx, k); + if (resolution === null) { + cases[k] = true; + continue; + } + if (resolution.length === this.branches.length) return null; + if (this.ordered) resolution.sort((l, r) => l.originalIndex - r.originalIndex); + const branches = resolution.map((entry) => entry.branch); + const caseNode = branches.length === 1 ? branches[0] : this.$.node("union", this.ordered ? { + branches, + ordered: true + } : branches); + Object.assign(this.referencesById, caseNode.referencesById); + cases[k] = caseNode; + } + if (ctx.defaultEntries.length) { + const branches = ctx.defaultEntries.map((entry) => entry.branch); + cases.default = this.$.node("union", this.ordered ? { + branches, + ordered: true + } : branches, { prereduced: true }); + Object.assign(this.referencesById, cases.default.referencesById); + } + return Object.assign(ctx.location, { cases }); + } +}; +const createCaseResolutionContext = (orderedCandidates, node$1) => { + const best = orderedCandidates.sort((l, r) => Object.keys(r.cases).length - Object.keys(l.cases).length)[0]; + const location = { + kind: best.kind, + path: best.path, + optionallyChainedPropString: optionallyChainPropString(best.path) + }; + const defaultEntries = node$1.branches.map((branch, originalIndex) => ({ + originalIndex, + branch + })); + return { + best, + location, + defaultEntries, + node: node$1 + }; +}; +const resolveCase = (ctx, key) => { + const caseCtx = ctx.best.cases[key]; + const discriminantNode = discriminantCaseToNode(caseCtx.condition, ctx.location.path, ctx.node.$); + let resolvedEntries = []; + const nextDefaults = []; + for (let i = 0; i < ctx.defaultEntries.length; i++) { + const entry = ctx.defaultEntries[i]; + if (caseCtx.branchIndices.includes(entry.originalIndex)) { + const pruned = pruneDiscriminant(ctx.node.branches[entry.originalIndex], ctx.location); + if (pruned === null) resolvedEntries = null; + else resolvedEntries?.push({ + originalIndex: entry.originalIndex, + branch: pruned + }); + } else if (entry.branch.hasKind("alias") && discriminantNode.hasKind("domain") && discriminantNode.domain === "object") resolvedEntries?.push(entry); + else { + if (entry.branch.in.overlaps(discriminantNode)) { + const overlapping = pruneDiscriminant(entry.branch, ctx.location); + resolvedEntries?.push({ + originalIndex: entry.originalIndex, + branch: overlapping + }); + } + nextDefaults.push(entry); + } + } + ctx.defaultEntries = nextDefaults; + return resolvedEntries; +}; +const orderCandidates = (candidates, originalBranches) => { + const viableCandidates = candidates.filter((candidate) => { + const caseGroups = Object.values(candidate.cases).map((caseCtx) => caseCtx.branchIndices); + for (let i = 0; i < caseGroups.length - 1; i++) { + const currentGroup = caseGroups[i]; + for (let j = i + 1; j < caseGroups.length; j++) { + const nextGroup = caseGroups[j]; + for (const currentIndex of currentGroup) for (const nextIndex of nextGroup) if (currentIndex > nextIndex) { + if (originalBranches[currentIndex].overlaps(originalBranches[nextIndex])) return false; + } + } + } + return true; + }); + return viableCandidates; +}; +const discriminantCaseToNode = (caseDiscriminant, path$1, $) => { + let node$1 = caseDiscriminant === "undefined" ? $.node("unit", { unit: void 0 }) : caseDiscriminant === "null" ? 
$.node("unit", { unit: null }) : caseDiscriminant === "boolean" ? $.units([true, false]) : caseDiscriminant; + for (let i = path$1.length - 1; i >= 0; i--) { + const key = path$1[i]; + node$1 = $.node("intersection", typeof key === "number" ? { + proto: "Array", + sequence: [...range(key).map((_) => ({})), node$1] + } : { + domain: "object", + required: [{ + key, + value: node$1 + }] + }); + } + return node$1; +}; +const optionallyChainPropString = (path$1) => path$1.reduce((acc, k) => acc + compileLiteralPropAccess(k, true), "data"); +const serializedTypeOfDescriptions = registeredReference(jsTypeOfDescriptions); +const serializedPrintable = registeredReference(printable); +const Union = { + implementation: implementation$5, + Node: UnionNode +}; +const discriminantToJson = (discriminant) => ({ + kind: discriminant.kind, + path: discriminant.path.map((k) => typeof k === "string" ? k : compileSerializedValue(k)), + cases: flatMorph(discriminant.cases, (k, node$1) => [k, node$1 === true ? node$1 : node$1.hasKind("union") && node$1.discriminantJson ? node$1.discriminantJson : node$1.json]) +}); +const describeExpressionOptions = { + delimiter: " | ", + finalDelimiter: " | " +}; +const expressBranches = (expressions) => describeBranches(expressions, describeExpressionOptions); +const describeBranches = (descriptions, opts) => { + const delimiter = opts?.delimiter ?? ", "; + const finalDelimiter = opts?.finalDelimiter ?? " or "; + if (descriptions.length === 0) return "never"; + if (descriptions.length === 1) return descriptions[0]; + if (descriptions.length === 2 && descriptions[0] === "false" && descriptions[1] === "true" || descriptions[0] === "true" && descriptions[1] === "false") return "boolean"; + const seen = {}; + const unique = descriptions.filter((s) => seen[s] ? false : seen[s] = true); + const last = unique.pop(); + return `${unique.join(delimiter)}${unique.length ? finalDelimiter : ""}${last}`; +}; +const intersectBranches = (l, r, ctx) => { + const batchesByR = r.map(() => []); + for (let lIndex = 0; lIndex < l.length; lIndex++) { + let candidatesByR = {}; + for (let rIndex = 0; rIndex < r.length; rIndex++) { + if (batchesByR[rIndex] === null) continue; + if (l[lIndex].equals(r[rIndex])) { + batchesByR[rIndex] = null; + candidatesByR = {}; + break; + } + const branchIntersection = intersectOrPipeNodes(l[lIndex], r[rIndex], ctx); + if (branchIntersection instanceof Disjoint) continue; + if (branchIntersection.equals(l[lIndex])) { + batchesByR[rIndex].push(l[lIndex]); + candidatesByR = {}; + break; + } + if (branchIntersection.equals(r[rIndex])) batchesByR[rIndex] = null; + else candidatesByR[rIndex] = branchIntersection; + } + for (const rIndex in candidatesByR) batchesByR[rIndex][lIndex] = candidatesByR[rIndex]; + } + const resultBranches = batchesByR.flatMap((batch, i) => batch?.flatMap((branch) => branch.branches) ?? r[i]); + return resultBranches.length === 0 ? 
Disjoint.init("union", l, r) : resultBranches; +}; +const reduceBranches = ({ branches, ordered }) => { + if (branches.length < 2) return branches; + const uniquenessByIndex = branches.map(() => true); + for (let i = 0; i < branches.length; i++) for (let j = i + 1; j < branches.length && uniquenessByIndex[i] && uniquenessByIndex[j]; j++) { + if (branches[i].equals(branches[j])) { + uniquenessByIndex[j] = false; + continue; + } + const intersection = intersectNodesRoot(branches[i].in, branches[j].in, branches[0].$); + if (intersection instanceof Disjoint) continue; + if (!ordered) assertDeterminateOverlap(branches[i], branches[j]); + if (intersection.equals(branches[i].in)) uniquenessByIndex[i] = !!ordered; + else if (intersection.equals(branches[j].in)) uniquenessByIndex[j] = false; + } + return branches.filter((_, i) => uniquenessByIndex[i]); +}; +const assertDeterminateOverlap = (l, r) => { + if (!l.includesTransform && !r.includesTransform) return; + if (!arrayEquals(l.shallowMorphs, r.shallowMorphs)) throwParseError(writeIndiscriminableMorphMessage(l.expression, r.expression)); + if (!arrayEquals(l.flatMorphs, r.flatMorphs, { isEqual: (l$1, r$1) => l$1.propString === r$1.propString && (l$1.node.hasKind("morph") && r$1.node.hasKind("morph") ? l$1.node.hasEqualMorphs(r$1.node) : l$1.node.hasKind("intersection") && r$1.node.hasKind("intersection") ? l$1.node.structure?.structuralMorphRef === r$1.node.structure?.structuralMorphRef : false) })) throwParseError(writeIndiscriminableMorphMessage(l.expression, r.expression)); +}; +const pruneDiscriminant = (discriminantBranch, discriminantCtx) => discriminantBranch.transform((nodeKind, inner) => { + if (nodeKind === "domain" || nodeKind === "unit") return null; + return inner; +}, { shouldTransform: (node$1, ctx) => { + const propString = optionallyChainPropString(ctx.path); + if (!discriminantCtx.optionallyChainedPropString.startsWith(propString)) return false; + if (node$1.hasKind("domain") && node$1.domain === "object") return true; + if ((node$1.hasKind("domain") || discriminantCtx.kind === "unit") && propString === discriminantCtx.optionallyChainedPropString) return true; + return node$1.children.length !== 0 && node$1.kind !== "index"; +} }); +const writeIndiscriminableMorphMessage = (lDescription, rDescription) => `An unordered union of a type including a morph and a type with overlapping input is indeterminate: +Left: ${lDescription} +Right: ${rDescription}`; +const writeOrderedIntersectionMessage = (lDescription, rDescription) => `The intersection of two ordered unions is indeterminate: +Left: ${lDescription} +Right: ${rDescription}`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/unit.js +const implementation$4 = implementNode({ + kind: "unit", + hasAssociatedError: true, + keys: { unit: { + preserveUndefined: true, + serialize: (schema$1) => schema$1 instanceof Date ? schema$1.toISOString() : defaultValueSerializer(schema$1) + } }, + normalize: (schema$1) => schema$1, + defaults: { + description: (node$1) => printable(node$1.unit), + problem: ({ expected, actual }) => `${expected === actual ? `must be reference equal to ${expected} (serialized to the same value)` : `must be ${expected} (was ${actual})`}` + }, + intersections: { + unit: (l, r) => Disjoint.init("unit", l, r), + ...defineRightwardIntersections("unit", (l, r) => { + if (r.allows(l.unit)) return l; + const rBasis = r.hasKind("intersection") ? r.basis : r; + if (rBasis) { + const rDomain = rBasis.hasKind("domain") ? 
rBasis : $ark.intrinsic.object; + if (l.domain !== rDomain.domain) { + const lDomainDisjointValue = l.domain === "undefined" || l.domain === "null" || l.domain === "boolean" ? l.domain : $ark.intrinsic[l.domain]; + return Disjoint.init("domain", lDomainDisjointValue, rDomain); + } + } + return Disjoint.init("assignability", l, r.hasKind("intersection") ? r.children.find((rConstraint) => !rConstraint.allows(l.unit)) : r); + }) + } +}); +var UnitNode = class extends InternalBasis { + compiledValue = this.json.unit; + serializedValue = typeof this.unit === "string" || this.unit instanceof Date ? JSON.stringify(this.compiledValue) : `${this.compiledValue}`; + compiledCondition = compileEqualityCheck(this.unit, this.serializedValue); + compiledNegation = compileEqualityCheck(this.unit, this.serializedValue, "negated"); + expression = printable(this.unit); + domain = domainOf(this.unit); + get defaultShortDescription() { + return this.domain === "object" ? domainDescriptions.object : this.description; + } + innerToJsonSchema(ctx) { + return this.unit === null ? { type: "null" } : $ark.intrinsic.jsonPrimitive.allows(this.unit) ? { const: this.unit } : ctx.fallback.unit({ + code: "unit", + base: {}, + unit: this.unit + }); + } + traverseAllows = this.unit instanceof Date ? (data) => data instanceof Date && data.toISOString() === this.compiledValue : Number.isNaN(this.unit) ? (data) => Number.isNaN(data) : (data) => data === this.unit; +}; +const Unit = { + implementation: implementation$4, + Node: UnitNode +}; +const compileEqualityCheck = (unit, serializedValue, negated) => { + if (unit instanceof Date) { + const condition = `data instanceof Date && data.toISOString() === ${serializedValue}`; + return negated ? `!(${condition})` : condition; + } + if (Number.isNaN(unit)) return `${negated ? "!" : ""}Number.isNaN(data)`; + return `data ${negated ? "!" : "="}== ${serializedValue}`; +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/index.js +const implementation$3 = implementNode({ + kind: "index", + hasAssociatedError: false, + intersectionIsOpen: true, + keys: { + signature: { + child: true, + parse: (schema$1, ctx) => { + const key = ctx.$.parseSchema(schema$1); + if (!key.extends($ark.intrinsic.key)) return throwParseError(writeInvalidPropertyKeyMessage(key.expression)); + const enumerableBranches = key.branches.filter((b) => b.hasKind("unit")); + if (enumerableBranches.length) return throwParseError(writeEnumerableIndexBranches(enumerableBranches.map((b) => printable(b.unit)))); + return key; + } + }, + value: { + child: true, + parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) + } + }, + normalize: (schema$1) => schema$1, + defaults: { description: (node$1) => `[${node$1.signature.expression}]: ${node$1.value.description}` }, + intersections: { index: (l, r, ctx) => { + if (l.signature.equals(r.signature)) { + const valueIntersection = intersectOrPipeNodes(l.value, r.value, ctx); + const value$1 = valueIntersection instanceof Disjoint ? 
$ark.intrinsic.never.internal : valueIntersection; + return ctx.$.node("index", { + signature: l.signature, + value: value$1 + }); + } + if (l.signature.extends(r.signature) && l.value.subsumes(r.value)) return r; + if (r.signature.extends(l.signature) && r.value.subsumes(l.value)) return l; + return null; + } } +}); +var IndexNode = class extends BaseConstraint { + impliedBasis = $ark.intrinsic.object.internal; + expression = `[${this.signature.expression}]: ${this.value.expression}`; + flatRefs = append(this.value.flatRefs.map((ref) => flatRef([this.signature, ...ref.path], ref.node)), flatRef([this.signature], this.value)); + traverseAllows = (data, ctx) => stringAndSymbolicEntriesOf(data).every((entry) => { + if (this.signature.traverseAllows(entry[0], ctx)) return traverseKey(entry[0], () => this.value.traverseAllows(entry[1], ctx), ctx); + return true; + }); + traverseApply = (data, ctx) => { + for (const entry of stringAndSymbolicEntriesOf(data)) if (this.signature.traverseAllows(entry[0], ctx)) traverseKey(entry[0], () => this.value.traverseApply(entry[1], ctx), ctx); + }; + _transform(mapper, ctx) { + ctx.path.push(this.signature); + const result = super._transform(mapper, ctx); + ctx.path.pop(); + return result; + } + compile() {} +}; +const Index = { + implementation: implementation$3, + Node: IndexNode +}; +const writeEnumerableIndexBranches = (keys) => `Index keys ${keys.join(", ")} should be specified as named props.`; +const writeInvalidPropertyKeyMessage = (indexSchema) => `Indexed key definition '${indexSchema}' must be a string or symbol`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/required.js +const implementation$2 = implementNode({ + kind: "required", + hasAssociatedError: true, + intersectionIsOpen: true, + keys: { + key: {}, + value: { + child: true, + parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) + } + }, + normalize: (schema$1) => schema$1, + defaults: { + description: (node$1) => `${node$1.compiledKey}: ${node$1.value.description}`, + expected: (ctx) => ctx.missingValueDescription, + actual: () => "missing" + }, + intersections: { + required: intersectProps, + optional: intersectProps + } +}); +var RequiredNode = class extends BaseProp { + expression = `${this.compiledKey}: ${this.value.expression}`; + errorContext = Object.freeze({ + code: "required", + missingValueDescription: this.value.defaultShortDescription, + relativePath: [this.key], + meta: this.meta + }); + compiledErrorContext = compileObjectLiteral(this.errorContext); +}; +const Required$1 = { + implementation: implementation$2, + Node: RequiredNode +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/sequence.js +const implementation$1 = implementNode({ + kind: "sequence", + hasAssociatedError: false, + collapsibleKey: "variadic", + keys: { + prefix: { + child: true, + parse: (schema$1, ctx) => { + if (schema$1.length === 0) return void 0; + return schema$1.map((element) => ctx.$.parseSchema(element)); + } + }, + optionals: { + child: true, + parse: (schema$1, ctx) => { + if (schema$1.length === 0) return void 0; + return schema$1.map((element) => ctx.$.parseSchema(element)); + } + }, + defaultables: { + child: (defaultables) => defaultables.map((element) => element[0]), + parse: (defaultables, ctx) => { + if (defaultables.length === 0) return void 0; + return defaultables.map((element) => { + const node$1 = ctx.$.parseSchema(element[0]); + assertDefaultValueAssignability(node$1, 
element[1], null); + return [node$1, element[1]]; + }); + }, + serialize: (defaults) => defaults.map((element) => [element[0].collapsibleJson, defaultValueSerializer(element[1])]) + }, + variadic: { + child: true, + parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1, ctx) + }, + minVariadicLength: { parse: (min) => min === 0 ? void 0 : min }, + postfix: { + child: true, + parse: (schema$1, ctx) => { + if (schema$1.length === 0) return void 0; + return schema$1.map((element) => ctx.$.parseSchema(element)); + } + } + }, + normalize: (schema$1) => { + if (typeof schema$1 === "string") return { variadic: schema$1 }; + if ("variadic" in schema$1 || "prefix" in schema$1 || "defaultables" in schema$1 || "optionals" in schema$1 || "postfix" in schema$1 || "minVariadicLength" in schema$1) { + if (schema$1.postfix?.length) { + if (!schema$1.variadic) return throwParseError(postfixWithoutVariadicMessage); + if (schema$1.optionals?.length || schema$1.defaultables?.length) return throwParseError(postfixAfterOptionalOrDefaultableMessage); + } + if (schema$1.minVariadicLength && !schema$1.variadic) return throwParseError("minVariadicLength may not be specified without a variadic element"); + return schema$1; + } + return { variadic: schema$1 }; + }, + reduce: (raw, $) => { + let minVariadicLength = raw.minVariadicLength ?? 0; + const prefix = raw.prefix?.slice() ?? []; + const defaultables = raw.defaultables?.slice() ?? []; + const optionals = raw.optionals?.slice() ?? []; + const postfix = raw.postfix?.slice() ?? []; + if (raw.variadic) { + while (optionals.at(-1)?.equals(raw.variadic)) optionals.pop(); + if (optionals.length === 0 && defaultables.length === 0) while (prefix.at(-1)?.equals(raw.variadic)) { + prefix.pop(); + minVariadicLength++; + } + while (postfix[0]?.equals(raw.variadic)) { + postfix.shift(); + minVariadicLength++; + } + } else if (optionals.length === 0 && defaultables.length === 0) prefix.push(...postfix.splice(0)); + if (minVariadicLength !== raw.minVariadicLength || raw.prefix && raw.prefix.length !== prefix.length) return $.node("sequence", { + ...raw, + prefix, + defaultables, + optionals, + postfix, + minVariadicLength + }, { prereduced: true }); + }, + defaults: { description: (node$1) => { + if (node$1.isVariadicOnly) return `${node$1.variadic.nestableExpression}[]`; + const innerDescription = node$1.tuple.map((element) => element.kind === "defaultables" ? `${element.node.nestableExpression} = ${printable(element.default)}` : element.kind === "optionals" ? `${element.node.nestableExpression}?` : element.kind === "variadic" ? `...${element.node.nestableExpression}[]` : element.node.expression).join(", "); + return `[${innerDescription}]`; + } }, + intersections: { sequence: (l, r, ctx) => { + const rootState = _intersectSequences({ + l: l.tuple, + r: r.tuple, + disjoint: new Disjoint(), + result: [], + fixedVariants: [], + ctx + }); + const viableBranches = rootState.disjoint.length === 0 ? [rootState, ...rootState.fixedVariants] : rootState.fixedVariants; + return viableBranches.length === 0 ? rootState.disjoint : viableBranches.length === 1 ? ctx.$.node("sequence", sequenceTupleToInner(viableBranches[0].result)) : ctx.$.node("union", viableBranches.map((state) => ({ + proto: Array, + sequence: sequenceTupleToInner(state.result) + }))); + } } +}); +var SequenceNode = class extends BaseConstraint { + impliedBasis = $ark.intrinsic.Array.internal; + tuple = sequenceInnerToTuple(this.inner); + prefixLength = this.prefix?.length ?? 
0; + defaultablesLength = this.defaultables?.length ?? 0; + optionalsLength = this.optionals?.length ?? 0; + postfixLength = this.postfix?.length ?? 0; + defaultablesAndOptionals = []; + prevariadic = this.tuple.filter((el) => { + if (el.kind === "defaultables" || el.kind === "optionals") { + this.defaultablesAndOptionals.push(el.node); + return true; + } + return el.kind === "prefix"; + }); + variadicOrPostfix = conflatenate(this.variadic && [this.variadic], this.postfix); + flatRefs = this.addFlatRefs(); + addFlatRefs() { + appendUniqueFlatRefs(this.flatRefs, this.prevariadic.flatMap((element, i) => append(element.node.flatRefs.map((ref) => flatRef([`${i}`, ...ref.path], ref.node)), flatRef([`${i}`], element.node)))); + appendUniqueFlatRefs(this.flatRefs, this.variadicOrPostfix.flatMap((element) => append(element.flatRefs.map((ref) => flatRef([$ark.intrinsic.nonNegativeIntegerString.internal, ...ref.path], ref.node)), flatRef([$ark.intrinsic.nonNegativeIntegerString.internal], element)))); + return this.flatRefs; + } + isVariadicOnly = this.prevariadic.length + this.postfixLength === 0; + minVariadicLength = this.inner.minVariadicLength ?? 0; + minLength = this.prefixLength + this.minVariadicLength + this.postfixLength; + minLengthNode = this.minLength === 0 ? null : this.$.node("minLength", this.minLength); + maxLength = this.variadic ? null : this.tuple.length; + maxLengthNode = this.maxLength === null ? null : this.$.node("maxLength", this.maxLength); + impliedSiblings = this.minLengthNode ? this.maxLengthNode ? [this.minLengthNode, this.maxLengthNode] : [this.minLengthNode] : this.maxLengthNode ? [this.maxLengthNode] : []; + defaultValueMorphs = getDefaultableMorphs(this); + defaultValueMorphsReference = this.defaultValueMorphs.length ? registeredReference(this.defaultValueMorphs) : void 0; + elementAtIndex(data, index) { + if (index < this.prevariadic.length) return this.tuple[index]; + const firstPostfixIndex = data.length - this.postfixLength; + if (index >= firstPostfixIndex) return { + kind: "postfix", + node: this.postfix[index - firstPostfixIndex] + }; + return { + kind: "variadic", + node: this.variadic ?? throwInternalError(`Unexpected attempt to access index ${index} on ${this}`) + }; + } + traverseAllows = (data, ctx) => { + for (let i = 0; i < data.length; i++) if (!this.elementAtIndex(data, i).node.traverseAllows(data[i], ctx)) return false; + return true; + }; + traverseApply = (data, ctx) => { + let i = 0; + for (; i < data.length; i++) traverseKey(i, () => this.elementAtIndex(data, i).node.traverseApply(data[i], ctx), ctx); + }; + get element() { + return this.cacheGetter("element", this.$.node("union", this.children)); + } + compile(js) { + if (this.prefix) for (const [i, node$1] of this.prefix.entries()) js.traverseKey(`${i}`, `data[${i}]`, node$1); + for (const [i, node$1] of this.defaultablesAndOptionals.entries()) { + const dataIndex = `${i + this.prefixLength}`; + js.if(`${dataIndex} >= ${js.data}.length`, () => js.traversalKind === "Allows" ? js.return(true) : js.return()); + js.traverseKey(dataIndex, `data[${dataIndex}]`, node$1); + } + if (this.variadic) { + if (this.postfix) js.const("firstPostfixIndex", `${js.data}.length${this.postfix ? `- ${this.postfix.length}` : ""}`); + js.for(`i < ${this.postfix ? 
"firstPostfixIndex" : "data.length"}`, () => js.traverseKey("i", "data[i]", this.variadic), this.prevariadic.length); + if (this.postfix) for (const [i, node$1] of this.postfix.entries()) { + const keyExpression = `firstPostfixIndex + ${i}`; + js.traverseKey(keyExpression, `data[${keyExpression}]`, node$1); + } + } + if (js.traversalKind === "Allows") js.return(true); + } + _transform(mapper, ctx) { + ctx.path.push($ark.intrinsic.nonNegativeIntegerString.internal); + const result = super._transform(mapper, ctx); + ctx.path.pop(); + return result; + } + expression = this.description; + reduceJsonSchema(schema$1, ctx) { + if (this.prevariadic.length) schema$1.prefixItems = this.prevariadic.map((el) => { + const valueSchema = el.node.toJsonSchemaRecurse(ctx); + if (el.kind === "defaultables") { + const value$1 = typeof el.default === "function" ? el.default() : el.default; + valueSchema.default = $ark.intrinsic.jsonData.allows(value$1) ? value$1 : ctx.fallback.defaultValue({ + code: "defaultValue", + base: valueSchema, + value: value$1 + }); + } + return valueSchema; + }); + if (this.minLength) schema$1.minItems = this.minLength; + if (this.variadic) { + const variadicSchema = Object.assign(schema$1, { items: this.variadic.toJsonSchemaRecurse(ctx) }); + if (this.maxLength) variadicSchema.maxItems = this.maxLength; + if (this.postfix) { + const elements = this.postfix.map((el) => el.toJsonSchemaRecurse(ctx)); + schema$1 = ctx.fallback.arrayPostfix({ + code: "arrayPostfix", + base: variadicSchema, + elements + }); + } + } else { + schema$1.items = false; + delete schema$1.maxItems; + } + return schema$1; + } +}; +const defaultableMorphsCache$1 = {}; +const getDefaultableMorphs = (node$1) => { + if (!node$1.defaultables) return []; + const morphs = []; + let cacheKey = "["; + const lastDefaultableIndex = node$1.prefixLength + node$1.defaultablesLength - 1; + for (let i = node$1.prefixLength; i <= lastDefaultableIndex; i++) { + const [elementNode, defaultValue] = node$1.defaultables[i - node$1.prefixLength]; + morphs.push(computeDefaultValueMorph(i, elementNode, defaultValue)); + cacheKey += `${i}: ${elementNode.id} = ${defaultValueSerializer(defaultValue)}, `; + } + cacheKey += "]"; + return defaultableMorphsCache$1[cacheKey] ??= morphs; +}; +const Sequence = { + implementation: implementation$1, + Node: SequenceNode +}; +const sequenceInnerToTuple = (inner) => { + const tuple = []; + if (inner.prefix) for (const node$1 of inner.prefix) tuple.push({ + kind: "prefix", + node: node$1 + }); + if (inner.defaultables) for (const [node$1, defaultValue] of inner.defaultables) tuple.push({ + kind: "defaultables", + node: node$1, + default: defaultValue + }); + if (inner.optionals) for (const node$1 of inner.optionals) tuple.push({ + kind: "optionals", + node: node$1 + }); + if (inner.variadic) tuple.push({ + kind: "variadic", + node: inner.variadic + }); + if (inner.postfix) for (const node$1 of inner.postfix) tuple.push({ + kind: "postfix", + node: node$1 + }); + return tuple; +}; +const sequenceTupleToInner = (tuple) => tuple.reduce((result, element) => { + if (element.kind === "variadic") result.variadic = element.node; + else if (element.kind === "defaultables") result.defaultables = append(result.defaultables, [[element.node, element.default]]); + else result[element.kind] = append(result[element.kind], element.node); + return result; +}, {}); +const postfixAfterOptionalOrDefaultableMessage = "A postfix required element cannot follow an optional or defaultable element"; +const 
postfixWithoutVariadicMessage = "A postfix element requires a variadic element"; +const _intersectSequences = (s) => { + const [lHead, ...lTail] = s.l; + const [rHead, ...rTail] = s.r; + if (!lHead || !rHead) return s; + const lHasPostfix = lTail.at(-1)?.kind === "postfix"; + const rHasPostfix = rTail.at(-1)?.kind === "postfix"; + const kind = lHead.kind === "prefix" || rHead.kind === "prefix" ? "prefix" : lHead.kind === "postfix" || rHead.kind === "postfix" ? "postfix" : lHead.kind === "variadic" && rHead.kind === "variadic" ? "variadic" : lHasPostfix || rHasPostfix ? "prefix" : lHead.kind === "defaultables" || rHead.kind === "defaultables" ? "defaultables" : "optionals"; + if (lHead.kind === "prefix" && rHead.kind === "variadic" && rHasPostfix) { + const postfixBranchResult = _intersectSequences({ + ...s, + fixedVariants: [], + r: rTail.map((element) => ({ + ...element, + kind: "prefix" + })) + }); + if (postfixBranchResult.disjoint.length === 0) s.fixedVariants.push(postfixBranchResult); + } else if (rHead.kind === "prefix" && lHead.kind === "variadic" && lHasPostfix) { + const postfixBranchResult = _intersectSequences({ + ...s, + fixedVariants: [], + l: lTail.map((element) => ({ + ...element, + kind: "prefix" + })) + }); + if (postfixBranchResult.disjoint.length === 0) s.fixedVariants.push(postfixBranchResult); + } + const result = intersectOrPipeNodes(lHead.node, rHead.node, s.ctx); + if (result instanceof Disjoint) if (kind === "prefix" || kind === "postfix") { + s.disjoint.push(...result.withPrefixKey(kind === "prefix" ? s.result.length : `-${lTail.length + 1}`, "required")); + s.result = [...s.result, { + kind, + node: $ark.intrinsic.never.internal + }]; + } else if (kind === "optionals" || kind === "defaultables") return s; + else return _intersectSequences({ + ...s, + fixedVariants: [], + l: lTail.map((element) => ({ + ...element, + kind: "prefix" + })), + r: lTail.map((element) => ({ + ...element, + kind: "prefix" + })) + }); + else if (kind === "defaultables") { + if (lHead.kind === "defaultables" && rHead.kind === "defaultables" && lHead.default !== rHead.default) throwParseError(writeDefaultIntersectionMessage(lHead.default, rHead.default)); + s.result = [...s.result, { + kind, + node: result, + default: lHead.kind === "defaultables" ? lHead.default : rHead.kind === "defaultables" ? rHead.default : throwInternalError(`Unexpected defaultable intersection from ${lHead.kind} and ${rHead.kind} elements.`) + }]; + } else s.result = [...s.result, { + kind, + node: result + }]; + const lRemaining = s.l.length; + const rRemaining = s.r.length; + if (lHead.kind !== "variadic" || lRemaining >= rRemaining && (rHead.kind === "variadic" || rRemaining === 1)) s.l = lTail; + if (rHead.kind !== "variadic" || rRemaining >= lRemaining && (lHead.kind === "variadic" || lRemaining === 1)) s.r = rTail; + return _intersectSequences(s); +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/structure.js +const createStructuralWriter = (childStringProp) => (node$1) => { + if (node$1.props.length || node$1.index) { + const parts = node$1.index?.map((index) => index[childStringProp]) ?? []; + for (const prop of node$1.props) parts.push(prop[childStringProp]); + if (node$1.undeclared) parts.push(`+ (undeclared): ${node$1.undeclared}`); + const objectLiteralDescription = `{ ${parts.join(", ")} }`; + return node$1.sequence ? 
`${objectLiteralDescription} & ${node$1.sequence.description}` : objectLiteralDescription; + } + return node$1.sequence?.description ?? "{}"; +}; +const structuralDescription = createStructuralWriter("description"); +const structuralExpression = createStructuralWriter("expression"); +const intersectPropsAndIndex = (l, r, $) => { + const kind = l.required ? "required" : "optional"; + if (!r.signature.allows(l.key)) return null; + const value$1 = intersectNodesRoot(l.value, r.value, $); + if (value$1 instanceof Disjoint) return kind === "optional" ? $.node("optional", { + key: l.key, + value: $ark.intrinsic.never.internal + }) : value$1.withPrefixKey(l.key, l.kind); + return null; +}; +const implementation = implementNode({ + kind: "structure", + hasAssociatedError: false, + normalize: (schema$1) => schema$1, + applyConfig: (schema$1, config) => { + if (!schema$1.undeclared && config.onUndeclaredKey !== "ignore") return { + ...schema$1, + undeclared: config.onUndeclaredKey + }; + return schema$1; + }, + keys: { + required: { + child: true, + parse: constraintKeyParser("required"), + reduceIo: (ioKind, inner, nodes) => { + inner.required = append(inner.required, nodes.map((node$1) => node$1[ioKind])); + return; + } + }, + optional: { + child: true, + parse: constraintKeyParser("optional"), + reduceIo: (ioKind, inner, nodes) => { + if (ioKind === "in") { + inner.optional = nodes.map((node$1) => node$1.in); + return; + } + for (const node$1 of nodes) inner[node$1.outProp.kind] = append(inner[node$1.outProp.kind], node$1.outProp.out); + } + }, + index: { + child: true, + parse: constraintKeyParser("index") + }, + sequence: { + child: true, + parse: constraintKeyParser("sequence") + }, + undeclared: { + parse: (behavior) => behavior === "ignore" ? void 0 : behavior, + reduceIo: (ioKind, inner, value$1) => { + if (value$1 !== "delete") return; + if (ioKind === "in") delete inner.undeclared; + else inner.undeclared = "reject"; + } + } + }, + defaults: { description: structuralDescription }, + intersections: { structure: (l, r, ctx) => { + const lInner = { ...l.inner }; + const rInner = { ...r.inner }; + const disjointResult = new Disjoint(); + if (l.undeclared) { + const lKey = l.keyof(); + for (const k of r.requiredKeys) if (!lKey.allows(k)) disjointResult.add("presence", $ark.intrinsic.never.internal, r.propsByKey[k].value, { path: [k] }); + if (rInner.optional) rInner.optional = rInner.optional.filter((n) => lKey.allows(n.key)); + if (rInner.index) rInner.index = rInner.index.flatMap((n) => { + if (n.signature.extends(lKey)) return n; + const indexOverlap = intersectNodesRoot(lKey, n.signature, ctx.$); + if (indexOverlap instanceof Disjoint) return []; + const normalized = normalizeIndex(indexOverlap, n.value, ctx.$); + if (normalized.required) rInner.required = conflatenate(rInner.required, normalized.required); + if (normalized.optional) rInner.optional = conflatenate(rInner.optional, normalized.optional); + return normalized.index ?? 
[]; + }); + } + if (r.undeclared) { + const rKey = r.keyof(); + for (const k of l.requiredKeys) if (!rKey.allows(k)) disjointResult.add("presence", l.propsByKey[k].value, $ark.intrinsic.never.internal, { path: [k] }); + if (lInner.optional) lInner.optional = lInner.optional.filter((n) => rKey.allows(n.key)); + if (lInner.index) lInner.index = lInner.index.flatMap((n) => { + if (n.signature.extends(rKey)) return n; + const indexOverlap = intersectNodesRoot(rKey, n.signature, ctx.$); + if (indexOverlap instanceof Disjoint) return []; + const normalized = normalizeIndex(indexOverlap, n.value, ctx.$); + if (normalized.required) lInner.required = conflatenate(lInner.required, normalized.required); + if (normalized.optional) lInner.optional = conflatenate(lInner.optional, normalized.optional); + return normalized.index ?? []; + }); + } + const baseInner = {}; + if (l.undeclared || r.undeclared) baseInner.undeclared = l.undeclared === "reject" || r.undeclared === "reject" ? "reject" : "delete"; + const childIntersectionResult = intersectConstraints({ + kind: "structure", + baseInner, + l: flattenConstraints(lInner), + r: flattenConstraints(rInner), + roots: [], + ctx + }); + if (childIntersectionResult instanceof Disjoint) disjointResult.push(...childIntersectionResult); + if (disjointResult.length) return disjointResult; + return childIntersectionResult; + } }, + reduce: (inner, $) => { + if (inner.index) { + if (!(inner.required || inner.optional)) return; + let updated = false; + const requiredProps = inner.required ?? []; + const optionalProps = inner.optional ?? []; + const newOptionalProps = [...optionalProps]; + for (const index of inner.index) { + for (const requiredProp of requiredProps) { + const intersection = intersectPropsAndIndex(requiredProp, index, $); + if (intersection instanceof Disjoint) return intersection; + } + for (const [indx, optionalProp] of optionalProps.entries()) { + const intersection = intersectPropsAndIndex(optionalProp, index, $); + if (intersection instanceof Disjoint) return intersection; + if (intersection === null) continue; + newOptionalProps[indx] = intersection; + updated = true; + } + } + if (updated) return $.node("structure", { + ...inner, + optional: newOptionalProps + }, { prereduced: true }); + } + } +}); +var StructureNode = class extends BaseConstraint { + impliedBasis = $ark.intrinsic.object.internal; + impliedSiblings = this.children.flatMap((n) => n.impliedSiblings ?? []); + props = conflatenate(this.required, this.optional); + propsByKey = flatMorph(this.props, (i, node$1) => [node$1.key, node$1]); + propsByKeyReference = registeredReference(this.propsByKey); + expression = structuralExpression(this); + requiredKeys = this.required?.map((node$1) => node$1.key) ?? []; + optionalKeys = this.optional?.map((node$1) => node$1.key) ?? 
[]; + literalKeys = [...this.requiredKeys, ...this.optionalKeys]; + _keyof; + keyof() { + if (this._keyof) return this._keyof; + let branches = this.$.units(this.literalKeys).branches; + if (this.index) for (const { signature } of this.index) branches = branches.concat(signature.branches); + return this._keyof = this.$.node("union", branches); + } + map(flatMapProp) { + return this.$.node("structure", this.props.flatMap(flatMapProp).reduce((structureInner, mapped) => { + const originalProp = this.propsByKey[mapped.key]; + if (isNode(mapped)) { + if (mapped.kind !== "required" && mapped.kind !== "optional") return throwParseError(`Map result must have kind "required" or "optional" (was ${mapped.kind})`); + structureInner[mapped.kind] = append(structureInner[mapped.kind], mapped); + return structureInner; + } + const mappedKind = mapped.kind ?? originalProp?.kind ?? "required"; + const mappedPropInner = flatMorph(mapped, (k, v) => k in Optional.implementation.keys ? [k, v] : []); + structureInner[mappedKind] = append(structureInner[mappedKind], this.$.node(mappedKind, mappedPropInner)); + return structureInner; + }, {})); + } + assertHasKeys(keys) { + const invalidKeys = keys.filter((k) => !typeOrTermExtends(k, this.keyof())); + if (invalidKeys.length) return throwParseError(writeInvalidKeysMessage(this.expression, invalidKeys)); + } + get(indexer, ...path$1) { + let value$1; + let required = false; + const key = indexerToKey(indexer); + if ((typeof key === "string" || typeof key === "symbol") && this.propsByKey[key]) { + value$1 = this.propsByKey[key].value; + required = this.propsByKey[key].required; + } + if (this.index) { + for (const n of this.index) if (typeOrTermExtends(key, n.signature)) value$1 = value$1?.and(n.value) ?? n.value; + } + if (this.sequence && typeOrTermExtends(key, $ark.intrinsic.nonNegativeIntegerString)) if (hasArkKind(key, "root")) { + if (this.sequence.variadic) value$1 = value$1?.and(this.sequence.element) ?? this.sequence.element; + } else { + const index = Number.parseInt(key); + if (index < this.sequence.prevariadic.length) { + const fixedElement = this.sequence.prevariadic[index].node; + value$1 = value$1?.and(fixedElement) ?? fixedElement; + required ||= index < this.sequence.prefixLength; + } else if (this.sequence.variadic) { + const nonFixedElement = this.$.node("union", this.sequence.variadicOrPostfix); + value$1 = value$1?.and(nonFixedElement) ?? nonFixedElement; + } + } + if (!value$1) { + if (this.sequence?.variadic && hasArkKind(key, "root") && key.extends($ark.intrinsic.number)) return throwParseError(writeNumberIndexMessage(key.expression, this.sequence.expression)); + return throwParseError(writeInvalidKeysMessage(this.expression, [key])); + } + const result = value$1.get(...path$1); + return required ? result : result.or($ark.intrinsic.undefined); + } + pick(...keys) { + this.assertHasKeys(keys); + return this.$.node("structure", this.filterKeys("pick", keys)); + } + omit(...keys) { + this.assertHasKeys(keys); + return this.$.node("structure", this.filterKeys("omit", keys)); + } + optionalize() { + const { required,...inner } = this.inner; + return this.$.node("structure", { + ...inner, + optional: this.props.map((prop) => prop.hasKind("required") ? this.$.node("optional", prop.inner) : prop) + }); + } + require() { + const { optional,...inner } = this.inner; + return this.$.node("structure", { + ...inner, + required: this.props.map((prop) => prop.hasKind("optional") ? 
{ + key: prop.key, + value: prop.value + } : prop) + }); + } + merge(r) { + const inner = this.filterKeys("omit", [r.keyof()]); + if (r.required) inner.required = append(inner.required, r.required); + if (r.optional) inner.optional = append(inner.optional, r.optional); + if (r.index) inner.index = append(inner.index, r.index); + if (r.sequence) inner.sequence = r.sequence; + if (r.undeclared) inner.undeclared = r.undeclared; + else delete inner.undeclared; + return this.$.node("structure", inner); + } + filterKeys(operation, keys) { + const result = makeRootAndArrayPropertiesMutable(this.inner); + const shouldKeep = (key) => { + const matchesKey = keys.some((k) => typeOrTermExtends(key, k)); + return operation === "pick" ? matchesKey : !matchesKey; + }; + if (result.required) result.required = result.required.filter((prop) => shouldKeep(prop.key)); + if (result.optional) result.optional = result.optional.filter((prop) => shouldKeep(prop.key)); + if (result.index) result.index = result.index.filter((index) => shouldKeep(index.signature)); + return result; + } + traverseAllows = (data, ctx) => this._traverse("Allows", data, ctx); + traverseApply = (data, ctx) => this._traverse("Apply", data, ctx); + _traverse = (traversalKind, data, ctx) => { + const errorCount = ctx?.currentErrorCount ?? 0; + for (let i = 0; i < this.props.length; i++) if (traversalKind === "Allows") { + if (!this.props[i].traverseAllows(data, ctx)) return false; + } else { + this.props[i].traverseApply(data, ctx); + if (ctx.failFast && ctx.currentErrorCount > errorCount) return false; + } + if (this.sequence) if (traversalKind === "Allows") { + if (!this.sequence.traverseAllows(data, ctx)) return false; + } else { + this.sequence.traverseApply(data, ctx); + if (ctx.failFast && ctx.currentErrorCount > errorCount) return false; + } + if (this.index || this.undeclared === "reject") { + const keys = Object.keys(data); + keys.push(...Object.getOwnPropertySymbols(data)); + for (let i = 0; i < keys.length; i++) { + const k = keys[i]; + if (this.index) { + for (const node$1 of this.index) if (node$1.signature.traverseAllows(k, ctx)) if (traversalKind === "Allows") { + const result = traverseKey(k, () => node$1.value.traverseAllows(data[k], ctx), ctx); + if (!result) return false; + } else { + traverseKey(k, () => node$1.value.traverseApply(data[k], ctx), ctx); + if (ctx.failFast && ctx.currentErrorCount > errorCount) return false; + } + } + if (this.undeclared === "reject" && !this.declaresKey(k)) { + if (traversalKind === "Allows") return false; + ctx.errorFromNodeContext({ + code: "predicate", + expected: "removed", + actual: "", + relativePath: [k], + meta: this.meta + }); + if (ctx.failFast) return false; + } + } + } + if (this.structuralMorph && ctx && !ctx.hasError()) ctx.queueMorphs([this.structuralMorph]); + return true; + }; + get defaultable() { + return this.cacheGetter("defaultable", this.optional?.filter((o) => o.hasDefault()) ?? 
[]); + } + declaresKey = (k) => k in this.propsByKey || this.index?.some((n) => n.signature.allows(k)) || this.sequence !== void 0 && $ark.intrinsic.nonNegativeIntegerString.allows(k); + _compileDeclaresKey(js) { + const parts = []; + if (this.props.length) parts.push(`k in ${this.propsByKeyReference}`); + if (this.index) for (const index of this.index) parts.push(js.invoke(index.signature, { + kind: "Allows", + arg: "k" + })); + if (this.sequence) parts.push("$ark.intrinsic.nonNegativeIntegerString.allows(k)"); + return parts.join(" || ") || "false"; + } + get structuralMorph() { + return this.cacheGetter("structuralMorph", getPossibleMorph(this)); + } + structuralMorphRef = this.structuralMorph && registeredReference(this.structuralMorph); + compile(js) { + if (js.traversalKind === "Apply") js.initializeErrorCount(); + for (const prop of this.props) { + js.check(prop); + if (js.traversalKind === "Apply") js.returnIfFailFast(); + } + if (this.sequence) { + js.check(this.sequence); + if (js.traversalKind === "Apply") js.returnIfFailFast(); + } + if (this.index || this.undeclared === "reject") { + js.const("keys", "Object.keys(data)"); + js.line("keys.push(...Object.getOwnPropertySymbols(data))"); + js.for("i < keys.length", () => this.compileExhaustiveEntry(js)); + } + if (js.traversalKind === "Allows") return js.return(true); + if (this.structuralMorphRef) js.if("ctx && !ctx.hasError()", () => { + js.line(`ctx.queueMorphs([`); + precompileMorphs(js, this); + return js.line("])"); + }); + } + compileExhaustiveEntry(js) { + js.const("k", "keys[i]"); + if (this.index) for (const node$1 of this.index) js.if(`${js.invoke(node$1.signature, { + arg: "k", + kind: "Allows" + })}`, () => js.traverseKey("k", "data[k]", node$1.value)); + if (this.undeclared === "reject") js.if(`!(${this._compileDeclaresKey(js)})`, () => { + if (js.traversalKind === "Allows") return js.return(false); + return js.line(`ctx.errorFromNodeContext({ code: "predicate", expected: "removed", actual: "", relativePath: [k], meta: ${this.compiledMeta} })`).if("ctx.failFast", () => js.return()); + }); + return js; + } + reduceJsonSchema(schema$1, ctx) { + switch (schema$1.type) { + case "object": return this.reduceObjectJsonSchema(schema$1, ctx); + case "array": + const arraySchema = this.sequence?.reduceJsonSchema(schema$1, ctx) ?? schema$1; + if (this.props.length || this.index) return ctx.fallback.arrayObject({ + code: "arrayObject", + base: arraySchema, + object: this.reduceObjectJsonSchema({ type: "object" }, ctx) + }); + return arraySchema; + default: return ToJsonSchema.throwInternalOperandError("structure", schema$1); + } + } + reduceObjectJsonSchema(schema$1, ctx) { + if (this.props.length) { + schema$1.properties = {}; + for (const prop of this.props) { + const valueSchema = prop.value.toJsonSchemaRecurse(ctx); + if (typeof prop.key === "symbol") { + ctx.fallback.symbolKey({ + code: "symbolKey", + base: schema$1, + key: prop.key, + value: valueSchema, + optional: prop.optional + }); + continue; + } + if (prop.hasDefault()) { + const value$1 = typeof prop.default === "function" ? prop.default() : prop.default; + valueSchema.default = $ark.intrinsic.jsonData.allows(value$1) ? 
value$1 : ctx.fallback.defaultValue({ + code: "defaultValue", + base: valueSchema, + value: value$1 + }); + } + schema$1.properties[prop.key] = valueSchema; + } + if (this.requiredKeys.length && schema$1.properties) schema$1.required = this.requiredKeys.filter((k) => typeof k === "string" && k in schema$1.properties); + } + if (this.index) for (const index of this.index) { + const valueJsonSchema = index.value.toJsonSchemaRecurse(ctx); + if (index.signature.equals($ark.intrinsic.string)) { + schema$1.additionalProperties = valueJsonSchema; + continue; + } + for (const keyBranch of index.signature.branches) { + if (!keyBranch.extends($ark.intrinsic.string)) { + schema$1 = ctx.fallback.symbolKey({ + code: "symbolKey", + base: schema$1, + key: null, + value: valueJsonSchema, + optional: false + }); + continue; + } + let keySchema = { type: "string" }; + if (keyBranch.hasKind("morph")) keySchema = ctx.fallback.morph({ + code: "morph", + base: keyBranch.in.toJsonSchemaRecurse(ctx), + out: keyBranch.out.toJsonSchemaRecurse(ctx) + }); + if (!keyBranch.hasKind("intersection")) return throwInternalError(`Unexpected index branch kind ${keyBranch.kind}.`); + const { pattern } = keyBranch.inner; + if (pattern) { + const keySchemaWithPattern = Object.assign(keySchema, { pattern: pattern[0].rule }); + for (let i = 1; i < pattern.length; i++) keySchema = ctx.fallback.patternIntersection({ + code: "patternIntersection", + base: keySchemaWithPattern, + pattern: pattern[i].rule + }); + schema$1.patternProperties ??= {}; + schema$1.patternProperties[keySchemaWithPattern.pattern] = valueJsonSchema; + } + } + } + if (this.undeclared && !schema$1.additionalProperties) schema$1.additionalProperties = false; + return schema$1; + } +}; +const defaultableMorphsCache = {}; +const constructStructuralMorphCacheKey = (node$1) => { + let cacheKey = ""; + for (let i = 0; i < node$1.defaultable.length; i++) cacheKey += node$1.defaultable[i].defaultValueMorphRef; + if (node$1.sequence?.defaultValueMorphsReference) cacheKey += node$1.sequence?.defaultValueMorphsReference; + if (node$1.undeclared === "delete") { + cacheKey += "delete !("; + if (node$1.required) for (const n of node$1.required) cacheKey += n.compiledKey + " | "; + if (node$1.optional) for (const n of node$1.optional) cacheKey += n.compiledKey + " | "; + if (node$1.index) for (const index of node$1.index) cacheKey += index.signature.id + " | "; + if (node$1.sequence) if (node$1.sequence.maxLength === null) cacheKey += intrinsic.nonNegativeIntegerString.id; + else for (let i = 0; i < node$1.sequence.tuple.length; i++) cacheKey += i + " | "; + cacheKey += ")"; + } + return cacheKey; +}; +const getPossibleMorph = (node$1) => { + const cacheKey = constructStructuralMorphCacheKey(node$1); + if (!cacheKey) return void 0; + if (defaultableMorphsCache[cacheKey]) return defaultableMorphsCache[cacheKey]; + const $arkStructuralMorph = (data, ctx) => { + for (let i = 0; i < node$1.defaultable.length; i++) if (!(node$1.defaultable[i].key in data)) node$1.defaultable[i].defaultValueMorph(data, ctx); + if (node$1.sequence?.defaultables) for (let i = data.length - node$1.sequence.prefixLength; i < node$1.sequence.defaultables.length; i++) node$1.sequence.defaultValueMorphs[i](data, ctx); + if (node$1.undeclared === "delete") { + for (const k in data) if (!node$1.declaresKey(k)) delete data[k]; + } + return data; + }; + return defaultableMorphsCache[cacheKey] = $arkStructuralMorph; +}; +const precompileMorphs = (js, node$1) => { + const requiresContext = 
node$1.defaultable.some((node$2) => node$2.defaultValueMorph.length === 2) || node$1.sequence?.defaultValueMorphs.some((morph) => morph.length === 2); + const args$1 = `(data${requiresContext ? ", ctx" : ""})`; + return js.block(`${args$1} => `, (js$1) => { + for (let i = 0; i < node$1.defaultable.length; i++) { + const { serializedKey, defaultValueMorphRef } = node$1.defaultable[i]; + js$1.if(`!(${serializedKey} in data)`, (js$2) => js$2.line(`${defaultValueMorphRef}${args$1}`)); + } + if (node$1.sequence?.defaultables) js$1.for(`i < ${node$1.sequence.defaultables.length}`, (js$2) => js$2.set(`data[i]`, 5), `data.length - ${node$1.sequence.prefixLength}`); + if (node$1.undeclared === "delete") js$1.forIn("data", (js$2) => js$2.if(`!(${node$1._compileDeclaresKey(js$2)})`, (js$3) => js$3.line(`delete data[k]`))); + return js$1.return("data"); + }); +}; +const Structure = { + implementation, + Node: StructureNode +}; +const indexerToKey = (indexable) => { + if (hasArkKind(indexable, "root") && indexable.hasKind("unit")) indexable = indexable.unit; + if (typeof indexable === "number") indexable = `${indexable}`; + return indexable; +}; +const writeNumberIndexMessage = (indexExpression, sequenceExpression) => `${indexExpression} is not allowed as an array index on ${sequenceExpression}. Use the 'nonNegativeIntegerString' keyword instead.`; +/** extract enumerable named props from an index signature */ +const normalizeIndex = (signature, value$1, $) => { + const [enumerableBranches, nonEnumerableBranches] = spliterate(signature.branches, (k) => k.hasKind("unit")); + if (!enumerableBranches.length) return { index: $.node("index", { + signature, + value: value$1 + }) }; + const normalized = {}; + for (const n of enumerableBranches) { + const prop = $.node("required", { + key: n.unit, + value: value$1 + }); + normalized[prop.kind] = append(normalized[prop.kind], prop); + } + if (nonEnumerableBranches.length) normalized.index = $.node("index", { + signature: nonEnumerableBranches, + value: value$1 + }); + return normalized; +}; +const typeKeyToString = (k) => hasArkKind(k, "root") ? k.expression : printable(k); +const writeInvalidKeysMessage = (o, keys) => `Key${keys.length === 1 ? "" : "s"} ${keys.map(typeKeyToString).join(", ")} ${keys.length === 1 ? 
"does" : "do"} not exist on ${o}`; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/kinds.js +const nodeImplementationsByKind = { + ...boundImplementationsByKind, + alias: Alias.implementation, + domain: Domain.implementation, + unit: Unit.implementation, + proto: Proto.implementation, + union: Union.implementation, + morph: Morph.implementation, + intersection: Intersection.implementation, + divisor: Divisor.implementation, + pattern: Pattern.implementation, + predicate: Predicate.implementation, + required: Required$1.implementation, + optional: Optional.implementation, + index: Index.implementation, + sequence: Sequence.implementation, + structure: Structure.implementation +}; +$ark.defaultConfig = withAlphabetizedKeys(Object.assign(flatMorph(nodeImplementationsByKind, (kind, implementation$22) => [kind, implementation$22.defaults]), { + jitless: envHasCsp(), + clone: deepClone, + onUndeclaredKey: "ignore", + exactOptionalPropertyTypes: true, + numberAllowsNaN: false, + dateAllowsInvalid: false, + onFail: null, + keywords: {}, + toJsonSchema: ToJsonSchema.defaultConfig +})); +$ark.resolvedConfig = mergeConfigs($ark.defaultConfig, $ark.config); +const nodeClassesByKind = { + ...boundClassesByKind, + alias: Alias.Node, + domain: Domain.Node, + unit: Unit.Node, + proto: Proto.Node, + union: Union.Node, + morph: Morph.Node, + intersection: Intersection.Node, + divisor: Divisor.Node, + pattern: Pattern.Node, + predicate: Predicate.Node, + required: Required$1.Node, + optional: Optional.Node, + index: Index.Node, + sequence: Sequence.Node, + structure: Structure.Node +}; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/module.js +var RootModule = class extends DynamicBase { + get [arkKind]() { + return "module"; + } +}; +const bindModule = (module, $) => new RootModule(flatMorph(module, (alias, value$1) => [alias, hasArkKind(value$1, "module") ? bindModule(value$1, $) : $.bindReference(value$1)])); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/scope.js +const schemaBranchesOf = (schema$1) => isArray(schema$1) ? schema$1 : "branches" in schema$1 && isArray(schema$1.branches) ? 
schema$1.branches : void 0; +const throwMismatchedNodeRootError = (expected, actual) => throwParseError(`Node of kind ${actual} is not valid as a ${expected} definition`); +const writeDuplicateAliasError = (alias) => `#${alias} duplicates public alias ${alias}`; +const scopesByName = {}; +$ark.ambient ??= {}; +let rawUnknownUnion; +const rootScopeFnName = "function $"; +const precompile = (references) => bindPrecompilation(references, precompileReferences(references)); +const bindPrecompilation = (references, precompiler) => { + const precompilation = precompiler.write(rootScopeFnName, 4); + const compiledTraversals = precompiler.compile()(); + for (const node$1 of references) { + if (node$1.precompilation) continue; + node$1.traverseAllows = compiledTraversals[`${node$1.id}Allows`].bind(compiledTraversals); + if (node$1.isRoot() && !node$1.allowsRequiresContext) node$1.allows = node$1.traverseAllows; + node$1.traverseApply = compiledTraversals[`${node$1.id}Apply`].bind(compiledTraversals); + if (compiledTraversals[`${node$1.id}Optimistic`]) node$1.traverseOptimistic = compiledTraversals[`${node$1.id}Optimistic`].bind(compiledTraversals); + node$1.precompilation = precompilation; + } +}; +const precompileReferences = (references) => new CompiledFunction().return(references.reduce((js, node$1) => { + const allowsCompiler = new NodeCompiler({ kind: "Allows" }).indent(); + node$1.compile(allowsCompiler); + const allowsJs = allowsCompiler.write(`${node$1.id}Allows`); + const applyCompiler = new NodeCompiler({ kind: "Apply" }).indent(); + node$1.compile(applyCompiler); + const applyJs = applyCompiler.write(`${node$1.id}Apply`); + const result = `${js}${allowsJs},\n${applyJs},\n`; + if (!node$1.hasKind("union")) return result; + const optimisticCompiler = new NodeCompiler({ + kind: "Allows", + optimistic: true + }).indent(); + node$1.compile(optimisticCompiler); + const optimisticJs = optimisticCompiler.write(`${node$1.id}Optimistic`); + return `${result}${optimisticJs},\n`; +}, "{\n") + "}"); +var BaseScope = class { + config; + resolvedConfig; + name; + get [arkKind]() { + return "scope"; + } + referencesById = {}; + references = []; + resolutions = {}; + exportedNames = []; + aliases = {}; + resolved = false; + nodesByHash = {}; + intrinsic; + constructor(def, config) { + this.config = mergeConfigs($ark.config, config); + this.resolvedConfig = mergeConfigs($ark.resolvedConfig, config); + this.name = this.resolvedConfig.name ?? `anonymousScope${Object.keys(scopesByName).length}`; + if (this.name in scopesByName) throwParseError(`A Scope already named ${this.name} already exists`); + scopesByName[this.name] = this; + const aliasEntries = Object.entries(def).map((entry) => this.preparseOwnAliasEntry(...entry)); + for (const [k, v] of aliasEntries) { + let name = k; + if (k[0] === "#") { + name = k.slice(1); + if (name in this.aliases) throwParseError(writeDuplicateAliasError(name)); + this.aliases[name] = v; + } else { + if (name in this.aliases) throwParseError(writeDuplicateAliasError(k)); + this.aliases[name] = v; + this.exportedNames.push(name); + } + if (!hasArkKind(v, "module") && !hasArkKind(v, "generic") && !isThunk(v)) { + const preparsed = this.preparseOwnDefinitionFormat(v, { alias: name }); + this.resolutions[name] = hasArkKind(preparsed, "root") ? 
this.bindReference(preparsed) : this.createParseContext(preparsed).id; + } + } + rawUnknownUnion ??= this.node("union", { branches: [ + "string", + "number", + "object", + "bigint", + "symbol", + { unit: true }, + { unit: false }, + { unit: void 0 }, + { unit: null } + ] }, { prereduced: true }); + this.nodesByHash[rawUnknownUnion.hash] = this.node("intersection", {}, { prereduced: true }); + this.intrinsic = $ark.intrinsic ? flatMorph($ark.intrinsic, (k, v) => k.startsWith("json") ? [] : [k, this.bindReference(v)]) : {}; + } + cacheGetter(name, value$1) { + Object.defineProperty(this, name, { value: value$1 }); + return value$1; + } + get internal() { + return this; + } + _json; + get json() { + if (!this._json) this.export(); + return this._json; + } + defineSchema(def) { + return def; + } + generic = (...params) => { + const $ = this; + return (def, possibleHkt) => new GenericRoot(params, possibleHkt ? new LazyGenericBody(def) : def, $, $, possibleHkt ?? null); + }; + units = (values, opts) => { + const uniqueValues = []; + for (const value$1 of values) if (!uniqueValues.includes(value$1)) uniqueValues.push(value$1); + const branches = uniqueValues.map((unit) => this.node("unit", { unit }, opts)); + return this.node("union", branches, { + ...opts, + prereduced: true + }); + }; + lazyResolutions = []; + lazilyResolve(resolve, syntheticAlias) { + const node$1 = this.node("alias", { + reference: syntheticAlias ?? "synthetic", + resolve + }, { prereduced: true }); + if (!this.resolved) this.lazyResolutions.push(node$1); + return node$1; + } + schema = (schema$1, opts) => this.finalize(this.parseSchema(schema$1, opts)); + parseSchema = (schema$1, opts) => this.node(schemaKindOf(schema$1), schema$1, opts); + preparseNode(kinds, schema$1, opts) { + let kind = typeof kinds === "string" ? kinds : schemaKindOf(schema$1, kinds); + if (isNode(schema$1) && schema$1.kind === kind) return schema$1; + if (kind === "alias" && !opts?.prereduced) { + const { reference: reference$1 } = Alias.implementation.normalize(schema$1, this); + if (reference$1.startsWith("$")) { + const resolution = this.resolveRoot(reference$1.slice(1)); + schema$1 = resolution; + kind = resolution.kind; + } + } else if (kind === "union" && hasDomain(schema$1, "object")) { + const branches = schemaBranchesOf(schema$1); + if (branches?.length === 1) { + schema$1 = branches[0]; + kind = schemaKindOf(schema$1); + } + } + if (isNode(schema$1) && schema$1.kind === kind) return schema$1; + const impl = nodeImplementationsByKind[kind]; + const normalizedSchema = impl.normalize?.(schema$1, this) ?? schema$1; + if (isNode(normalizedSchema)) return normalizedSchema.kind === kind ? normalizedSchema : throwMismatchedNodeRootError(kind, normalizedSchema.kind); + return { + ...opts, + $: this, + kind, + def: normalizedSchema, + prefix: opts.alias ?? kind + }; + } + bindReference(reference$1) { + let bound; + if (isNode(reference$1)) bound = reference$1.$ === this ? reference$1 : new reference$1.constructor(reference$1.attachments, this); + else bound = reference$1.$ === this ? reference$1 : new GenericRoot(reference$1.params, reference$1.bodyDef, reference$1.$, this, reference$1.hkt); + if (!this.resolved) Object.assign(this.referencesById, bound.referencesById); + return bound; + } + resolveRoot(name) { + return this.maybeResolveRoot(name) ?? 
throwParseError(writeUnresolvableMessage(name)); + } + maybeResolveRoot(name) { + const result = this.maybeResolve(name); + if (hasArkKind(result, "generic")) return; + return result; + } + /** If name is a valid reference to a submodule alias, return its resolution */ + maybeResolveSubalias(name) { + return maybeResolveSubalias(this.aliases, name) ?? maybeResolveSubalias(this.ambient, name); + } + get ambient() { + return $ark.ambient; + } + maybeResolve(name) { + const cached$1 = this.resolutions[name]; + if (cached$1) { + if (typeof cached$1 !== "string") return this.bindReference(cached$1); + const v = nodesByRegisteredId[cached$1]; + if (hasArkKind(v, "root")) return this.resolutions[name] = v; + if (hasArkKind(v, "context")) { + if (v.phase === "resolving") return this.node("alias", { reference: `$${name}` }, { prereduced: true }); + if (v.phase === "resolved") return throwInternalError(`Unexpected resolved context for was uncached by its scope: ${printable(v)}`); + v.phase = "resolving"; + const node$1 = this.bindReference(this.parseOwnDefinitionFormat(v.def, v)); + v.phase = "resolved"; + nodesByRegisteredId[node$1.id] = node$1; + nodesByRegisteredId[v.id] = node$1; + return this.resolutions[name] = node$1; + } + return throwInternalError(`Unexpected nodesById entry for ${cached$1}: ${printable(v)}`); + } + let def = this.aliases[name] ?? this.ambient?.[name]; + if (!def) return this.maybeResolveSubalias(name); + def = this.normalizeRootScopeValue(def); + if (hasArkKind(def, "generic")) return this.resolutions[name] = this.bindReference(def); + if (hasArkKind(def, "module")) { + if (!def.root) throwParseError(writeMissingSubmoduleAccessMessage(name)); + return this.resolutions[name] = this.bindReference(def.root); + } + return this.resolutions[name] = this.parse(def, { alias: name }); + } + createParseContext(input) { + const id = input.id ?? registerNodeId(input.prefix); + return nodesByRegisteredId[id] = Object.assign(input, { + [arkKind]: "context", + $: this, + id, + phase: "unresolved" + }); + } + traversal(root) { + return new Traversal(root, this.resolvedConfig); + } + import(...names) { + return new RootModule(flatMorph(this.export(...names), (alias, value$1) => [`#${alias}`, value$1])); + } + precompilation; + _exportedResolutions; + _exports; + export(...names) { + if (!this._exports) { + this._exports = {}; + for (const name of this.exportedNames) { + const def = this.aliases[name]; + this._exports[name] = hasArkKind(def, "module") ? bindModule(def, this) : bootstrapAliasReferences(this.maybeResolve(name)); + } + for (const node$1 of this.lazyResolutions) node$1.resolution; + this._exportedResolutions = resolutionsOfModule(this, this._exports); + this._json = resolutionsToJson(this._exportedResolutions); + Object.assign(this.resolutions, this._exportedResolutions); + this.references = Object.values(this.referencesById); + if (!this.resolvedConfig.jitless) { + const precompiler = precompileReferences(this.references); + this.precompilation = precompiler.write(rootScopeFnName, 4); + bindPrecompilation(this.references, precompiler); + } + this.resolved = true; + } + const namesToExport = names.length ? 
names : this.exportedNames; + return new RootModule(flatMorph(namesToExport, (_, name) => [name, this._exports[name]])); + } + resolve(name) { + return this.export()[name]; + } + node = (kinds, nodeSchema, opts = {}) => { + const ctxOrNode = this.preparseNode(kinds, nodeSchema, opts); + if (isNode(ctxOrNode)) return this.bindReference(ctxOrNode); + const ctx = this.createParseContext(ctxOrNode); + const node$1 = parseNode(ctx); + const bound = this.bindReference(node$1); + return nodesByRegisteredId[ctx.id] = bound; + }; + parse = (def, opts = {}) => this.finalize(this.parseDefinition(def, opts)); + parseDefinition(def, opts = {}) { + if (hasArkKind(def, "root")) return this.bindReference(def); + const ctxInputOrNode = this.preparseOwnDefinitionFormat(def, opts); + if (hasArkKind(ctxInputOrNode, "root")) return this.bindReference(ctxInputOrNode); + const ctx = this.createParseContext(ctxInputOrNode); + nodesByRegisteredId[ctx.id] = ctx; + let node$1 = this.bindReference(this.parseOwnDefinitionFormat(def, ctx)); + if (node$1.isCyclic) node$1 = withId(node$1, ctx.id); + nodesByRegisteredId[ctx.id] = node$1; + return node$1; + } + finalize(node$1) { + bootstrapAliasReferences(node$1); + if (!node$1.precompilation && !this.resolvedConfig.jitless) precompile(node$1.references); + return node$1; + } +}; +var SchemaScope = class extends BaseScope { + parseOwnDefinitionFormat(def, ctx) { + return parseNode(ctx); + } + preparseOwnDefinitionFormat(schema$1, opts) { + return this.preparseNode(schemaKindOf(schema$1), schema$1, opts); + } + preparseOwnAliasEntry(k, v) { + return [k, v]; + } + normalizeRootScopeValue(v) { + return v; + } +}; +const bootstrapAliasReferences = (resolution) => { + const aliases = resolution.references.filter((node$1) => node$1.hasKind("alias")); + for (const aliasNode of aliases) { + Object.assign(aliasNode.referencesById, aliasNode.resolution.referencesById); + for (const ref of resolution.references) if (aliasNode.id in ref.referencesById) Object.assign(ref.referencesById, aliasNode.referencesById); + } + return resolution; +}; +const resolutionsToJson = (resolutions) => flatMorph(resolutions, (k, v) => [k, hasArkKind(v, "root") || hasArkKind(v, "generic") ? v.json : hasArkKind(v, "module") ? resolutionsToJson(v) : throwInternalError(`Unexpected resolution ${printable(v)}`)]); +const maybeResolveSubalias = (base, name) => { + const dotIndex = name.indexOf("."); + if (dotIndex === -1) return; + const dotPrefix = name.slice(0, dotIndex); + const prefixSchema = base[dotPrefix]; + if (prefixSchema === void 0) return; + if (!hasArkKind(prefixSchema, "module")) return throwParseError(writeNonSubmoduleDotMessage(dotPrefix)); + const subalias = name.slice(dotIndex + 1); + const resolution = prefixSchema[subalias]; + if (resolution === void 0) return maybeResolveSubalias(prefixSchema, subalias); + if (hasArkKind(resolution, "root") || hasArkKind(resolution, "generic")) return resolution; + if (hasArkKind(resolution, "module")) return resolution.root ?? 
throwParseError(writeMissingSubmoduleAccessMessage(name)); + throwInternalError(`Unexpected resolution for alias '${name}': ${printable(resolution)}`); +}; +const schemaScope = (aliases, config) => new SchemaScope(aliases, config); +const rootSchemaScope = new SchemaScope({}); +const resolutionsOfModule = ($, typeSet) => { + const result = {}; + for (const k in typeSet) { + const v = typeSet[k]; + if (hasArkKind(v, "module")) { + const innerResolutions = resolutionsOfModule($, v); + const prefixedResolutions = flatMorph(innerResolutions, (innerK, innerV) => [`${k}.${innerK}`, innerV]); + Object.assign(result, prefixedResolutions); + } else if (hasArkKind(v, "root") || hasArkKind(v, "generic")) result[k] = v; + else throwInternalError(`Unexpected scope resolution ${printable(v)}`); + } + return result; +}; +const writeUnresolvableMessage = (token) => `'${token}' is unresolvable`; +const writeNonSubmoduleDotMessage = (name) => `'${name}' must reference a module to be accessed using dot syntax`; +const writeMissingSubmoduleAccessMessage = (name) => `Reference to submodule '${name}' must specify an alias`; +rootSchemaScope.export(); +const rootSchema = rootSchemaScope.schema; +const node = rootSchemaScope.node; +const defineSchema = rootSchemaScope.defineSchema; +const genericNode = rootSchemaScope.generic; + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/shared.js +const arrayIndexSource = `^(?:0|[1-9]\\d*)$`; +const arrayIndexMatcher = new RegExp(arrayIndexSource); +const arrayIndexMatcherReference = registeredReference(arrayIndexMatcher); + +//#endregion +//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/intrinsic.js +const intrinsicBases = schemaScope({ + bigint: "bigint", + boolean: [{ unit: false }, { unit: true }], + false: { unit: false }, + never: [], + null: { unit: null }, + number: "number", + object: "object", + string: "string", + symbol: "symbol", + true: { unit: true }, + unknown: {}, + undefined: { unit: void 0 }, + Array, + Date +}, { prereducedAliases: true }).export(); +$ark.intrinsic = { ...intrinsicBases }; +const intrinsicRoots = schemaScope({ + integer: { + domain: "number", + divisor: 1 + }, + lengthBoundable: ["string", Array], + key: ["string", "symbol"], + nonNegativeIntegerString: { + domain: "string", + pattern: arrayIndexSource + } +}, { prereducedAliases: true }).export(); +Object.assign($ark.intrinsic, intrinsicRoots); +const intrinsicJson = schemaScope({ + jsonPrimitive: [ + "string", + "number", + { unit: true }, + { unit: false }, + { unit: null } + ], + jsonObject: { + domain: "object", + index: { + signature: "string", + value: "$jsonData" + } + }, + jsonData: ["$jsonPrimitive", "$jsonObject"] +}, { prereducedAliases: true }).export(); +const intrinsic = { + ...intrinsicBases, + ...intrinsicRoots, + ...intrinsicJson, + emptyStructure: node("structure", {}, { prereduced: true }) +}; +$ark.intrinsic = { ...intrinsic }; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/date.js +const isDateLiteral = (value$1) => typeof value$1 === "string" && value$1[0] === "d" && (value$1[1] === "'" || value$1[1] === "\"") && value$1.at(-1) === value$1[1]; +const isValidDate = (d) => d.toString() !== "Invalid Date"; +const extractDateLiteralSource = (literal) => literal.slice(2, -1); +const writeInvalidDateMessage = (source) => `'${source}' could not be parsed by the Date constructor`; +const tryParseDate = (source, errorOnFail) => 
maybeParseDate(source, errorOnFail); +const maybeParseDate = (source, errorOnFail) => { + const stringParsedDate = new Date(source); + if (isValidDate(stringParsedDate)) return stringParsedDate; + const epochMillis = tryParseNumber(source); + if (epochMillis !== void 0) { + const numberParsedDate = new Date(epochMillis); + if (isValidDate(numberParsedDate)) return numberParsedDate; + } + return errorOnFail ? throwParseError(errorOnFail === true ? writeInvalidDateMessage(source) : errorOnFail) : void 0; +}; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/enclosed.js +const parseEnclosed = (s, enclosing) => { + const enclosed = s.scanner.shiftUntil(untilLookaheadIsClosing[enclosingTokens[enclosing]]); + if (s.scanner.lookahead === "") return s.error(writeUnterminatedEnclosedMessage(enclosed, enclosing)); + s.scanner.shift(); + if (enclosing === "/") { + try { + new RegExp(enclosed); + } catch (e) { + throwParseError(String(e)); + } + s.root = s.ctx.$.node("intersection", { + domain: "string", + pattern: enclosed + }, { prereduced: true }); + } else if (isKeyOf(enclosing, enclosingQuote)) s.root = s.ctx.$.node("unit", { unit: enclosed }); + else { + const date = tryParseDate(enclosed, writeInvalidDateMessage(enclosed)); + s.root = s.ctx.$.node("unit", { + meta: enclosed, + unit: date + }); + } +}; +const enclosingQuote = { + "'": 1, + "\"": 1 +}; +const enclosingChar = { + "/": 1, + "'": 1, + "\"": 1 +}; +const enclosingTokens = { + "d'": "'", + "d\"": "\"", + "'": "'", + "\"": "\"", + "/": "/" +}; +const untilLookaheadIsClosing = { + "'": (scanner) => scanner.lookahead === `'`, + "\"": (scanner) => scanner.lookahead === `"`, + "/": (scanner) => scanner.lookahead === `/` +}; +const enclosingCharDescriptions = { + "\"": "double-quote", + "'": "single-quote", + "/": "forward slash" +}; +const writeUnterminatedEnclosedMessage = (fragment, enclosingStart) => `${enclosingStart}${fragment} requires a closing ${enclosingCharDescriptions[enclosingTokens[enclosingStart]]}`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/ast/validate.js +const writePrefixedPrivateReferenceMessage = (name) => `Private type references should not include '#'. Use '${name}' instead.`; +const shallowOptionalMessage = "Optional definitions like 'string?' are only valid as properties in an object or tuple"; +const shallowDefaultableMessage = "Defaultable definitions like 'number = 0' are only valid as properties in an object or tuple"; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/reduce/shared.js +const minComparators = { + ">": true, + ">=": true +}; +const maxComparators = { + "<": true, + "<=": true +}; +const invertedComparators = { + "<": ">", + ">": "<", + "<=": ">=", + ">=": "<=", + "==": "==" +}; +const writeUnmatchedGroupCloseMessage = (unscanned) => `Unmatched )${unscanned === "" ? 
"" : ` before ${unscanned}`}`; +const writeUnclosedGroupMessage = (missingChar) => `Missing ${missingChar}`; +const writeOpenRangeMessage = (min, comparator) => `Left bounds are only valid when paired with right bounds (try ...${comparator}${min})`; +const writeUnpairableComparatorMessage = (comparator) => `Left-bounded expressions must specify their limits using < or <= (was ${comparator})`; +const writeMultipleLeftBoundsMessage = (openLimit, openComparator, limit, comparator) => `An expression may have at most one left bound (parsed ${openLimit}${invertedComparators[openComparator]}, ${limit}${invertedComparators[comparator]})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/genericArgs.js +const parseGenericArgs = (name, g, s) => _parseGenericArgs(name, g, s, []); +const _parseGenericArgs = (name, g, s, argNodes) => { + const argState = s.parseUntilFinalizer(); + argNodes.push(argState.root); + if (argState.finalizer === ">") { + if (argNodes.length !== g.params.length) return s.error(writeInvalidGenericArgCountMessage(name, g.names, argNodes.map((arg) => arg.expression))); + return argNodes; + } + if (argState.finalizer === ",") return _parseGenericArgs(name, g, s, argNodes); + return argState.error(writeUnclosedGroupMessage(">")); +}; +const writeInvalidGenericArgCountMessage = (name, params, argDefs) => `${name}<${params.join(", ")}> requires exactly ${params.length} args (got ${argDefs.length}${argDefs.length === 0 ? "" : `: ${argDefs.join(", ")}`})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/unenclosed.js +const parseUnenclosed = (s) => { + const token = s.scanner.shiftUntilNextTerminator(); + if (token === "keyof") s.addPrefix("keyof"); + else s.root = unenclosedToNode(s, token); +}; +const parseGenericInstantiation = (name, g, s) => { + s.scanner.shiftUntilNonWhitespace(); + const lookahead = s.scanner.shift(); + if (lookahead !== "<") return s.error(writeInvalidGenericArgCountMessage(name, g.names, [])); + const parsedArgs = parseGenericArgs(name, g, s); + return g(...parsedArgs); +}; +const unenclosedToNode = (s, token) => maybeParseReference(s, token) ?? maybeParseUnenclosedLiteral(s, token) ?? s.error(token === "" ? s.scanner.lookahead === "#" ? writePrefixedPrivateReferenceMessage(s.shiftedByOne().scanner.shiftUntilNextTerminator()) : writeMissingOperandMessage(s) : writeUnresolvableMessage(token)); +const maybeParseReference = (s, token) => { + if (s.ctx.args?.[token]) { + const arg = s.ctx.args[token]; + if (typeof arg !== "string") return arg; + return s.ctx.$.node("alias", { reference: arg }, { prereduced: true }); + } + const resolution = s.ctx.$.maybeResolve(token); + if (hasArkKind(resolution, "root")) return resolution; + if (resolution === void 0) return; + if (hasArkKind(resolution, "generic")) return parseGenericInstantiation(token, resolution, s); + return throwParseError(`Unexpected resolution ${printable(resolution)}`); +}; +const maybeParseUnenclosedLiteral = (s, token) => { + const maybeNumber = tryParseWellFormedNumber(token); + if (maybeNumber !== void 0) return s.ctx.$.node("unit", { unit: maybeNumber }); + const maybeBigint = tryParseWellFormedBigint(token); + if (maybeBigint !== void 0) return s.ctx.$.node("unit", { unit: maybeBigint }); +}; +const writeMissingOperandMessage = (s) => { + const operator = s.previousOperator(); + return operator ? 
writeMissingRightOperandMessage(operator, s.scanner.unscanned) : writeExpressionExpectedMessage(s.scanner.unscanned); +}; +const writeMissingRightOperandMessage = (token, unscanned = "") => `Token '${token}' requires a right operand${unscanned ? ` before '${unscanned}'` : ""}`; +const writeExpressionExpectedMessage = (unscanned) => `Expected an expression${unscanned ? ` before '${unscanned}'` : ""}`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/operand.js +const parseOperand = (s) => s.scanner.lookahead === "" ? s.error(writeMissingOperandMessage(s)) : s.scanner.lookahead === "(" ? s.shiftedByOne().reduceGroupOpen() : s.scanner.lookaheadIsIn(enclosingChar) ? parseEnclosed(s, s.scanner.shift()) : s.scanner.lookaheadIsIn(whitespaceChars) ? parseOperand(s.shiftedByOne()) : s.scanner.lookahead === "d" ? s.scanner.nextLookahead in enclosingQuote ? parseEnclosed(s, `${s.scanner.shift()}${s.scanner.shift()}`) : parseUnenclosed(s) : parseUnenclosed(s); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/scanner.js +var ArkTypeScanner = class ArkTypeScanner extends Scanner { + shiftUntilNextTerminator() { + this.shiftUntilNonWhitespace(); + return this.shiftUntil(() => this.lookahead in ArkTypeScanner.terminatingChars); + } + static terminatingChars = { + "<": 1, + ">": 1, + "=": 1, + "|": 1, + "&": 1, + ")": 1, + "[": 1, + "%": 1, + ",": 1, + ":": 1, + "?": 1, + "#": 1, + ...whitespaceChars + }; + static finalizingLookaheads = { + ">": 1, + ",": 1, + "": 1, + "=": 1, + "?": 1 + }; + static lookaheadIsFinalizing = (lookahead, unscanned) => lookahead === ">" ? unscanned[0] === "=" ? unscanned[1] === "=" : unscanned.trimStart() === "" || isKeyOf(unscanned.trimStart()[0], ArkTypeScanner.terminatingChars) : lookahead === "=" ? unscanned[0] !== "=" : lookahead === "," || lookahead === "?"; +}; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/bounds.js +const parseBound = (s, start) => { + const comparator = shiftComparator(s, start); + if (s.root.hasKind("unit")) { + if (typeof s.root.unit === "number") { + s.reduceLeftBound(s.root.unit, comparator); + s.unsetRoot(); + return; + } + if (s.root.unit instanceof Date) { + const literal = `d'${s.root.description ?? s.root.unit.toISOString()}'`; + s.unsetRoot(); + s.reduceLeftBound(literal, comparator); + return; + } + } + return parseRightBound(s, comparator); +}; +const comparatorStartChars = { + "<": 1, + ">": 1, + "=": 1 +}; +const shiftComparator = (s, start) => s.scanner.lookaheadIs("=") ? `${start}${s.scanner.shift()}` : start; +const getBoundKinds = (comparator, limit, root, boundKind) => { + if (root.extends($ark.intrinsic.number)) { + if (typeof limit !== "number") return throwParseError(writeInvalidLimitMessage(comparator, limit, boundKind)); + return comparator === "==" ? ["min", "max"] : comparator[0] === ">" ? ["min"] : ["max"]; + } + if (root.extends($ark.intrinsic.lengthBoundable)) { + if (typeof limit !== "number") return throwParseError(writeInvalidLimitMessage(comparator, limit, boundKind)); + return comparator === "==" ? ["exactLength"] : comparator[0] === ">" ? ["minLength"] : ["maxLength"]; + } + if (root.extends($ark.intrinsic.Date)) return comparator === "==" ? ["after", "before"] : comparator[0] === ">" ? 
["after"] : ["before"]; + return throwParseError(writeUnboundableMessage(root.expression)); +}; +const openLeftBoundToRoot = (leftBound) => ({ + rule: isDateLiteral(leftBound.limit) ? extractDateLiteralSource(leftBound.limit) : leftBound.limit, + exclusive: leftBound.comparator.length === 1 +}); +const parseRightBound = (s, comparator) => { + const previousRoot = s.unsetRoot(); + const previousScannerIndex = s.scanner.location; + s.parseOperand(); + const limitNode = s.unsetRoot(); + const limitToken = s.scanner.sliceChars(previousScannerIndex, s.scanner.location); + s.root = previousRoot; + if (!limitNode.hasKind("unit") || typeof limitNode.unit !== "number" && !(limitNode.unit instanceof Date)) return s.error(writeInvalidLimitMessage(comparator, limitToken, "right")); + const limit = limitNode.unit; + const exclusive = comparator.length === 1; + const boundKinds = getBoundKinds(comparator, typeof limit === "number" ? limit : limitToken, previousRoot, "right"); + for (const kind of boundKinds) s.constrainRoot(kind, comparator === "==" ? { rule: limit } : { + rule: limit, + exclusive + }); + if (!s.branches.leftBound) return; + if (!isKeyOf(comparator, maxComparators)) return s.error(writeUnpairableComparatorMessage(comparator)); + const lowerBoundKind = getBoundKinds(s.branches.leftBound.comparator, s.branches.leftBound.limit, previousRoot, "left"); + s.constrainRoot(lowerBoundKind[0], openLeftBoundToRoot(s.branches.leftBound)); + s.branches.leftBound = null; +}; +const writeInvalidLimitMessage = (comparator, limit, boundKind) => `Comparator ${boundKind === "left" ? invertedComparators[comparator] : comparator} must be ${boundKind === "left" ? "preceded" : "followed"} by a corresponding literal (was ${limit})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/brand.js +const parseBrand = (s) => { + s.scanner.shiftUntilNonWhitespace(); + const brandName = s.scanner.shiftUntilNextTerminator(); + s.root = s.root.brand(brandName); +}; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/divisor.js +const parseDivisor = (s) => { + const divisorToken = s.scanner.shiftUntilNextTerminator(); + const divisor = tryParseInteger(divisorToken, { errorOnFail: writeInvalidDivisorMessage(divisorToken) }); + if (divisor === 0) s.error(writeInvalidDivisorMessage(0)); + s.root = s.root.constrain("divisor", divisor); +}; +const writeInvalidDivisorMessage = (divisor) => `% operator must be followed by a non-zero integer literal (was ${divisor})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/operator.js +const parseOperator = (s) => { + const lookahead = s.scanner.shift(); + return lookahead === "" ? s.finalize("") : lookahead === "[" ? s.scanner.shift() === "]" ? s.setRoot(s.root.array()) : s.error(incompleteArrayTokenMessage) : lookahead === "|" ? s.scanner.lookahead === ">" ? s.shiftedByOne().pushRootToBranch("|>") : s.pushRootToBranch(lookahead) : lookahead === "&" ? s.pushRootToBranch(lookahead) : lookahead === ")" ? s.finalizeGroup() : ArkTypeScanner.lookaheadIsFinalizing(lookahead, s.scanner.unscanned) ? s.finalize(lookahead) : isKeyOf(lookahead, comparatorStartChars) ? parseBound(s, lookahead) : lookahead === "%" ? parseDivisor(s) : lookahead === "#" ? parseBrand(s) : lookahead in whitespaceChars ? 
parseOperator(s) : s.error(writeUnexpectedCharacterMessage(lookahead)); +}; +const writeUnexpectedCharacterMessage = (char, shouldBe = "") => `'${char}' is not allowed here${shouldBe && ` (should be ${shouldBe})`}`; +const incompleteArrayTokenMessage = `Missing expected ']'`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/default.js +const parseDefault = (s) => { + const baseNode = s.unsetRoot(); + s.parseOperand(); + const defaultNode = s.unsetRoot(); + if (!defaultNode.hasKind("unit")) return s.error(writeNonLiteralDefaultMessage(defaultNode.expression)); + const defaultValue = defaultNode.unit instanceof Date ? () => new Date(defaultNode.unit) : defaultNode.unit; + return [ + baseNode, + "=", + defaultValue + ]; +}; +const writeNonLiteralDefaultMessage = (defaultDef) => `Default value '${defaultDef}' must a literal value`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/string.js +const parseString = (def, ctx) => { + const aliasResolution = ctx.$.maybeResolveRoot(def); + if (aliasResolution) return aliasResolution; + if (def.endsWith("[]")) { + const possibleElementResolution = ctx.$.maybeResolveRoot(def.slice(0, -2)); + if (possibleElementResolution) return possibleElementResolution.array(); + } + const s = new DynamicState(new ArkTypeScanner(def), ctx); + const node$1 = fullStringParse(s); + if (s.finalizer === ">") throwParseError(writeUnexpectedCharacterMessage(">")); + return node$1; +}; +const fullStringParse = (s) => { + s.parseOperand(); + let result = parseUntilFinalizer(s).root; + if (!result) return throwInternalError(`Root was unexpectedly unset after parsing string '${s.scanner.scanned}'`); + if (s.finalizer === "=") result = parseDefault(s); + else if (s.finalizer === "?") result = [result, "?"]; + s.scanner.shiftUntilNonWhitespace(); + if (s.scanner.lookahead) throwParseError(writeUnexpectedCharacterMessage(s.scanner.lookahead)); + return result; +}; +const parseUntilFinalizer = (s) => { + while (s.finalizer === void 0) next(s); + return s; +}; +const next = (s) => s.hasRoot() ? 
s.parseOperator() : s.parseOperand(); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/reduce/dynamic.js +var DynamicState = class DynamicState { + root; + branches = { + prefixes: [], + leftBound: null, + intersection: null, + union: null, + pipe: null + }; + finalizer; + groups = []; + scanner; + ctx; + constructor(scanner, ctx) { + this.scanner = scanner; + this.ctx = ctx; + } + error(message) { + return throwParseError(message); + } + hasRoot() { + return this.root !== void 0; + } + setRoot(root) { + this.root = root; + } + unsetRoot() { + const value$1 = this.root; + this.root = void 0; + return value$1; + } + constrainRoot(...args$1) { + this.root = this.root.constrain(args$1[0], args$1[1]); + } + finalize(finalizer) { + if (this.groups.length) return this.error(writeUnclosedGroupMessage(")")); + this.finalizeBranches(); + this.finalizer = finalizer; + } + reduceLeftBound(limit, comparator) { + const invertedComparator = invertedComparators[comparator]; + if (!isKeyOf(invertedComparator, minComparators)) return this.error(writeUnpairableComparatorMessage(comparator)); + if (this.branches.leftBound) return this.error(writeMultipleLeftBoundsMessage(this.branches.leftBound.limit, this.branches.leftBound.comparator, limit, invertedComparator)); + this.branches.leftBound = { + comparator: invertedComparator, + limit + }; + } + finalizeBranches() { + this.assertRangeUnset(); + if (this.branches.pipe) { + this.pushRootToBranch("|>"); + this.root = this.branches.pipe; + return; + } + if (this.branches.union) { + this.pushRootToBranch("|"); + this.root = this.branches.union; + return; + } + if (this.branches.intersection) { + this.pushRootToBranch("&"); + this.root = this.branches.intersection; + return; + } + this.applyPrefixes(); + } + finalizeGroup() { + this.finalizeBranches(); + const topBranchState = this.groups.pop(); + if (!topBranchState) return this.error(writeUnmatchedGroupCloseMessage(this.scanner.unscanned)); + this.branches = topBranchState; + } + addPrefix(prefix) { + this.branches.prefixes.push(prefix); + } + applyPrefixes() { + while (this.branches.prefixes.length) { + const lastPrefix = this.branches.prefixes.pop(); + this.root = lastPrefix === "keyof" ? this.root.keyof() : throwInternalError(`Unexpected prefix '${lastPrefix}'`); + } + } + pushRootToBranch(token) { + this.assertRangeUnset(); + this.applyPrefixes(); + const root = this.root; + this.root = void 0; + this.branches.intersection = this.branches.intersection?.rawAnd(root) ?? root; + if (token === "&") return; + this.branches.union = this.branches.union?.rawOr(this.branches.intersection) ?? this.branches.intersection; + this.branches.intersection = null; + if (token === "|") return; + this.branches.pipe = this.branches.pipe?.rawPipeOnce(this.branches.union) ?? this.branches.union; + this.branches.union = null; + } + parseUntilFinalizer() { + return parseUntilFinalizer(new DynamicState(this.scanner, this.ctx)); + } + parseOperator() { + return parseOperator(this); + } + parseOperand() { + return parseOperand(this); + } + assertRangeUnset() { + if (this.branches.leftBound) return this.error(writeOpenRangeMessage(this.branches.leftBound.limit, this.branches.leftBound.comparator)); + } + reduceGroupOpen() { + this.groups.push(this.branches); + this.branches = { + prefixes: [], + leftBound: null, + union: null, + intersection: null, + pipe: null + }; + } + previousOperator() { + return this.branches.leftBound?.comparator ?? this.branches.prefixes.at(-1) ?? 
(this.branches.intersection ? "&" : this.branches.union ? "|" : this.branches.pipe ? "|>" : void 0); + } + shiftedByOne() { + this.scanner.shift(); + return this; + } +}; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/generic.js +const emptyGenericParameterMessage = "An empty string is not a valid generic parameter name"; +const parseGenericParamName = (scanner, result, ctx) => { + scanner.shiftUntilNonWhitespace(); + const name = scanner.shiftUntilNextTerminator(); + if (name === "") { + if (scanner.lookahead === "" && result.length) return result; + return throwParseError(emptyGenericParameterMessage); + } + scanner.shiftUntilNonWhitespace(); + return _parseOptionalConstraint(scanner, name, result, ctx); +}; +const extendsToken = "extends "; +const _parseOptionalConstraint = (scanner, name, result, ctx) => { + scanner.shiftUntilNonWhitespace(); + if (scanner.unscanned.startsWith(extendsToken)) scanner.jumpForward(8); + else { + if (scanner.lookahead === ",") scanner.shift(); + result.push(name); + return parseGenericParamName(scanner, result, ctx); + } + const s = parseUntilFinalizer(new DynamicState(scanner, ctx)); + result.push([name, s.root]); + return parseGenericParamName(scanner, result, ctx); +}; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/match.js +var InternalMatchParser = class extends Callable { + $; + constructor($) { + super((...args$1) => new InternalChainedMatchParser($)(...args$1), { bind: $ }); + this.$ = $; + } + in(def) { + return new InternalChainedMatchParser(this.$, def === void 0 ? void 0 : this.$.parse(def)); + } + at(key, cases) { + return new InternalChainedMatchParser(this.$).at(key, cases); + } + case(when, then) { + return new InternalChainedMatchParser(this.$).case(when, then); + } +}; +var InternalChainedMatchParser = class extends Callable { + $; + in; + key; + branches = []; + constructor($, In) { + super((cases) => this.caseEntries(Object.entries(cases).map(([k, v]) => k === "default" ? [k, v] : [this.$.parse(k), v]))); + this.$ = $; + this.in = In; + } + at(key, cases) { + if (this.key) throwParseError(doubleAtMessage); + if (this.branches.length) throwParseError(chainedAtMessage); + this.key = key; + return cases ? this.match(cases) : this; + } + case(def, resolver) { + return this.caseEntry(this.$.parse(def), resolver); + } + caseEntry(node$1, resolver) { + const wrappableNode = this.key ? this.$.parse({ [this.key]: node$1 }) : node$1; + const branch = wrappableNode.pipe(resolver); + this.branches.push(branch); + return this; + } + match(cases) { + return this(cases); + } + strings(cases) { + return this.caseEntries(Object.entries(cases).map(([k, v]) => k === "default" ? 
[k, v] : [this.$.node("unit", { unit: k }), v])); + } + caseEntries(entries) { + for (let i = 0; i < entries.length; i++) { + const [k, v] = entries[i]; + if (k === "default") { + if (i !== entries.length - 1) throwParseError(`default may only be specified as the last key of a switch definition`); + return this.default(v); + } + if (typeof v !== "function") return throwParseError(`Value for case "${k}" must be a function (was ${domainOf(v)})`); + this.caseEntry(k, v); + } + return this; + } + default(defaultCase) { + if (typeof defaultCase === "function") this.case(intrinsic.unknown, defaultCase); + const schema$1 = { + branches: this.branches, + ordered: true + }; + if (defaultCase === "never" || defaultCase === "assert") schema$1.meta = { onFail: throwOnDefault }; + const cases = this.$.node("union", schema$1); + if (!this.in) return this.$.finalize(cases); + let inputValidatedCases = this.in.pipe(cases); + if (defaultCase === "never" || defaultCase === "assert") inputValidatedCases = inputValidatedCases.configureReferences({ onFail: throwOnDefault }, "self"); + return this.$.finalize(inputValidatedCases); + } +}; +const throwOnDefault = (errors) => errors.throw(); +const chainedAtMessage = `A key matcher must be specified before the first case i.e. match.at('foo') or match.in().at('bar')`; +const doubleAtMessage = `At most one key matcher may be specified per expression`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/property.js +const parseProperty = (def, ctx) => { + if (isArray(def)) { + if (def[1] === "=") return [ + ctx.$.parseOwnDefinitionFormat(def[0], ctx), + "=", + def[2] + ]; + if (def[1] === "?") return [ctx.$.parseOwnDefinitionFormat(def[0], ctx), "?"]; + } + return parseInnerDefinition(def, ctx); +}; +const invalidOptionalKeyKindMessage = `Only required keys may make their values optional, e.g. { [mySymbol]: ['number', '?'] }`; +const invalidDefaultableKeyKindMessage = `Only required keys may specify default values, e.g. { value: 'number = 0' }`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/objectLiteral.js +const parseObjectLiteral = (def, ctx) => { + let spread; + const structure = {}; + const defEntries = stringAndSymbolicEntriesOf(def); + for (const [k, v] of defEntries) { + const parsedKey = preparseKey(k); + if (parsedKey.kind === "spread") { + if (!isEmptyObject(structure)) return throwParseError(nonLeadingSpreadError); + const operand = ctx.$.parseOwnDefinitionFormat(v, ctx); + if (operand.equals(intrinsic.object)) continue; + if (!operand.hasKind("intersection") || !operand.basis?.equals(intrinsic.object)) return throwParseError(writeInvalidSpreadTypeMessage(operand.expression)); + spread = operand.structure; + continue; + } + if (parsedKey.kind === "undeclared") { + if (v !== "reject" && v !== "delete" && v !== "ignore") throwParseError(writeInvalidUndeclaredBehaviorMessage(v)); + structure.undeclared = v; + continue; + } + const parsedValue = parseProperty(v, ctx); + const parsedEntryKey = parsedKey; + if (parsedKey.kind === "required") { + if (!isArray(parsedValue)) appendNamedProp(structure, "required", { + key: parsedKey.normalized, + value: parsedValue + }, ctx); + else appendNamedProp(structure, "optional", parsedValue[1] === "=" ? 
{ + key: parsedKey.normalized, + value: parsedValue[0], + default: parsedValue[2] + } : { + key: parsedKey.normalized, + value: parsedValue[0] + }, ctx); + continue; + } + if (isArray(parsedValue)) { + if (parsedValue[1] === "?") throwParseError(invalidOptionalKeyKindMessage); + if (parsedValue[1] === "=") throwParseError(invalidDefaultableKeyKindMessage); + } + if (parsedKey.kind === "optional") { + appendNamedProp(structure, "optional", { + key: parsedKey.normalized, + value: parsedValue + }, ctx); + continue; + } + const signature = ctx.$.parseOwnDefinitionFormat(parsedEntryKey.normalized, ctx); + const normalized = normalizeIndex(signature, parsedValue, ctx.$); + if (normalized.index) structure.index = append(structure.index, normalized.index); + if (normalized.required) structure.required = append(structure.required, normalized.required); + } + const structureNode = ctx.$.node("structure", structure); + return ctx.$.parseSchema({ + domain: "object", + structure: spread?.merge(structureNode) ?? structureNode + }); +}; +const appendNamedProp = (structure, kind, inner, ctx) => { + structure[kind] = append(structure[kind], ctx.$.node(kind, inner)); +}; +const writeInvalidUndeclaredBehaviorMessage = (actual) => `Value of '+' key must be 'reject', 'delete', or 'ignore' (was ${printable(actual)})`; +const nonLeadingSpreadError = "Spread operator may only be used as the first key in an object"; +const preparseKey = (key) => typeof key === "symbol" ? { + kind: "required", + normalized: key +} : key.at(-1) === "?" ? key.at(-2) === escapeChar ? { + kind: "required", + normalized: `${key.slice(0, -2)}?` +} : { + kind: "optional", + normalized: key.slice(0, -1) +} : key[0] === "[" && key.at(-1) === "]" ? { + kind: "index", + normalized: key.slice(1, -1) +} : key[0] === escapeChar && key[1] === "[" && key.at(-1) === "]" ? { + kind: "required", + normalized: key.slice(1) +} : key === "..." ? { kind: "spread" } : key === "+" ? { kind: "undeclared" } : { + kind: "required", + normalized: key === "\\..." ? "..." : key === "\\+" ? "+" : key +}; +const writeInvalidSpreadTypeMessage = (def) => `Spread operand must resolve to an object literal type (was ${def})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/tupleExpressions.js +const maybeParseTupleExpression = (def, ctx) => isIndexZeroExpression(def) ? indexZeroParsers[def[0]](def, ctx) : isIndexOneExpression(def) ? indexOneParsers[def[1]](def, ctx) : null; +const parseKeyOfTuple = (def, ctx) => ctx.$.parseOwnDefinitionFormat(def[1], ctx).keyof(); +const parseBranchTuple = (def, ctx) => { + if (def[2] === void 0) return throwParseError(writeMissingRightOperandMessage(def[1], "")); + const l = ctx.$.parseOwnDefinitionFormat(def[0], ctx); + const r = ctx.$.parseOwnDefinitionFormat(def[2], ctx); + if (def[1] === "|") return ctx.$.node("union", { branches: [l, r] }); + const result = def[1] === "&" ? intersectNodesRoot(l, r, ctx.$) : pipeNodesRoot(l, r, ctx.$); + if (result instanceof Disjoint) return result.throw(); + return result; +}; +const parseArrayTuple = (def, ctx) => ctx.$.parseOwnDefinitionFormat(def[0], ctx).array(); +const parseMorphTuple = (def, ctx) => { + if (typeof def[2] !== "function") return throwParseError(writeMalformedFunctionalExpressionMessage("=>", def[2])); + return ctx.$.parseOwnDefinitionFormat(def[0], ctx).pipe(def[2]); +}; +const writeMalformedFunctionalExpressionMessage = (operator, value$1) => `${operator === ":" ? 
"Narrow" : "Morph"} expression requires a function following '${operator}' (was ${typeof value$1})`; +const parseNarrowTuple = (def, ctx) => { + if (typeof def[2] !== "function") return throwParseError(writeMalformedFunctionalExpressionMessage(":", def[2])); + return ctx.$.parseOwnDefinitionFormat(def[0], ctx).constrain("predicate", def[2]); +}; +const parseAttributeTuple = (def, ctx) => ctx.$.parseOwnDefinitionFormat(def[0], ctx).configureReferences(def[2], "shallow"); +const defineIndexOneParsers = (parsers) => parsers; +const postfixParsers = defineIndexOneParsers({ + "[]": parseArrayTuple, + "?": () => throwParseError(shallowOptionalMessage) +}); +const infixParsers = defineIndexOneParsers({ + "|": parseBranchTuple, + "&": parseBranchTuple, + ":": parseNarrowTuple, + "=>": parseMorphTuple, + "|>": parseBranchTuple, + "@": parseAttributeTuple, + "=": () => throwParseError(shallowDefaultableMessage) +}); +const indexOneParsers = { + ...postfixParsers, + ...infixParsers +}; +const isIndexOneExpression = (def) => indexOneParsers[def[1]] !== void 0; +const defineIndexZeroParsers = (parsers) => parsers; +const indexZeroParsers = defineIndexZeroParsers({ + keyof: parseKeyOfTuple, + instanceof: (def, ctx) => { + if (typeof def[1] !== "function") return throwParseError(writeInvalidConstructorMessage(objectKindOrDomainOf(def[1]))); + const branches = def.slice(1).map((ctor) => typeof ctor === "function" ? ctx.$.node("proto", { proto: ctor }) : throwParseError(writeInvalidConstructorMessage(objectKindOrDomainOf(ctor)))); + return branches.length === 1 ? branches[0] : ctx.$.node("union", { branches }); + }, + "===": (def, ctx) => ctx.$.units(def.slice(1)) +}); +const isIndexZeroExpression = (def) => indexZeroParsers[def[0]] !== void 0; +const writeInvalidConstructorMessage = (actual) => `Expected a constructor following 'instanceof' operator (was ${actual})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/tupleLiteral.js +const parseTupleLiteral = (def, ctx) => { + let sequences = [{}]; + let i = 0; + while (i < def.length) { + let spread = false; + if (def[i] === "..." && i < def.length - 1) { + spread = true; + i++; + } + const parsedProperty = parseProperty(def[i], ctx); + const [valueNode, operator, possibleDefaultValue] = !isArray(parsedProperty) ? [parsedProperty] : parsedProperty; + i++; + if (spread) { + if (!valueNode.extends($ark.intrinsic.Array)) return throwParseError(writeNonArraySpreadMessage(valueNode.expression)); + sequences = sequences.flatMap((base) => valueNode.distribute((branch) => appendSpreadBranch(makeRootAndArrayPropertiesMutable(base), branch))); + } else sequences = sequences.map((base) => { + if (operator === "?") return appendOptionalElement(base, valueNode); + if (operator === "=") return appendDefaultableElement(base, valueNode, possibleDefaultValue); + return appendRequiredElement(base, valueNode); + }); + } + return ctx.$.parseSchema(sequences.map((sequence) => isEmptyObject(sequence) ? { + proto: Array, + exactLength: 0 + } : { + proto: Array, + sequence + })); +}; +const appendRequiredElement = (base, element) => { + if (base.defaultables || base.optionals) return throwParseError(base.variadic ? 
postfixAfterOptionalOrDefaultableMessage : requiredPostOptionalMessage); + if (base.variadic) base.postfix = append(base.postfix, element); + else base.prefix = append(base.prefix, element); + return base; +}; +const appendOptionalElement = (base, element) => { + if (base.variadic) return throwParseError(optionalOrDefaultableAfterVariadicMessage); + base.optionals = append(base.optionals, element); + return base; +}; +const appendDefaultableElement = (base, element, value$1) => { + if (base.variadic) return throwParseError(optionalOrDefaultableAfterVariadicMessage); + if (base.optionals) return throwParseError(defaultablePostOptionalMessage); + base.defaultables = append(base.defaultables, [[element, value$1]]); + return base; +}; +const appendVariadicElement = (base, element) => { + if (base.postfix) throwParseError(multipleVariadicMesage); + if (base.variadic) { + if (!base.variadic.equals(element)) throwParseError(multipleVariadicMesage); + } else base.variadic = element.internal; + return base; +}; +const appendSpreadBranch = (base, branch) => { + const spread = branch.select({ + method: "find", + kind: "sequence" + }); + if (!spread) return appendVariadicElement(base, $ark.intrinsic.unknown); + if (spread.prefix) for (const node$1 of spread.prefix) appendRequiredElement(base, node$1); + if (spread.optionals) for (const node$1 of spread.optionals) appendOptionalElement(base, node$1); + if (spread.variadic) appendVariadicElement(base, spread.variadic); + if (spread.postfix) for (const node$1 of spread.postfix) appendRequiredElement(base, node$1); + return base; +}; +const writeNonArraySpreadMessage = (operand) => `Spread element must be an array (was ${operand})`; +const multipleVariadicMesage = "A tuple may have at most one variadic element"; +const requiredPostOptionalMessage = "A required element may not follow an optional element"; +const optionalOrDefaultableAfterVariadicMessage = "An optional element may not follow a variadic element"; +const defaultablePostOptionalMessage = "A defaultable element may not follow an optional element without a default"; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/definition.js +const parseCache = {}; +const parseInnerDefinition = (def, ctx) => { + if (typeof def === "string") { + if (ctx.args && Object.keys(ctx.args).some((k) => def.includes(k))) return parseString(def, ctx); + const scopeCache = parseCache[ctx.$.name] ??= {}; + return scopeCache[def] ??= parseString(def, ctx); + } + return hasDomain(def, "object") ? parseObject(def, ctx) : throwParseError(writeBadDefinitionTypeMessage(domainOf(def))); +}; +const parseObject = (def, ctx) => { + const objectKind = objectKindOf(def); + switch (objectKind) { + case void 0: + if (hasArkKind(def, "root")) return def; + return parseObjectLiteral(def, ctx); + case "Array": return parseTuple(def, ctx); + case "RegExp": return ctx.$.node("intersection", { + domain: "string", + pattern: def + }, { prereduced: true }); + case "Function": { + const resolvedDef = isThunk(def) ? def() : def; + if (hasArkKind(resolvedDef, "root")) return resolvedDef; + return throwParseError(writeBadDefinitionTypeMessage("Function")); + } + default: return throwParseError(writeBadDefinitionTypeMessage(objectKind ?? printable(def))); + } +}; +const parseTuple = (def, ctx) => maybeParseTupleExpression(def, ctx) ?? 
parseTupleLiteral(def, ctx); +const writeBadDefinitionTypeMessage = (actual) => `Type definitions must be strings or objects (was ${actual})`; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/type.js +var InternalTypeParser = class extends Callable { + constructor($) { + const attach = Object.assign({ + errors: ArkErrors, + hkt: Hkt, + $, + raw: $.parse, + module: $.constructor.module, + scope: $.constructor.scope, + define: $.define, + match: $.match, + generic: $.generic, + schema: $.schema, + keywords: $.ambient, + unit: $.unit, + enumerated: $.enumerated, + instanceOf: $.instanceOf, + valueOf: $.valueOf, + or: $.or, + and: $.and, + merge: $.merge, + pipe: $.pipe + }, $.ambientAttachments); + super((...args$1) => { + if (args$1.length === 1) return $.parse(args$1[0]); + if (args$1.length === 2 && typeof args$1[0] === "string" && args$1[0][0] === "<" && args$1[0].at(-1) === ">") { + const paramString = args$1[0].slice(1, -1); + const params = $.parseGenericParams(paramString, {}); + return new GenericRoot(params, args$1[1], $, $, null); + } + return $.parse(args$1); + }, { + bind: $, + attach + }); + } +}; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/scope.js +const $arkTypeRegistry = $ark; +var InternalScope = class InternalScope extends BaseScope { + get ambientAttachments() { + if (!$arkTypeRegistry.typeAttachments) return; + return this.cacheGetter("ambientAttachments", flatMorph($arkTypeRegistry.typeAttachments, (k, v) => [k, this.bindReference(v)])); + } + preparseOwnAliasEntry(alias, def) { + const firstParamIndex = alias.indexOf("<"); + if (firstParamIndex === -1) { + if (hasArkKind(def, "module") || hasArkKind(def, "generic")) return [alias, def]; + const qualifiedName = this.name === "ark" ? alias : alias === "root" ? this.name : `${this.name}.${alias}`; + const config = this.resolvedConfig.keywords?.[qualifiedName]; + if (config) def = [ + def, + "@", + config + ]; + return [alias, def]; + } + if (alias.at(-1) !== ">") throwParseError(`'>' must be the last character of a generic declaration in a scope`); + const name = alias.slice(0, firstParamIndex); + const paramString = alias.slice(firstParamIndex + 1, -1); + return [name, () => { + const params = this.parseGenericParams(paramString, { alias: name }); + const generic$1 = parseGeneric(params, def, this); + return generic$1; + }]; + } + parseGenericParams(def, opts) { + return parseGenericParamName(new ArkTypeScanner(def), [], this.createParseContext({ + ...opts, + def, + prefix: "generic" + })); + } + normalizeRootScopeValue(resolution) { + if (isThunk(resolution) && !hasArkKind(resolution, "generic")) return resolution(); + return resolution; + } + preparseOwnDefinitionFormat(def, opts) { + return { + ...opts, + def, + prefix: opts.alias ?? 
"type" + }; + } + parseOwnDefinitionFormat(def, ctx) { + const isScopeAlias = ctx.alias && ctx.alias in this.aliases; + if (!isScopeAlias && !ctx.args) ctx.args = { this: ctx.id }; + const result = parseInnerDefinition(def, ctx); + if (isArray(result)) { + if (result[1] === "=") return throwParseError(shallowDefaultableMessage); + if (result[1] === "?") return throwParseError(shallowOptionalMessage); + } + return result; + } + unit = (value$1) => this.units([value$1]); + valueOf = (tsEnum) => this.units(enumValues(tsEnum)); + enumerated = (...values) => this.units(values); + instanceOf = (ctor) => this.node("proto", { proto: ctor }, { prereduced: true }); + or = (...defs) => this.schema(defs.map((def) => this.parse(def))); + and = (...defs) => defs.reduce((node$1, def) => node$1.and(this.parse(def)), this.intrinsic.unknown); + merge = (...defs) => defs.reduce((node$1, def) => node$1.merge(this.parse(def)), this.intrinsic.object); + pipe = (...morphs) => this.intrinsic.unknown.pipe(...morphs); + match = new InternalMatchParser(this); + declare = () => ({ type: this.type }); + define(def) { + return def; + } + type = new InternalTypeParser(this); + static scope = (def, config = {}) => new InternalScope(def, config); + static module = (def, config = {}) => this.scope(def, config).export(); +}; +const scope = Object.assign(InternalScope.scope, { define: (def) => def }); +const Scope = InternalScope; + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/builtins.js +var MergeHkt = class extends Hkt { + description = "merge an object's properties onto another like `Merge(User, { isAdmin: \"true\" })`"; +}; +const Merge = genericNode(["base", intrinsic.object], ["props", intrinsic.object])((args$1) => args$1.base.merge(args$1.props), MergeHkt); +const arkBuiltins = Scope.module({ + Key: intrinsic.key, + Merge +}); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/Array.js +var liftFromHkt = class extends Hkt {}; +const liftFrom = genericNode("element")((args$1) => { + const nonArrayElement = args$1.element.exclude(intrinsic.Array); + const lifted = nonArrayElement.array(); + return nonArrayElement.rawOr(lifted).pipe(liftArray).distribute((branch) => branch.assertHasKind("morph").declareOut(lifted), rootSchema); +}, liftFromHkt); +const arkArray = Scope.module({ + root: intrinsic.Array, + readonly: "root", + index: intrinsic.nonNegativeIntegerString, + liftFrom +}, { name: "Array" }); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/FormData.js +const value = rootSchema(["string", registry.FileConstructor]); +const parsedFormDataValue = value.rawOr(value.array()); +const parsed = rootSchema({ + meta: "an object representing parsed form data", + domain: "object", + index: { + signature: "string", + value: parsedFormDataValue + } +}); +const arkFormData = Scope.module({ + root: ["instanceof", FormData], + value, + parsed, + parse: rootSchema({ + in: FormData, + morphs: (data) => { + const result = {}; + for (const [k, v] of data) if (k in result) { + const existing = result[k]; + if (typeof existing === "string" || existing instanceof registry.FileConstructor) result[k] = [existing, v]; + else existing.push(v); + } else result[k] = v; + return result; + }, + declaredOut: parsed + }) +}, { name: "FormData" }); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/TypedArray.js +const TypedArray = Scope.module({ + Int8: 
["instanceof", Int8Array], + Uint8: ["instanceof", Uint8Array], + Uint8Clamped: ["instanceof", Uint8ClampedArray], + Int16: ["instanceof", Int16Array], + Uint16: ["instanceof", Uint16Array], + Int32: ["instanceof", Int32Array], + Uint32: ["instanceof", Uint32Array], + Float32: ["instanceof", Float32Array], + Float64: ["instanceof", Float64Array], + BigInt64: ["instanceof", BigInt64Array], + BigUint64: ["instanceof", BigUint64Array] +}, { name: "TypedArray" }); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/constructors.js +const omittedPrototypes = { + Boolean: 1, + Number: 1, + String: 1 +}; +const arkPrototypes = Scope.module({ + ...flatMorph({ + ...ecmascriptConstructors, + ...platformConstructors + }, (k, v) => k in omittedPrototypes ? [] : [k, ["instanceof", v]]), + Array: arkArray, + TypedArray, + FormData: arkFormData +}); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/number.js +/** +* As per the ECMA-262 specification: +* A time value supports a slightly smaller range of -8,640,000,000,000,000 to 8,640,000,000,000,000 milliseconds. +* +* @see https://262.ecma-international.org/15.0/index.html#sec-time-values-and-time-range +*/ +const epoch$1 = rootSchema({ + domain: { + domain: "number", + meta: "a number representing a Unix timestamp" + }, + divisor: { + rule: 1, + meta: `an integer representing a Unix timestamp` + }, + min: { + rule: -864e13, + meta: `a Unix timestamp after -8640000000000000` + }, + max: { + rule: 864e13, + meta: "a Unix timestamp before 8640000000000000" + }, + meta: "an integer representing a safe Unix timestamp" +}); +const integer = rootSchema({ + domain: "number", + divisor: 1 +}); +const number = Scope.module({ + root: intrinsic.number, + integer, + epoch: epoch$1, + safe: rootSchema({ + domain: { + domain: "number", + numberAllowsNaN: false + }, + min: Number.MIN_SAFE_INTEGER, + max: Number.MAX_SAFE_INTEGER + }), + NaN: ["===", NaN], + Infinity: ["===", Number.POSITIVE_INFINITY], + NegativeInfinity: ["===", Number.NEGATIVE_INFINITY] +}, { name: "number" }); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/string.js +const regexStringNode = (regex$1, description, jsonSchemaFormat) => { + const schema$1 = { + domain: "string", + pattern: { + rule: regex$1.source, + flags: regex$1.flags, + meta: description + } + }; + if (jsonSchemaFormat) schema$1.meta = { format: jsonSchemaFormat }; + return node("intersection", schema$1); +}; +const stringIntegerRoot = regexStringNode(wellFormedIntegerMatcher, "a well-formed integer string"); +const stringInteger = Scope.module({ + root: stringIntegerRoot, + parse: rootSchema({ + in: stringIntegerRoot, + morphs: (s, ctx) => { + const parsed$1 = Number.parseInt(s); + return Number.isSafeInteger(parsed$1) ? 
parsed$1 : ctx.error("an integer in the range Number.MIN_SAFE_INTEGER to Number.MAX_SAFE_INTEGER"); + }, + declaredOut: intrinsic.integer + }) +}, { name: "string.integer" }); +const hex = regexStringNode(/^[\dA-Fa-f]+$/, "hex characters only"); +const base64 = Scope.module({ + root: regexStringNode(/^(?:[\d+/A-Za-z]{4})*(?:[\d+/A-Za-z]{2}==|[\d+/A-Za-z]{3}=)?$/, "base64-encoded"), + url: regexStringNode(/^(?:[\w-]{4})*(?:[\w-]{2}(?:==|%3D%3D)?|[\w-]{3}(?:=|%3D)?)?$/, "base64url-encoded") +}, { name: "string.base64" }); +const preformattedCapitalize = regexStringNode(/^[A-Z].*$/, "capitalized"); +const capitalize = Scope.module({ + root: rootSchema({ + in: "string", + morphs: (s) => s.charAt(0).toUpperCase() + s.slice(1), + declaredOut: preformattedCapitalize + }), + preformatted: preformattedCapitalize +}, { name: "string.capitalize" }); +const isLuhnValid = (creditCardInput) => { + const sanitized = creditCardInput.replaceAll(/[ -]+/g, ""); + let sum = 0; + let digit; + let tmpNum; + let shouldDouble = false; + for (let i = sanitized.length - 1; i >= 0; i--) { + digit = sanitized.substring(i, i + 1); + tmpNum = Number.parseInt(digit, 10); + if (shouldDouble) { + tmpNum *= 2; + sum += tmpNum >= 10 ? tmpNum % 10 + 1 : tmpNum; + } else sum += tmpNum; + shouldDouble = !shouldDouble; + } + return !!(sum % 10 === 0 ? sanitized : false); +}; +const creditCardMatcher = /^(?:4\d{12}(?:\d{3,6})?|5[1-5]\d{14}|(222[1-9]|22[3-9]\d|2[3-6]\d{2}|27[01]\d|2720)\d{12}|6(?:011|5\d\d)\d{12,15}|3[47]\d{13}|3(?:0[0-5]|[68]\d)\d{11}|(?:2131|1800|35\d{3})\d{11}|6[27]\d{14}|^(81\d{14,17}))$/; +const creditCard = rootSchema({ + domain: "string", + pattern: { + meta: "a credit card number", + rule: creditCardMatcher.source + }, + predicate: { + meta: "a credit card number", + predicate: isLuhnValid + } +}); +const iso8601Matcher = /^([+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))(T((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([,.]\d+(?!:))?)?(\17[0-5]\d([,.]\d+)?)?([Zz]|([+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; +const isParsableDate = (s) => !Number.isNaN(new Date(s).valueOf()); +const parsableDate = rootSchema({ + domain: "string", + predicate: { + meta: "a parsable date", + predicate: isParsableDate + } +}).assertHasKind("intersection"); +const epochRoot = stringInteger.root.internal.narrow((s, ctx) => { + const n = Number.parseInt(s); + const out = number.epoch(n); + if (out instanceof ArkErrors) { + ctx.errors.merge(out); + return false; + } + return true; +}).configure({ description: "an integer string representing a safe Unix timestamp" }, "self").assertHasKind("intersection"); +const epoch = Scope.module({ + root: epochRoot, + parse: rootSchema({ + in: epochRoot, + morphs: (s) => new Date(s), + declaredOut: intrinsic.Date + }) +}, { name: "string.date.epoch" }); +const isoRoot = regexStringNode(iso8601Matcher, "an ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) date").internal.assertHasKind("intersection"); +const iso = Scope.module({ + root: isoRoot, + parse: rootSchema({ + in: isoRoot, + morphs: (s) => new Date(s), + declaredOut: intrinsic.Date + }) +}, { name: "string.date.iso" }); +const stringDate = Scope.module({ + root: parsableDate, + parse: rootSchema({ + declaredIn: parsableDate, + in: "string", + morphs: (s, ctx) => { + const date = new Date(s); + if (Number.isNaN(date.valueOf())) return ctx.error("a parsable date"); + return date; + }, + declaredOut: intrinsic.Date + }), + iso, + epoch +}, { name: "string.date" }); 
+const email = regexStringNode(/^[\w%+.-]+@[\d.A-Za-z-]+\.[A-Za-z]{2,}$/, "an email address", "email"); +const ipv4Segment = "(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])"; +const ipv4Address = `(${ipv4Segment}[.]){3}${ipv4Segment}`; +const ipv4Matcher = /* @__PURE__ */ new RegExp(`^${ipv4Address}$`); +const ipv6Segment = "(?:[0-9a-fA-F]{1,4})"; +const ipv6Matcher = /* @__PURE__ */ new RegExp(`^((?:${ipv6Segment}:){7}(?:${ipv6Segment}|:)|(?:${ipv6Segment}:){6}(?:${ipv4Address}|:${ipv6Segment}|:)|(?:${ipv6Segment}:){5}(?::${ipv4Address}|(:${ipv6Segment}){1,2}|:)|(?:${ipv6Segment}:){4}(?:(:${ipv6Segment}){0,1}:${ipv4Address}|(:${ipv6Segment}){1,3}|:)|(?:${ipv6Segment}:){3}(?:(:${ipv6Segment}){0,2}:${ipv4Address}|(:${ipv6Segment}){1,4}|:)|(?:${ipv6Segment}:){2}(?:(:${ipv6Segment}){0,3}:${ipv4Address}|(:${ipv6Segment}){1,5}|:)|(?:${ipv6Segment}:){1}(?:(:${ipv6Segment}){0,4}:${ipv4Address}|(:${ipv6Segment}){1,6}|:)|(?::((?::${ipv6Segment}){0,5}:${ipv4Address}|(?::${ipv6Segment}){1,7}|:)))(%[0-9a-zA-Z.]{1,})?\$`); +const ip = Scope.module({ + root: [ + "v4 | v6", + "@", + "an IP address" + ], + v4: regexStringNode(ipv4Matcher, "an IPv4 address", "ipv4"), + v6: regexStringNode(ipv6Matcher, "an IPv6 address", "ipv6") +}, { name: "string.ip" }); +const jsonStringDescription = "a JSON string"; +const writeJsonSyntaxErrorProblem = (error) => { + if (!(error instanceof SyntaxError)) throw error; + return `must be ${jsonStringDescription} (${error})`; +}; +const jsonRoot = rootSchema({ + meta: jsonStringDescription, + domain: "string", + predicate: { + meta: jsonStringDescription, + predicate: (s, ctx) => { + try { + JSON.parse(s); + return true; + } catch (e) { + return ctx.reject({ + code: "predicate", + expected: jsonStringDescription, + problem: writeJsonSyntaxErrorProblem(e) + }); + } + } + } +}); +const parseJson = (s, ctx) => { + if (s.length === 0) return ctx.error({ + code: "predicate", + expected: jsonStringDescription, + actual: "empty" + }); + try { + return JSON.parse(s); + } catch (e) { + return ctx.error({ + code: "predicate", + expected: jsonStringDescription, + problem: writeJsonSyntaxErrorProblem(e) + }); + } +}; +const json$1 = Scope.module({ + root: jsonRoot, + parse: rootSchema({ + meta: "safe JSON string parser", + in: "string", + morphs: parseJson, + declaredOut: intrinsic.jsonObject + }) +}, { name: "string.json" }); +const preformattedLower = regexStringNode(/^[a-z]*$/, "only lowercase letters"); +const lower = Scope.module({ + root: rootSchema({ + in: "string", + morphs: (s) => s.toLowerCase(), + declaredOut: preformattedLower + }), + preformatted: preformattedLower +}, { name: "string.lower" }); +const normalizedForms = [ + "NFC", + "NFD", + "NFKC", + "NFKD" +]; +const preformattedNodes = flatMorph(normalizedForms, (i, form) => [form, rootSchema({ + domain: "string", + predicate: (s) => s.normalize(form) === s, + meta: `${form}-normalized unicode` +})]); +const normalizeNodes = flatMorph(normalizedForms, (i, form) => [form, rootSchema({ + in: "string", + morphs: (s) => s.normalize(form), + declaredOut: preformattedNodes[form] +})]); +const NFC = Scope.module({ + root: normalizeNodes.NFC, + preformatted: preformattedNodes.NFC +}, { name: "string.normalize.NFC" }); +const NFD = Scope.module({ + root: normalizeNodes.NFD, + preformatted: preformattedNodes.NFD +}, { name: "string.normalize.NFD" }); +const NFKC = Scope.module({ + root: normalizeNodes.NFKC, + preformatted: preformattedNodes.NFKC +}, { name: "string.normalize.NFKC" }); +const NFKD = Scope.module({ + root: 
normalizeNodes.NFKD, + preformatted: preformattedNodes.NFKD +}, { name: "string.normalize.NFKD" }); +const normalize = Scope.module({ + root: "NFC", + NFC, + NFD, + NFKC, + NFKD +}, { name: "string.normalize" }); +const numericRoot = regexStringNode(numericStringMatcher, "a well-formed numeric string"); +const stringNumeric = Scope.module({ + root: numericRoot, + parse: rootSchema({ + in: numericRoot, + morphs: (s) => Number.parseFloat(s), + declaredOut: intrinsic.number + }) +}, { name: "string.numeric" }); +const regexPatternDescription = "a regex pattern"; +const regex = rootSchema({ + domain: "string", + predicate: { + meta: regexPatternDescription, + predicate: (s, ctx) => { + try { + new RegExp(s); + return true; + } catch (e) { + return ctx.reject({ + code: "predicate", + expected: regexPatternDescription, + problem: String(e) + }); + } + } + }, + meta: { format: "regex" } +}); +const semverMatcher = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[A-Za-z-][\dA-Za-z-]*)(?:\.(?:0|[1-9]\d*|\d*[A-Za-z-][\dA-Za-z-]*))*))?(?:\+([\dA-Za-z-]+(?:\.[\dA-Za-z-]+)*))?$/; +const semver = regexStringNode(semverMatcher, "a semantic version (see https://semver.org/)"); +const preformattedTrim = regexStringNode(/^\S.*\S$|^\S?$/, "trimmed"); +const trim = Scope.module({ + root: rootSchema({ + in: "string", + morphs: (s) => s.trim(), + declaredOut: preformattedTrim + }), + preformatted: preformattedTrim +}, { name: "string.trim" }); +const preformattedUpper = regexStringNode(/^[A-Z]*$/, "only uppercase letters"); +const upper = Scope.module({ + root: rootSchema({ + in: "string", + morphs: (s) => s.toUpperCase(), + declaredOut: preformattedUpper + }), + preformatted: preformattedUpper +}, { name: "string.upper" }); +const isParsableUrl = (s) => { + if (URL.canParse) return URL.canParse(s); + try { + new URL(s); + return true; + } catch { + return false; + } +}; +const urlRoot = rootSchema({ + domain: "string", + predicate: { + meta: "a URL string", + predicate: isParsableUrl + }, + meta: { format: "uri" } +}); +const url = Scope.module({ + root: urlRoot, + parse: rootSchema({ + declaredIn: urlRoot, + in: "string", + morphs: (s, ctx) => { + try { + return new URL(s); + } catch { + return ctx.error("a URL string"); + } + }, + declaredOut: rootSchema(URL) + }) +}, { name: "string.url" }); +const uuid = Scope.module({ + root: [ + "versioned | nil | max", + "@", + { + description: "a UUID", + format: "uuid" + } + ], + "#nil": "'00000000-0000-0000-0000-000000000000'", + "#max": "'ffffffff-ffff-ffff-ffff-ffffffffffff'", + "#versioned": /[\da-f]{8}-[\da-f]{4}-[1-8][\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}/i, + v1: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-1[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv1"), + v2: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-2[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv2"), + v3: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-3[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv3"), + v4: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-4[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv4"), + v5: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-5[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv5"), + v6: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-6[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv6"), + v7: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-7[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv7"), + v8: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-8[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv8") +}, { name: "string.uuid" }); +const string = Scope.module({ + root: 
intrinsic.string, + alpha: regexStringNode(/^[A-Za-z]*$/, "only letters"), + alphanumeric: regexStringNode(/^[\dA-Za-z]*$/, "only letters and digits 0-9"), + hex, + base64, + capitalize, + creditCard, + date: stringDate, + digits: regexStringNode(/^\d*$/, "only digits 0-9"), + email, + integer: stringInteger, + ip, + json: json$1, + lower, + normalize, + numeric: stringNumeric, + regex, + semver, + trim, + upper, + url, + uuid +}, { name: "string" }); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/ts.js +const arkTsKeywords = Scope.module({ + bigint: intrinsic.bigint, + boolean: intrinsic.boolean, + false: intrinsic.false, + never: intrinsic.never, + null: intrinsic.null, + number: intrinsic.number, + object: intrinsic.object, + string: intrinsic.string, + symbol: intrinsic.symbol, + true: intrinsic.true, + unknown: intrinsic.unknown, + undefined: intrinsic.undefined +}); +const unknown = Scope.module({ + root: intrinsic.unknown, + any: intrinsic.unknown +}, { name: "unknown" }); +const json = Scope.module({ + root: intrinsic.jsonObject, + stringify: node("morph", { + in: intrinsic.jsonObject, + morphs: (data) => JSON.stringify(data), + declaredOut: intrinsic.string + }) +}, { name: "object.json" }); +const object = Scope.module({ + root: intrinsic.object, + json +}, { name: "object" }); +var RecordHkt = class extends Hkt { + description = "instantiate an object from an index signature and corresponding value type like `Record(\"string\", \"number\")`"; +}; +const Record = genericNode(["K", intrinsic.key], "V")((args$1) => ({ + domain: "object", + index: { + signature: args$1.K, + value: args$1.V + } +}), RecordHkt); +var PickHkt = class extends Hkt { + description = "pick a set of properties from an object like `Pick(User, \"name | age\")`"; +}; +const Pick = genericNode(["T", intrinsic.object], ["K", intrinsic.key])((args$1) => args$1.T.pick(args$1.K), PickHkt); +var OmitHkt = class extends Hkt { + description = "omit a set of properties from an object like `Omit(User, \"age\")`"; +}; +const Omit = genericNode(["T", intrinsic.object], ["K", intrinsic.key])((args$1) => args$1.T.omit(args$1.K), OmitHkt); +var PartialHkt = class extends Hkt { + description = "make all named properties of an object optional like `Partial(User)`"; +}; +const Partial = genericNode(["T", intrinsic.object])((args$1) => args$1.T.partial(), PartialHkt); +var RequiredHkt = class extends Hkt { + description = "make all named properties of an object required like `Required(User)`"; +}; +const Required = genericNode(["T", intrinsic.object])((args$1) => args$1.T.required(), RequiredHkt); +var ExcludeHkt = class extends Hkt { + description = "exclude branches of a union like `Exclude(\"boolean\", \"true\")`"; +}; +const Exclude = genericNode("T", "U")((args$1) => args$1.T.exclude(args$1.U), ExcludeHkt); +var ExtractHkt = class extends Hkt { + description = "extract branches of a union like `Extract(\"0 | false | 1\", \"number\")`"; +}; +const Extract = genericNode("T", "U")((args$1) => args$1.T.extract(args$1.U), ExtractHkt); +const arkTsGenerics = Scope.module({ + Exclude, + Extract, + Omit, + Partial, + Pick, + Record, + Required +}); + +//#endregion +//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/keywords.js +const ark = scope({ + ...arkTsKeywords, + ...arkTsGenerics, + ...arkPrototypes, + ...arkBuiltins, + string, + number, + object, + unknown +}, { + prereducedAliases: true, + name: "ark" +}); +const keywords = ark.export(); 
+Object.assign($arkTypeRegistry.ambient, keywords); +$arkTypeRegistry.typeAttachments = { + string: keywords.string.root, + number: keywords.number.root, + bigint: keywords.bigint, + boolean: keywords.boolean, + symbol: keywords.symbol, + undefined: keywords.undefined, + null: keywords.null, + object: keywords.object.root, + unknown: keywords.unknown.root, + false: keywords.false, + true: keywords.true, + never: keywords.never, + arrayIndex: keywords.Array.index, + Key: keywords.Key, + Record: keywords.Record, + Array: keywords.Array.root, + Date: keywords.Date +}; +const type = Object.assign(ark.type, $arkTypeRegistry.typeAttachments); +const match = ark.match; +const generic = ark.generic; +const schema = ark.schema; +const define = ark.define; +const declare = ark.declare; + +//#endregion +//#region node_modules/.pnpm/find-up-simple@1.0.1/node_modules/find-up-simple/index.js +const toPath = (urlOrPath) => urlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath; +async function findUp(name, { cwd = process$1.cwd(), type: type$1 = "file", stopAt } = {}) { + let directory = path.resolve(toPath(cwd) ?? ""); + const { root } = path.parse(directory); + stopAt = path.resolve(directory, toPath(stopAt ?? root)); + const isAbsoluteName = path.isAbsolute(name); + while (directory) { + const filePath = isAbsoluteName ? name : path.join(directory, name); + try { + const stats = await fsPromises.stat(filePath); + if (type$1 === "file" && stats.isFile() || type$1 === "directory" && stats.isDirectory()) return filePath; + } catch {} + if (directory === stopAt || directory === root) break; + directory = path.dirname(directory); + } +} + +//#endregion +//#region src/lib/core/PackageJson.ts +const PackageJson = type({ + name: "string", + version: "string.semver", + widgetName: "string.upper" +}); + +//#endregion +//#region src/build.ts +async function build() { + console.log("Building the project..."); + const result = await readPackageUp(); + if (!result) throw new Error("No package.json found"); + const pkg = PackageJson(result); + if (pkg instanceof type.errors) { + console.error(pkg.summary); + throw new Error("package.json is invalid"); + } + console.dir(pkg); +} +async function readPackageUp() { + const filePath = await findUp("package.json"); + console.log("Found package.json at:", filePath); + if (!filePath) return; + const data = await readFile(filePath, "utf-8"); + try { + return JSON.parse(data); + } catch { + console.error("Failed to parse package.json"); + } +} + +//#endregion +//#region src/constants.ts +const { version } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url)).toString()); +const VERSION = version; + +//#endregion +//#region src/cli.ts +const cli = cac("mpx"); +cli.command("build", "Build the project").action(build); +cli.help(); +cli.version(VERSION); +if (process.argv.length === 2) { + cli.outputHelp(); + process.exit(1); +} +cli.on("command:*", () => { + console.error(`Unknown command: "%s"`, cli.args.join(" ")); + console.error("See 'mpw --help' for a list of available commands."); + process.exit(1); +}); +cli.parse(); + +//#endregion \ No newline at end of file diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 598006c3..d8ba8a99 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -5,7 +5,8 @@ "type": "module", "scripts": { "preinstall": "npx only-allow pnpm", - "test": "echo 'test is missing'" + "test": "echo 'test is missing'", + "dev": "premove bin && rolldown -c rolldown.config.ts -w" }, "keywords": 
[ "mendix", @@ -22,8 +23,10 @@ "devDependencies": { "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", + "arktype": "^2.1.20", "cac": "^6.7.14", "find-up-simple": "^1.0.1", + "premove": "^4.0.0", "prettier": "^3.6.2" } } diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 0c446de4..acaae56f 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -18,18 +18,30 @@ importers: '@types/node': specifier: ^24.0.13 version: 24.0.13 + arktype: + specifier: ^2.1.20 + version: 2.1.20 cac: specifier: ^6.7.14 version: 6.7.14 find-up-simple: specifier: ^1.0.1 version: 1.0.1 + premove: + specifier: ^4.0.0 + version: 4.0.0 prettier: specifier: ^3.6.2 version: 3.6.2 packages: + '@ark/schema@0.46.0': + resolution: {integrity: sha512-c2UQdKgP2eqqDArfBqQIJppxJHvNNXuQPeuSPlDML4rjw+f1cu0qAlzOG4b8ujgm9ctIDWwhpyw6gjG5ledIVQ==} + + '@ark/util@0.46.0': + resolution: {integrity: sha512-JPy/NGWn/lvf1WmGCPw2VGpBg5utZraE84I7wli18EDF3p3zc/e9WolT35tINeZO3l7C77SjqRJeAUoT0CvMRg==} + '@emnapi/core@1.4.4': resolution: {integrity: sha512-A9CnAbC6ARNMKcIcrQwq6HeHCjpcBZ5wSx4U01WXCqEKlrzB9F9315WDNHkrs2xbx7YjjSxbUYxuN6EQzpcY2g==} @@ -125,6 +137,9 @@ packages: resolution: {integrity: sha512-BGcItUBWSMRgOCe+SVZJ+S7yTRG0eGt9cXAHev72yuGcY23hnLA7Bky5L/xLyPINoSN95geovfBkqoTlNZYa7w==} engines: {node: '>=14'} + arktype@2.1.20: + resolution: {integrity: sha512-IZCEEXaJ8g+Ijd59WtSYwtjnqXiwM8sWQ5EjGamcto7+HVN9eK0C4p0zDlCuAwWhpqr6fIBkxPuYDl4/Mcj/+Q==} + cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -133,6 +148,11 @@ packages: resolution: {integrity: sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==} engines: {node: '>=18'} + premove@4.0.0: + resolution: {integrity: sha512-zim/Hr4+FVdCIM7zL9b9Z0Wfd5Ya3mnKtiuDv7L5lzYzanSq6cOcVJ7EFcgK4I0pt28l8H0jX/x3nyog380XgQ==} + engines: {node: '>=6'} + hasBin: true + prettier@3.6.2: resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} engines: {node: '>=14'} @@ -150,6 +170,12 @@ packages: snapshots: + '@ark/schema@0.46.0': + dependencies: + '@ark/util': 0.46.0 + + '@ark/util@0.46.0': {} + '@emnapi/core@1.4.4': dependencies: '@emnapi/wasi-threads': 1.0.3 @@ -230,10 +256,17 @@ snapshots: ansis@4.1.0: {} + arktype@2.1.20: + dependencies: + '@ark/schema': 0.46.0 + '@ark/util': 0.46.0 + cac@6.7.14: {} find-up-simple@1.0.1: {} + premove@4.0.0: {} + prettier@3.6.2: {} rolldown@1.0.0-beta.26: diff --git a/packages/mpx/rolldown.config.ts b/packages/mpx/rolldown.config.ts index e193f5a3..4fedd8e6 100644 --- a/packages/mpx/rolldown.config.ts +++ b/packages/mpx/rolldown.config.ts @@ -4,7 +4,7 @@ const config: RolldownOptions = { input: "./src/cli.ts", external: ["rolldown"], output: { - file: "./bin/mpw.js", + file: "./bin/mpx.js", inlineDynamicImports: true }, platform: "node" diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 065d3b5e..46996130 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,17 +1,26 @@ -import { findUp } from "find-up-simple" -import { readFile } from "node:fs/promises" +import { type } from "arktype"; +import { findUp } from "find-up-simple"; +import { readFile } from "node:fs/promises"; +import { PackageJson } from "./lib/core/PackageJson.js"; export async function build() { - const result = await readPackageUp() + console.log("Building the project..."); + const result = 
From 6fb306a5d1f8d8a92752f2fa85cf891d89acde33 Mon Sep 17 00:00:00 2001
From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com>
Date: Sat, 12 Jul 2025 10:42:25 +0200
Subject: [PATCH 03/45] feat: simple build

---
 packages/mpx/.gitignore                  |    2 +
 packages/mpx/bin/mpx.js                  | 7895 +---------------------
 packages/mpx/package-lock.json           |   15 -
 packages/mpx/package.json                |    6 +-
 packages/mpx/pnpm-lock.yaml              |  152 +-
 packages/mpx/rolldown.config.ts          |   10 +-
 packages/mpx/src/build.ts                |   60 +-
 packages/mpx/src/cli.ts                  |   24 +-
 packages/mpx/src/error-utils.ts          |    8 +
 packages/mpx/src/lib/core/PackageJson.ts |    6 +-
 packages/mpx/test/package.json           |    5 -
 packages/mpx/tsconfig.json               |    1 +
 12 files changed, 227 insertions(+), 7957 deletions(-)
 create mode 100644 packages/mpx/.gitignore
 delete mode 100644 packages/mpx/package-lock.json
 create mode 100644 packages/mpx/src/error-utils.ts
 delete mode 100644 packages/mpx/test/package.json

diff --git a/packages/mpx/.gitignore b/packages/mpx/.gitignore
new file mode
100644 index 00000000..2ed8e451 --- /dev/null +++ b/packages/mpx/.gitignore @@ -0,0 +1,2 @@ +box +input diff --git a/packages/mpx/bin/mpx.js b/packages/mpx/bin/mpx.js index e1aead94..ade4831b 100644 --- a/packages/mpx/bin/mpx.js +++ b/packages/mpx/bin/mpx.js @@ -1,7896 +1,3 @@ #!/usr/bin/env node -import { EventEmitter } from "events"; -import process$1 from "node:process"; -import fsPromises, { readFile } from "node:fs/promises"; -import { fileURLToPath } from "node:url"; -import { readFileSync } from "node:fs"; -import path from "node:path"; -//#region node_modules/.pnpm/cac@6.7.14/node_modules/cac/dist/index.mjs -function toArr(any) { - return any == null ? [] : Array.isArray(any) ? any : [any]; -} -function toVal(out, key, val, opts) { - var x, old = out[key], nxt = !!~opts.string.indexOf(key) ? val == null || val === true ? "" : String(val) : typeof val === "boolean" ? val : !!~opts.boolean.indexOf(key) ? val === "false" ? false : val === "true" || (out._.push((x = +val, x * 0 === 0) ? x : val), !!val) : (x = +val, x * 0 === 0) ? x : val; - out[key] = old == null ? nxt : Array.isArray(old) ? old.concat(nxt) : [old, nxt]; -} -function mri2(args$1, opts) { - args$1 = args$1 || []; - opts = opts || {}; - var k, arr, arg, name, val, out = { _: [] }; - var i = 0, j = 0, idx = 0, len = args$1.length; - const alibi = opts.alias !== void 0; - const strict = opts.unknown !== void 0; - const defaults = opts.default !== void 0; - opts.alias = opts.alias || {}; - opts.string = toArr(opts.string); - opts.boolean = toArr(opts.boolean); - if (alibi) for (k in opts.alias) { - arr = opts.alias[k] = toArr(opts.alias[k]); - for (i = 0; i < arr.length; i++) (opts.alias[arr[i]] = arr.concat(k)).splice(i, 1); - } - for (i = opts.boolean.length; i-- > 0;) { - arr = opts.alias[opts.boolean[i]] || []; - for (j = arr.length; j-- > 0;) opts.boolean.push(arr[j]); - } - for (i = opts.string.length; i-- > 0;) { - arr = opts.alias[opts.string[i]] || []; - for (j = arr.length; j-- > 0;) opts.string.push(arr[j]); - } - if (defaults) for (k in opts.default) { - name = typeof opts.default[k]; - arr = opts.alias[k] = opts.alias[k] || []; - if (opts[name] !== void 0) { - opts[name].push(k); - for (i = 0; i < arr.length; i++) opts[name].push(arr[i]); - } - } - const keys = strict ? Object.keys(opts.alias) : []; - for (i = 0; i < len; i++) { - arg = args$1[i]; - if (arg === "--") { - out._ = out._.concat(args$1.slice(++i)); - break; - } - for (j = 0; j < arg.length; j++) if (arg.charCodeAt(j) !== 45) break; - if (j === 0) out._.push(arg); - else if (arg.substring(j, j + 3) === "no-") { - name = arg.substring(j + 3); - if (strict && !~keys.indexOf(name)) return opts.unknown(arg); - out[name] = false; - } else { - for (idx = j + 1; idx < arg.length; idx++) if (arg.charCodeAt(idx) === 61) break; - name = arg.substring(j, idx); - val = arg.substring(++idx) || i + 1 === len || ("" + args$1[i + 1]).charCodeAt(0) === 45 || args$1[++i]; - arr = j === 2 ? 
[name] : name; - for (idx = 0; idx < arr.length; idx++) { - name = arr[idx]; - if (strict && !~keys.indexOf(name)) return opts.unknown("-".repeat(j) + name); - toVal(out, name, idx + 1 < arr.length || val, opts); - } - } - } - if (defaults) { - for (k in opts.default) if (out[k] === void 0) out[k] = opts.default[k]; - } - if (alibi) for (k in out) { - arr = opts.alias[k] || []; - while (arr.length > 0) out[arr.shift()] = out[k]; - } - return out; -} -const removeBrackets = (v) => v.replace(/[<[].+/, "").trim(); -const findAllBrackets = (v) => { - const ANGLED_BRACKET_RE_GLOBAL = /<([^>]+)>/g; - const SQUARE_BRACKET_RE_GLOBAL = /\[([^\]]+)\]/g; - const res = []; - const parse = (match$1) => { - let variadic = false; - let value$1 = match$1[1]; - if (value$1.startsWith("...")) { - value$1 = value$1.slice(3); - variadic = true; - } - return { - required: match$1[0].startsWith("<"), - value: value$1, - variadic - }; - }; - let angledMatch; - while (angledMatch = ANGLED_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(angledMatch)); - let squareMatch; - while (squareMatch = SQUARE_BRACKET_RE_GLOBAL.exec(v)) res.push(parse(squareMatch)); - return res; -}; -const getMriOptions = (options) => { - const result = { - alias: {}, - boolean: [] - }; - for (const [index, option] of options.entries()) { - if (option.names.length > 1) result.alias[option.names[0]] = option.names.slice(1); - if (option.isBoolean) if (option.negated) { - const hasStringTypeOption = options.some((o, i) => { - return i !== index && o.names.some((name) => option.names.includes(name)) && typeof o.required === "boolean"; - }); - if (!hasStringTypeOption) result.boolean.push(option.names[0]); - } else result.boolean.push(option.names[0]); - } - return result; -}; -const findLongest = (arr) => { - return arr.sort((a, b) => { - return a.length > b.length ? -1 : 1; - })[0]; -}; -const padRight = (str, length) => { - return str.length >= length ? str : `${str}${" ".repeat(length - str.length)}`; -}; -const camelcase = (input) => { - return input.replace(/([a-z])-([a-z])/g, (_, p1, p2) => { - return p1 + p2.toUpperCase(); - }); -}; -const setDotProp = (obj, keys, val) => { - let i = 0; - let length = keys.length; - let t = obj; - let x; - for (; i < length; ++i) { - x = t[keys[i]]; - t = t[keys[i]] = i === length - 1 ? val : x != null ? x : !!~keys[i + 1].indexOf(".") || !(+keys[i + 1] > -1) ? {} : []; - } -}; -const setByType = (obj, transforms) => { - for (const key of Object.keys(transforms)) { - const transform = transforms[key]; - if (transform.shouldTransform) { - obj[key] = Array.prototype.concat.call([], obj[key]); - if (typeof transform.transformFunction === "function") obj[key] = obj[key].map(transform.transformFunction); - } - } -}; -const getFileName = (input) => { - const m = /([^\\\/]+)$/.exec(input); - return m ? m[1] : ""; -}; -const camelcaseOptionName = (name) => { - return name.split(".").map((v, i) => { - return i === 0 ? 
camelcase(v) : v; - }).join("."); -}; -var CACError = class extends Error { - constructor(message) { - super(message); - this.name = this.constructor.name; - if (typeof Error.captureStackTrace === "function") Error.captureStackTrace(this, this.constructor); - else this.stack = new Error(message).stack; - } -}; -var Option = class { - constructor(rawName, description, config) { - this.rawName = rawName; - this.description = description; - this.config = Object.assign({}, config); - rawName = rawName.replace(/\.\*/g, ""); - this.negated = false; - this.names = removeBrackets(rawName).split(",").map((v) => { - let name = v.trim().replace(/^-{1,2}/, ""); - if (name.startsWith("no-")) { - this.negated = true; - name = name.replace(/^no-/, ""); - } - return camelcaseOptionName(name); - }).sort((a, b) => a.length > b.length ? 1 : -1); - this.name = this.names[this.names.length - 1]; - if (this.negated && this.config.default == null) this.config.default = true; - if (rawName.includes("<")) this.required = true; - else if (rawName.includes("[")) this.required = false; - else this.isBoolean = true; - } -}; -const processArgs = process.argv; -const platformInfo = `${process.platform}-${process.arch} node-${process.version}`; -var Command = class { - constructor(rawName, description, config = {}, cli$1) { - this.rawName = rawName; - this.description = description; - this.config = config; - this.cli = cli$1; - this.options = []; - this.aliasNames = []; - this.name = removeBrackets(rawName); - this.args = findAllBrackets(rawName); - this.examples = []; - } - usage(text) { - this.usageText = text; - return this; - } - allowUnknownOptions() { - this.config.allowUnknownOptions = true; - return this; - } - ignoreOptionDefaultValue() { - this.config.ignoreOptionDefaultValue = true; - return this; - } - version(version$1, customFlags = "-v, --version") { - this.versionNumber = version$1; - this.option(customFlags, "Display version number"); - return this; - } - example(example) { - this.examples.push(example); - return this; - } - option(rawName, description, config) { - const option = new Option(rawName, description, config); - this.options.push(option); - return this; - } - alias(name) { - this.aliasNames.push(name); - return this; - } - action(callback) { - this.commandAction = callback; - return this; - } - isMatched(name) { - return this.name === name || this.aliasNames.includes(name); - } - get isDefaultCommand() { - return this.name === "" || this.aliasNames.includes("!"); - } - get isGlobalCommand() { - return this instanceof GlobalCommand; - } - hasOption(name) { - name = name.split(".")[0]; - return this.options.find((option) => { - return option.names.includes(name); - }); - } - outputHelp() { - const { name, commands } = this.cli; - const { versionNumber, options: globalOptions, helpCallback } = this.cli.globalCommand; - let sections = [{ body: `${name}${versionNumber ? 
`/${versionNumber}` : ""}` }]; - sections.push({ - title: "Usage", - body: ` $ ${name} ${this.usageText || this.rawName}` - }); - const showCommands = (this.isGlobalCommand || this.isDefaultCommand) && commands.length > 0; - if (showCommands) { - const longestCommandName = findLongest(commands.map((command) => command.rawName)); - sections.push({ - title: "Commands", - body: commands.map((command) => { - return ` ${padRight(command.rawName, longestCommandName.length)} ${command.description}`; - }).join("\n") - }); - sections.push({ - title: `For more info, run any command with the \`--help\` flag`, - body: commands.map((command) => ` $ ${name}${command.name === "" ? "" : ` ${command.name}`} --help`).join("\n") - }); - } - let options = this.isGlobalCommand ? globalOptions : [...this.options, ...globalOptions || []]; - if (!this.isGlobalCommand && !this.isDefaultCommand) options = options.filter((option) => option.name !== "version"); - if (options.length > 0) { - const longestOptionName = findLongest(options.map((option) => option.rawName)); - sections.push({ - title: "Options", - body: options.map((option) => { - return ` ${padRight(option.rawName, longestOptionName.length)} ${option.description} ${option.config.default === void 0 ? "" : `(default: ${option.config.default})`}`; - }).join("\n") - }); - } - if (this.examples.length > 0) sections.push({ - title: "Examples", - body: this.examples.map((example) => { - if (typeof example === "function") return example(name); - return example; - }).join("\n") - }); - if (helpCallback) sections = helpCallback(sections) || sections; - console.log(sections.map((section) => { - return section.title ? `${section.title}: -${section.body}` : section.body; - }).join("\n\n")); - } - outputVersion() { - const { name } = this.cli; - const { versionNumber } = this.cli.globalCommand; - if (versionNumber) console.log(`${name}/${versionNumber} ${platformInfo}`); - } - checkRequiredArgs() { - const minimalArgsCount = this.args.filter((arg) => arg.required).length; - if (this.cli.args.length < minimalArgsCount) throw new CACError(`missing required args for command \`${this.rawName}\``); - } - checkUnknownOptions() { - const { options, globalCommand } = this.cli; - if (!this.config.allowUnknownOptions) { - for (const name of Object.keys(options)) if (name !== "--" && !this.hasOption(name) && !globalCommand.hasOption(name)) throw new CACError(`Unknown option \`${name.length > 1 ? 
`--${name}` : `-${name}`}\``); - } - } - checkOptionValue() { - const { options: parsedOptions, globalCommand } = this.cli; - const options = [...globalCommand.options, ...this.options]; - for (const option of options) { - const value$1 = parsedOptions[option.name.split(".")[0]]; - if (option.required) { - const hasNegated = options.some((o) => o.negated && o.names.includes(option.name)); - if (value$1 === true || value$1 === false && !hasNegated) throw new CACError(`option \`${option.rawName}\` value is missing`); - } - } - } -}; -var GlobalCommand = class extends Command { - constructor(cli$1) { - super("@@global@@", "", {}, cli$1); - } -}; -var __assign = Object.assign; -var CAC = class extends EventEmitter { - constructor(name = "") { - super(); - this.name = name; - this.commands = []; - this.rawArgs = []; - this.args = []; - this.options = {}; - this.globalCommand = new GlobalCommand(this); - this.globalCommand.usage(" [options]"); - } - usage(text) { - this.globalCommand.usage(text); - return this; - } - command(rawName, description, config) { - const command = new Command(rawName, description || "", config, this); - command.globalCommand = this.globalCommand; - this.commands.push(command); - return command; - } - option(rawName, description, config) { - this.globalCommand.option(rawName, description, config); - return this; - } - help(callback) { - this.globalCommand.option("-h, --help", "Display this message"); - this.globalCommand.helpCallback = callback; - this.showHelpOnExit = true; - return this; - } - version(version$1, customFlags = "-v, --version") { - this.globalCommand.version(version$1, customFlags); - this.showVersionOnExit = true; - return this; - } - example(example) { - this.globalCommand.example(example); - return this; - } - outputHelp() { - if (this.matchedCommand) this.matchedCommand.outputHelp(); - else this.globalCommand.outputHelp(); - } - outputVersion() { - this.globalCommand.outputVersion(); - } - setParsedInfo({ args: args$1, options }, matchedCommand, matchedCommandName) { - this.args = args$1; - this.options = options; - if (matchedCommand) this.matchedCommand = matchedCommand; - if (matchedCommandName) this.matchedCommandName = matchedCommandName; - return this; - } - unsetMatchedCommand() { - this.matchedCommand = void 0; - this.matchedCommandName = void 0; - } - parse(argv = processArgs, { run = true } = {}) { - this.rawArgs = argv; - if (!this.name) this.name = argv[1] ? 
getFileName(argv[1]) : "cli"; - let shouldParse = true; - for (const command of this.commands) { - const parsed$1 = this.mri(argv.slice(2), command); - const commandName = parsed$1.args[0]; - if (command.isMatched(commandName)) { - shouldParse = false; - const parsedInfo = __assign(__assign({}, parsed$1), { args: parsed$1.args.slice(1) }); - this.setParsedInfo(parsedInfo, command, commandName); - this.emit(`command:${commandName}`, command); - } - } - if (shouldParse) { - for (const command of this.commands) if (command.name === "") { - shouldParse = false; - const parsed$1 = this.mri(argv.slice(2), command); - this.setParsedInfo(parsed$1, command); - this.emit(`command:!`, command); - } - } - if (shouldParse) { - const parsed$1 = this.mri(argv.slice(2)); - this.setParsedInfo(parsed$1); - } - if (this.options.help && this.showHelpOnExit) { - this.outputHelp(); - run = false; - this.unsetMatchedCommand(); - } - if (this.options.version && this.showVersionOnExit && this.matchedCommandName == null) { - this.outputVersion(); - run = false; - this.unsetMatchedCommand(); - } - const parsedArgv = { - args: this.args, - options: this.options - }; - if (run) this.runMatchedCommand(); - if (!this.matchedCommand && this.args[0]) this.emit("command:*"); - return parsedArgv; - } - mri(argv, command) { - const cliOptions = [...this.globalCommand.options, ...command ? command.options : []]; - const mriOptions = getMriOptions(cliOptions); - let argsAfterDoubleDashes = []; - const doubleDashesIndex = argv.indexOf("--"); - if (doubleDashesIndex > -1) { - argsAfterDoubleDashes = argv.slice(doubleDashesIndex + 1); - argv = argv.slice(0, doubleDashesIndex); - } - let parsed$1 = mri2(argv, mriOptions); - parsed$1 = Object.keys(parsed$1).reduce((res, name) => { - return __assign(__assign({}, res), { [camelcaseOptionName(name)]: parsed$1[name] }); - }, { _: [] }); - const args$1 = parsed$1._; - const options = { "--": argsAfterDoubleDashes }; - const ignoreDefault = command && command.config.ignoreOptionDefaultValue ? command.config.ignoreOptionDefaultValue : this.globalCommand.config.ignoreOptionDefaultValue; - let transforms = Object.create(null); - for (const cliOption of cliOptions) { - if (!ignoreDefault && cliOption.config.default !== void 0) for (const name of cliOption.names) options[name] = cliOption.config.default; - if (Array.isArray(cliOption.config.type)) { - if (transforms[cliOption.name] === void 0) { - transforms[cliOption.name] = Object.create(null); - transforms[cliOption.name]["shouldTransform"] = true; - transforms[cliOption.name]["transformFunction"] = cliOption.config.type[0]; - } - } - } - for (const key of Object.keys(parsed$1)) if (key !== "_") { - const keys = key.split("."); - setDotProp(options, keys, parsed$1[key]); - setByType(options, transforms); - } - return { - args: args$1, - options - }; - } - runMatchedCommand() { - const { args: args$1, options, matchedCommand: command } = this; - if (!command || !command.commandAction) return; - command.checkUnknownOptions(); - command.checkOptionValue(); - command.checkRequiredArgs(); - const actionArgs = []; - command.args.forEach((arg, index) => { - if (arg.variadic) actionArgs.push(args$1.slice(index)); - else actionArgs.push(args$1[index]); - }); - actionArgs.push(options); - return command.commandAction.apply(this, actionArgs); - } -}; -const cac = (name = "") => new CAC(name); - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/arrays.js -const liftArray = (data) => Array.isArray(data) ? 
data : [data]; -/** -* Splits an array into two arrays based on the result of a predicate -* -* @param predicate - The guard function used to determine which items to include. -* @returns A tuple containing two arrays: -* - the first includes items for which `predicate` returns true -* - the second includes items for which `predicate` returns false -* -* @example -* const list = [1, "2", "3", 4, 5]; -* const [numbers, strings] = spliterate(list, (x) => typeof x === "number"); -* // Type: number[] -* // Output: [1, 4, 5] -* console.log(evens); -* // Type: string[] -* // Output: ["2", "3"] -* console.log(odds); -*/ -const spliterate = (arr, predicate) => { - const result = [[], []]; - for (const item of arr) if (predicate(item)) result[0].push(item); - else result[1].push(item); - return result; -}; -const ReadonlyArray = Array; -const includes = (array, element) => array.includes(element); -const range = (length, offset = 0) => [...new Array(length)].map((_, i) => i + offset); -/** -* Adds a value or array to an array, returning the concatenated result -*/ -const append = (to, value$1, opts) => { - if (to === void 0) return value$1 === void 0 ? [] : Array.isArray(value$1) ? value$1 : [value$1]; - if (opts?.prepend) if (Array.isArray(value$1)) to.unshift(...value$1); - else to.unshift(value$1); - else if (Array.isArray(value$1)) to.push(...value$1); - else to.push(value$1); - return to; -}; -/** -* Concatenates an element or list with a readonly list -*/ -const conflatenate = (to, elementOrList) => { - if (elementOrList === void 0 || elementOrList === null) return to ?? []; - if (to === void 0 || to === null) return liftArray(elementOrList); - return to.concat(elementOrList); -}; -/** -* Concatenates a variadic list of elements or lists with a readonly list -*/ -const conflatenateAll = (...elementsOrLists) => elementsOrLists.reduce(conflatenate, []); -/** -* Appends a value or concatenates an array to an array if it is not already included, returning the array -*/ -const appendUnique = (to, value$1, opts) => { - if (to === void 0) return Array.isArray(value$1) ? value$1 : [value$1]; - const isEqual = opts?.isEqual ?? ((l, r) => l === r); - for (const v of liftArray(value$1)) if (!to.some((existing) => isEqual(existing, v))) to.push(v); - return to; -}; -const groupBy = (array, discriminant) => array.reduce((result, item) => { - const key = item[discriminant]; - result[key] = append(result[key], item); - return result; -}, {}); -const arrayEquals = (l, r, opts) => l.length === r.length && l.every(opts?.isEqual ? (lItem, i) => opts.isEqual(lItem, r[i]) : (lItem, i) => lItem === r[i]); - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/domain.js -const hasDomain = (data, kind) => domainOf(data) === kind; -const domainOf = (data) => { - const builtinType = typeof data; - return builtinType === "object" ? data === null ? "null" : "object" : builtinType === "function" ? 
"object" : builtinType; -}; -/** Each domain's completion for the phrase "must be _____" */ -const domainDescriptions = { - boolean: "boolean", - null: "null", - undefined: "undefined", - bigint: "a bigint", - number: "a number", - object: "an object", - string: "a string", - symbol: "a symbol" -}; -const jsTypeOfDescriptions = { - ...domainDescriptions, - function: "a function" -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/errors.js -var InternalArktypeError = class extends Error {}; -const throwInternalError = (message) => throwError(message, InternalArktypeError); -const throwError = (message, ctor = Error) => { - throw new ctor(message); -}; -var ParseError = class extends Error { - name = "ParseError"; -}; -const throwParseError = (message) => throwError(message, ParseError); -/** -* TypeScript won't suggest strings beginning with a space as properties. -* Useful for symbol-like string properties. -*/ -const noSuggest = (s) => ` ${s}`; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/flatMorph.js -const flatMorph = (o, flatMapEntry) => { - const result = {}; - const inputIsArray = Array.isArray(o); - let outputShouldBeArray = false; - for (const [i, entry] of Object.entries(o).entries()) { - const mapped = inputIsArray ? flatMapEntry(i, entry[1]) : flatMapEntry(...entry, i); - outputShouldBeArray ||= typeof mapped[0] === "number"; - const flattenedEntries = Array.isArray(mapped[0]) || mapped.length === 0 ? mapped : [mapped]; - for (const [k, v] of flattenedEntries) if (typeof k === "object") result[k.group] = append(result[k.group], v); - else result[k] = v; - } - return outputShouldBeArray ? Object.values(result) : result; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/records.js -/** -* Object.entries wrapper providing narrowed types for objects with known sets -* of keys, e.g. those defined internally as configs -*/ -const entriesOf = Object.entries; -const isKeyOf = (k, o) => k in o; -const hasKey = (o, k) => k in o; -var DynamicBase = class { - constructor(properties) { - Object.assign(this, properties); - } -}; -const NoopBase = class {}; -/** @ts-ignore (needed to extend `t`) **/ -var CastableBase = class extends NoopBase {}; -const splitByKeys = (o, leftKeys) => { - const l = {}; - const r = {}; - let k; - for (k in o) if (k in leftKeys) l[k] = o[k]; - else r[k] = o[k]; - return [l, r]; -}; -const omit = (o, keys) => splitByKeys(o, keys)[1]; -const isEmptyObject = (o) => Object.keys(o).length === 0; -const stringAndSymbolicEntriesOf = (o) => [...Object.entries(o), ...Object.getOwnPropertySymbols(o).map((k) => [k, o[k]])]; -/** Like Object.assign, but it will preserve getters instead of evaluating them. 
*/ -const defineProperties = (base, merged) => Object.defineProperties(base, Object.getOwnPropertyDescriptors(merged)); -/** Copies enumerable keys of o to a new object in alphabetical order */ -const withAlphabetizedKeys = (o) => { - const keys = Object.keys(o).sort(); - const result = {}; - for (let i = 0; i < keys.length; i++) result[keys[i]] = o[keys[i]]; - return result; -}; -const unset = noSuggest("represents an uninitialized value"); -const enumValues = (tsEnum) => Object.values(tsEnum).filter((v) => { - if (typeof v === "number") return true; - return typeof tsEnum[v] !== "number"; -}); - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/objectKinds.js -const ecmascriptConstructors = { - Array, - Boolean, - Date, - Error, - Function, - Map, - Number, - Promise, - RegExp, - Set, - String, - WeakMap, - WeakSet -}; -/** Node18 */ -const FileConstructor = globalThis.File ?? Blob; -const platformConstructors = { - ArrayBuffer, - Blob, - File: FileConstructor, - FormData, - Headers, - Request, - Response, - URL -}; -const typedArrayConstructors = { - Int8Array, - Uint8Array, - Uint8ClampedArray, - Int16Array, - Uint16Array, - Int32Array, - Uint32Array, - Float32Array, - Float64Array, - BigInt64Array, - BigUint64Array -}; -const builtinConstructors = { - ...ecmascriptConstructors, - ...platformConstructors, - ...typedArrayConstructors, - String, - Number, - Boolean -}; -const objectKindOf = (data) => { - let prototype = Object.getPrototypeOf(data); - while (prototype?.constructor && (!isKeyOf(prototype.constructor.name, builtinConstructors) || !(data instanceof builtinConstructors[prototype.constructor.name]))) prototype = Object.getPrototypeOf(prototype); - const name = prototype?.constructor?.name; - if (name === void 0 || name === "Object") return void 0; - return name; -}; -const objectKindOrDomainOf = (data) => typeof data === "object" && data !== null ? objectKindOf(data) ?? "object" : domainOf(data); -const isArray = Array.isArray; -const ecmascriptDescriptions = { - Array: "an array", - Function: "a function", - Date: "a Date", - RegExp: "a RegExp", - Error: "an Error", - Map: "a Map", - Set: "a Set", - String: "a String object", - Number: "a Number object", - Boolean: "a Boolean object", - Promise: "a Promise", - WeakMap: "a WeakMap", - WeakSet: "a WeakSet" -}; -const platformDescriptions = { - ArrayBuffer: "an ArrayBuffer instance", - Blob: "a Blob instance", - File: "a File instance", - FormData: "a FormData instance", - Headers: "a Headers instance", - Request: "a Request instance", - Response: "a Response instance", - URL: "a URL instance" -}; -const typedArrayDescriptions = { - Int8Array: "an Int8Array", - Uint8Array: "a Uint8Array", - Uint8ClampedArray: "a Uint8ClampedArray", - Int16Array: "an Int16Array", - Uint16Array: "a Uint16Array", - Int32Array: "an Int32Array", - Uint32Array: "a Uint32Array", - Float32Array: "a Float32Array", - Float64Array: "a Float64Array", - BigInt64Array: "a BigInt64Array", - BigUint64Array: "a BigUint64Array" -}; -/** Each defaultObjectKind's completion for the phrase "must be _____" */ -const objectKindDescriptions = { - ...ecmascriptDescriptions, - ...platformDescriptions, - ...typedArrayDescriptions -}; -/** -* this will only return an object kind if it's the root constructor -* example TypeError would return null not 'Error' -**/ -const getBuiltinNameOfConstructor = (ctor) => { - const constructorName = Object(ctor).name ?? 
null; - return constructorName && isKeyOf(constructorName, builtinConstructors) && builtinConstructors[constructorName] === ctor ? constructorName : null; -}; -/** -* Returns an array of constructors for all ancestors (i.e., prototypes) of a given object. -*/ -const ancestorsOf = (o) => { - let proto = Object.getPrototypeOf(o); - const result = []; - while (proto !== null) { - result.push(proto.constructor); - proto = Object.getPrototypeOf(proto); - } - return result; -}; -const constructorExtends = (ctor, base) => { - let current = ctor.prototype; - while (current !== null) { - if (current === base.prototype) return true; - current = Object.getPrototypeOf(current); - } - return false; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/clone.js -/** Deeply copy the properties of the a non-subclassed Object, Array or Date.*/ -const deepClone = (input) => _clone(input, /* @__PURE__ */ new Map()); -const _clone = (input, seen) => { - if (typeof input !== "object" || input === null) return input; - if (seen?.has(input)) return seen.get(input); - const builtinConstructorName = getBuiltinNameOfConstructor(input.constructor); - if (builtinConstructorName === "Date") return new Date(input.getTime()); - if (builtinConstructorName && builtinConstructorName !== "Array") return input; - const cloned = Array.isArray(input) ? input.slice() : Object.create(Object.getPrototypeOf(input)); - const propertyDescriptors = Object.getOwnPropertyDescriptors(input); - if (seen) { - seen.set(input, cloned); - for (const k in propertyDescriptors) { - const desc = propertyDescriptors[k]; - if ("get" in desc || "set" in desc) continue; - desc.value = _clone(desc.value, seen); - } - } - Object.defineProperties(cloned, propertyDescriptors); - return cloned; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/functions.js -const cached = (thunk) => { - let result = unset; - return () => result === unset ? result = thunk() : result; -}; -const isThunk = (value$1) => typeof value$1 === "function" && value$1.length === 0; -const DynamicFunction = class extends Function { - constructor(...args$1) { - const params = args$1.slice(0, -1); - const body = args$1.at(-1); - try { - super(...params, body); - } catch (e) { - return throwInternalError(`Encountered an unexpected error while compiling your definition: - Message: ${e} - Source: (${args$1.slice(0, -1)}) => { - ${args$1.at(-1)} - }`); - } - } -}; -var Callable = class { - constructor(fn, ...[opts]) { - return Object.assign(Object.setPrototypeOf(fn.bind(opts?.bind ?? this), this.constructor.prototype), opts?.attach); - } -}; -/** -* Checks if the environment has Content Security Policy (CSP) enabled, -* preventing JIT-optimized code from being compiled via new Function(). -* -* @returns `true` if a function created using new Function() can be -* successfully invoked in the environment, `false` otherwise. -* -* The result is cached for subsequent invocations. 
-*/ -const envHasCsp = cached(() => { - try { - return new Function("return false")(); - } catch { - return true; - } -}); - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/generics.js -const brand = noSuggest("brand"); -/** primitive key used to represent an inferred type at compile-time */ -const inferred = noSuggest("arkInferred"); - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/hkt.js -const args = noSuggest("args"); -var Hkt = class { - constructor() {} -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/isomorphic.js -/** get a CJS/ESM compatible string representing the current file */ -const fileName = () => { - try { - const error = /* @__PURE__ */ new Error(); - const stackLine = error.stack?.split("\n")[2]?.trim() || ""; - const filePath = stackLine.match(/\(?(.+?)(?::\d+:\d+)?\)?$/)?.[1] || "unknown"; - return filePath.replace(/^file:\/\//, ""); - } catch { - return "unknown"; - } -}; -const env = globalThis.process?.env ?? {}; -const isomorphic = { - fileName, - env -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/strings.js -const capitalize$1 = (s) => s[0].toUpperCase() + s.slice(1); -const anchoredRegex = (regex$1) => new RegExp(anchoredSource(regex$1), typeof regex$1 === "string" ? "" : regex$1.flags); -const anchoredSource = (regex$1) => { - const source = typeof regex$1 === "string" ? regex$1 : regex$1.source; - return `^(?:${source})$`; -}; -const RegexPatterns = { - negativeLookahead: (pattern) => `(?!${pattern})`, - nonCapturingGroup: (pattern) => `(?:${pattern})` -}; -const escapeChar = "\\"; -const whitespaceChars = { - " ": 1, - "\n": 1, - " ": 1 -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/numbers.js -const anchoredNegativeZeroPattern = /^-0\.?0*$/.source; -const positiveIntegerPattern = /[1-9]\d*/.source; -const looseDecimalPattern = /\.\d+/.source; -const strictDecimalPattern = /\.\d*[1-9]/.source; -const createNumberMatcher = (opts) => anchoredRegex(RegexPatterns.negativeLookahead(anchoredNegativeZeroPattern) + RegexPatterns.nonCapturingGroup("-?" + RegexPatterns.nonCapturingGroup(RegexPatterns.nonCapturingGroup("0|" + positiveIntegerPattern) + RegexPatterns.nonCapturingGroup(opts.decimalPattern) + "?") + (opts.allowDecimalOnly ? "|" + opts.decimalPattern : "") + "?")); -/** -* Matches a well-formatted numeric expression according to the following rules: -* 1. Must include an integer portion (i.e. '.321' must be written as '0.321') -* 2. The first digit of the value must not be 0, unless the entire integer portion is 0 -* 3. If the value includes a decimal, its last digit may not be 0 -* 4. 
The value may not be "-0" -*/ -const wellFormedNumberMatcher = createNumberMatcher({ - decimalPattern: strictDecimalPattern, - allowDecimalOnly: false -}); -const isWellFormedNumber = wellFormedNumberMatcher.test.bind(wellFormedNumberMatcher); -/** -* Similar to wellFormedNumber but more permissive in the following ways: -* -* - Allows numbers without an integer portion like ".5" (well-formed equivalent is "0.5") -* - Allows decimals with trailing zeroes like "0.10" (well-formed equivalent is "0.1") -*/ -const numericStringMatcher = createNumberMatcher({ - decimalPattern: looseDecimalPattern, - allowDecimalOnly: true -}); -const isNumericString = numericStringMatcher.test.bind(numericStringMatcher); -const numberLikeMatcher = /^-?\d*\.?\d*$/; -const isNumberLike = (s) => s.length !== 0 && numberLikeMatcher.test(s); -/** -* Matches a well-formatted integer according to the following rules: -* 1. must begin with an integer, the first digit of which cannot be 0 unless the entire value is 0 -* 2. The value may not be "-0" -*/ -const wellFormedIntegerMatcher = anchoredRegex(RegexPatterns.negativeLookahead("^-0$") + "-?" + RegexPatterns.nonCapturingGroup(RegexPatterns.nonCapturingGroup("0|" + positiveIntegerPattern))); -const isWellFormedInteger = wellFormedIntegerMatcher.test.bind(wellFormedIntegerMatcher); -const integerLikeMatcher = /^-?\d+$/; -const isIntegerLike = integerLikeMatcher.test.bind(integerLikeMatcher); -const numericLiteralDescriptions = { - number: "a number", - bigint: "a bigint", - integer: "an integer" -}; -const writeMalformedNumericLiteralMessage = (def, kind) => `'${def}' was parsed as ${numericLiteralDescriptions[kind]} but could not be narrowed to a literal value. Avoid unnecessary leading or trailing zeros and other abnormal notation`; -const isWellFormed = (def, kind) => kind === "number" ? isWellFormedNumber(def) : isWellFormedInteger(def); -const parseKind = (def, kind) => kind === "number" ? Number(def) : Number.parseInt(def); -const isKindLike = (def, kind) => kind === "number" ? isNumberLike(def) : isIntegerLike(def); -const tryParseNumber = (token, options) => parseNumeric(token, "number", options); -const tryParseWellFormedNumber = (token, options) => parseNumeric(token, "number", { - ...options, - strict: true -}); -const tryParseInteger = (token, options) => parseNumeric(token, "integer", options); -const parseNumeric = (token, kind, options) => { - const value$1 = parseKind(token, kind); - if (!Number.isNaN(value$1)) { - if (isKindLike(token, kind)) { - if (options?.strict) return isWellFormed(token, kind) ? value$1 : throwParseError(writeMalformedNumericLiteralMessage(token, kind)); - return value$1; - } - } - return options?.errorOnFail ? throwParseError(options?.errorOnFail === true ? 
`Failed to parse ${numericLiteralDescriptions[kind]} from '${token}'` : options?.errorOnFail) : void 0; -}; -const tryParseWellFormedBigint = (def) => { - if (def[def.length - 1] !== "n") return; - const maybeIntegerLiteral = def.slice(0, -1); - let value$1; - try { - value$1 = BigInt(maybeIntegerLiteral); - } catch { - return; - } - if (wellFormedIntegerMatcher.test(maybeIntegerLiteral)) return value$1; - if (integerLikeMatcher.test(maybeIntegerLiteral)) return throwParseError(writeMalformedNumericLiteralMessage(def, "bigint")); -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/registry.js -const arkUtilVersion = "0.46.0"; -const initialRegistryContents = { - version: arkUtilVersion, - filename: isomorphic.fileName(), - FileConstructor -}; -const registry = initialRegistryContents; -const namesByResolution = /* @__PURE__ */ new Map(); -const nameCounts = Object.create(null); -const register = (value$1) => { - const existingName = namesByResolution.get(value$1); - if (existingName) return existingName; - let name = baseNameFor(value$1); - if (nameCounts[name]) name = `${name}${nameCounts[name]++}`; - else nameCounts[name] = 1; - registry[name] = value$1; - namesByResolution.set(value$1, name); - return name; -}; -const isDotAccessible = (keyName) => /^[$A-Z_a-z][\w$]*$/.test(keyName); -const baseNameFor = (value$1) => { - switch (typeof value$1) { - case "object": { - if (value$1 === null) break; - const prefix = objectKindOf(value$1) ?? "object"; - return prefix[0].toLowerCase() + prefix.slice(1); - } - case "function": return isDotAccessible(value$1.name) ? value$1.name : "fn"; - case "symbol": return value$1.description && isDotAccessible(value$1.description) ? value$1.description : "symbol"; - } - return throwInternalError(`Unexpected attempt to register serializable value of type ${domainOf(value$1)}`); -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/primitive.js -const serializePrimitive = (value$1) => typeof value$1 === "string" ? JSON.stringify(value$1) : typeof value$1 === "bigint" ? `${value$1}n` : `${value$1}`; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/serialize.js -const snapshot = (data, opts = {}) => _serialize(data, { - onUndefined: `$ark.undefined`, - onBigInt: (n) => `$ark.bigint-${n}`, - ...opts -}, []); -const printable = (data, opts) => { - switch (domainOf(data)) { - case "object": - const o = data; - const ctorName = o.constructor.name; - return ctorName === "Object" || ctorName === "Array" ? opts?.quoteKeys === false ? stringifyUnquoted(o, opts?.indent ?? 0, "") : JSON.stringify(_serialize(o, printableOpts, []), null, opts?.indent) : stringifyUnquoted(o, opts?.indent ?? 0, ""); - case "symbol": return printableOpts.onSymbol(data); - default: return serializePrimitive(data); - } -}; -const stringifyUnquoted = (value$1, indent$1, currentIndent) => { - if (typeof value$1 === "function") return printableOpts.onFunction(value$1); - if (typeof value$1 !== "object" || value$1 === null) return serializePrimitive(value$1); - const nextIndent = currentIndent + " ".repeat(indent$1); - if (Array.isArray(value$1)) { - if (value$1.length === 0) return "[]"; - const items = value$1.map((item) => stringifyUnquoted(item, indent$1, nextIndent)).join(",\n" + nextIndent); - return indent$1 ? 
`[\n${nextIndent}${items}\n${currentIndent}]` : `[${items}]`; - } - const ctorName = value$1.constructor.name; - if (ctorName === "Object") { - const keyValues = stringAndSymbolicEntriesOf(value$1).map(([key, val]) => { - const stringifiedKey = typeof key === "symbol" ? printableOpts.onSymbol(key) : isDotAccessible(key) ? key : JSON.stringify(key); - const stringifiedValue = stringifyUnquoted(val, indent$1, nextIndent); - return `${nextIndent}${stringifiedKey}: ${stringifiedValue}`; - }); - if (keyValues.length === 0) return "{}"; - return indent$1 ? `{\n${keyValues.join(",\n")}\n${currentIndent}}` : `{${keyValues.join(", ")}}`; - } - if (value$1 instanceof Date) return describeCollapsibleDate(value$1); - if ("expression" in value$1 && typeof value$1.expression === "string") return value$1.expression; - return ctorName; -}; -const printableOpts = { - onCycle: () => "(cycle)", - onSymbol: (v) => `Symbol(${register(v)})`, - onFunction: (v) => `Function(${register(v)})` -}; -const _serialize = (data, opts, seen) => { - switch (domainOf(data)) { - case "object": { - const o = data; - if ("toJSON" in o && typeof o.toJSON === "function") return o.toJSON(); - if (typeof o === "function") return printableOpts.onFunction(o); - if (seen.includes(o)) return "(cycle)"; - const nextSeen = [...seen, o]; - if (Array.isArray(o)) return o.map((item) => _serialize(item, opts, nextSeen)); - if (o instanceof Date) return o.toDateString(); - const result = {}; - for (const k in o) result[k] = _serialize(o[k], opts, nextSeen); - for (const s of Object.getOwnPropertySymbols(o)) result[opts.onSymbol?.(s) ?? s.toString()] = _serialize(o[s], opts, nextSeen); - return result; - } - case "symbol": return printableOpts.onSymbol(data); - case "bigint": return opts.onBigInt?.(data) ?? `${data}n`; - case "undefined": return opts.onUndefined ?? "undefined"; - case "string": return data.replaceAll("\\", "\\\\"); - default: return data; - } -}; -/** -* Converts a Date instance to a human-readable description relative to its precision -*/ -const describeCollapsibleDate = (date) => { - const year = date.getFullYear(); - const month = date.getMonth(); - const dayOfMonth = date.getDate(); - const hours = date.getHours(); - const minutes = date.getMinutes(); - const seconds = date.getSeconds(); - const milliseconds = date.getMilliseconds(); - if (month === 0 && dayOfMonth === 1 && hours === 0 && minutes === 0 && seconds === 0 && milliseconds === 0) return `${year}`; - const datePortion = `${months[month]} ${dayOfMonth}, ${year}`; - if (hours === 0 && minutes === 0 && seconds === 0 && milliseconds === 0) return datePortion; - let timePortion = date.toLocaleTimeString(); - const suffix$1 = timePortion.endsWith(" AM") || timePortion.endsWith(" PM") ? 
timePortion.slice(-3) : ""; - if (suffix$1) timePortion = timePortion.slice(0, -suffix$1.length); - if (milliseconds) timePortion += `.${pad(milliseconds, 3)}`; - else if (timeWithUnnecessarySeconds.test(timePortion)) timePortion = timePortion.slice(0, -3); - return `${timePortion + suffix$1}, ${datePortion}`; -}; -const months = [ - "January", - "February", - "March", - "April", - "May", - "June", - "July", - "August", - "September", - "October", - "November", - "December" -]; -const timeWithUnnecessarySeconds = /:\d\d:00$/; -const pad = (value$1, length) => String(value$1).padStart(length, "0"); - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/path.js -const appendStringifiedKey = (path$1, prop, ...[opts]) => { - const stringifySymbol = opts?.stringifySymbol ?? printable; - let propAccessChain = path$1; - switch (typeof prop) { - case "string": - propAccessChain = isDotAccessible(prop) ? path$1 === "" ? prop : `${path$1}.${prop}` : `${path$1}[${JSON.stringify(prop)}]`; - break; - case "number": - propAccessChain = `${path$1}[${prop}]`; - break; - case "symbol": - propAccessChain = `${path$1}[${stringifySymbol(prop)}]`; - break; - default: if (opts?.stringifyNonKey) propAccessChain = `${path$1}[${opts.stringifyNonKey(prop)}]`; - else throwParseError(`${printable(prop)} must be a PropertyKey or stringifyNonKey must be passed to options`); - } - return propAccessChain; -}; -const stringifyPath = (path$1, ...opts) => path$1.reduce((s, k) => appendStringifiedKey(s, k, ...opts), ""); -var ReadonlyPath = class extends ReadonlyArray { - cache = {}; - constructor(...items) { - super(); - this.push(...items); - } - toJSON() { - if (this.cache.json) return this.cache.json; - this.cache.json = []; - for (let i = 0; i < this.length; i++) this.cache.json.push(typeof this[i] === "symbol" ? printable(this[i]) : this[i]); - return this.cache.json; - } - stringify() { - if (this.cache.stringify) return this.cache.stringify; - return this.cache.stringify = stringifyPath(this); - } - stringifyAncestors() { - if (this.cache.stringifyAncestors) return this.cache.stringifyAncestors; - let propString = ""; - const result = [propString]; - for (const path$1 of this) { - propString = appendStringifiedKey(propString, path$1); - result.push(propString); - } - return this.cache.stringifyAncestors = result; - } -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/scanner.js -var Scanner = class { - chars; - i; - def; - constructor(def) { - this.def = def; - this.chars = [...def]; - this.i = 0; - } - /** Get lookahead and advance scanner by one */ - shift() { - return this.chars[this.i++] ?? ""; - } - get lookahead() { - return this.chars[this.i] ?? ""; - } - get nextLookahead() { - return this.chars[this.i + 1] ?? ""; - } - get length() { - return this.chars.length; - } - shiftUntil(condition) { - let shifted = ""; - while (this.lookahead) { - if (condition(this, shifted)) if (shifted[shifted.length - 1] === escapeChar) shifted = shifted.slice(0, -1); - else break; - shifted += this.shift(); - } - return shifted; - } - shiftUntilLookahead(charOrSet) { - return typeof charOrSet === "string" ? this.shiftUntil((s) => s.lookahead === charOrSet) : this.shiftUntil((s) => s.lookahead in charOrSet); - } - shiftUntilNonWhitespace() { - return this.shiftUntil(() => !(this.lookahead in whitespaceChars)); - } - jumpToIndex(i) { - this.i = i < 0 ? 
this.length + i : i; - } - jumpForward(count) { - this.i += count; - } - get location() { - return this.i; - } - get unscanned() { - return this.chars.slice(this.i, this.length).join(""); - } - get scanned() { - return this.chars.slice(0, this.i).join(""); - } - sliceChars(start, end) { - return this.chars.slice(start, end).join(""); - } - lookaheadIs(char) { - return this.lookahead === char; - } - lookaheadIsIn(tokens) { - return this.lookahead in tokens; - } -}; - -//#endregion -//#region node_modules/.pnpm/@ark+util@0.46.0/node_modules/@ark/util/out/traits.js -const implementedTraits = noSuggest("implementedTraits"); -const hasTrait = (traitClass) => (o) => { - if (!hasDomain(o, "object")) return false; - if (implementedTraits in o.constructor && o.constructor[implementedTraits].includes(traitClass)) return true; - return ancestorsOf(o).includes(traitClass); -}; -/** @ts-ignore required to extend NoopBase */ -var Trait = class extends NoopBase { - static get [Symbol.hasInstance]() { - return hasTrait(this); - } - traitsOf() { - return implementedTraits in this.constructor ? this.constructor[implementedTraits] : []; - } -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/registry.js -let _registryName = "$ark"; -let suffix = 2; -while (_registryName in globalThis) _registryName = `$ark${suffix++}`; -const registryName = _registryName; -globalThis[registryName] = registry; -const $ark = registry; -const reference = (name) => `${registryName}.${name}`; -const registeredReference = (value$1) => reference(register(value$1)); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/compile.js -var CompiledFunction = class extends CastableBase { - argNames; - body = ""; - constructor(...args$1) { - super(); - this.argNames = args$1; - for (const arg of args$1) { - if (arg in this) throw new Error(`Arg name '${arg}' would overwrite an existing property on FunctionBody`); - this[arg] = arg; - } - } - indentation = 0; - indent() { - this.indentation += 4; - return this; - } - dedent() { - this.indentation -= 4; - return this; - } - prop(key, optional = false) { - return compileLiteralPropAccess(key, optional); - } - index(key, optional = false) { - return indexPropAccess(`${key}`, optional); - } - line(statement) { - this.body += `${" ".repeat(this.indentation)}${statement}\n`; - return this; - } - const(identifier, expression) { - this.line(`const ${identifier} = ${expression}`); - return this; - } - let(identifier, expression) { - return this.line(`let ${identifier} = ${expression}`); - } - set(identifier, expression) { - return this.line(`${identifier} = ${expression}`); - } - if(condition, then) { - return this.block(`if (${condition})`, then); - } - elseIf(condition, then) { - return this.block(`else if (${condition})`, then); - } - else(then) { - return this.block("else", then); - } - /** Current index is "i" */ - for(until, body, initialValue = 0) { - return this.block(`for (let i = ${initialValue}; ${until}; i++)`, body); - } - /** Current key is "k" */ - forIn(object$1, body) { - return this.block(`for (const k in ${object$1})`, body); - } - block(prefix, contents, suffix$1 = "") { - this.line(`${prefix} {`); - this.indent(); - contents(this); - this.dedent(); - return this.line(`}${suffix$1}`); - } - return(expression = "") { - return this.line(`return ${expression}`); - } - write(name = "anonymous", indent$1 = 0) { - return `${name}(${this.argNames.join(", ")}) { ${indent$1 ? 
this.body.split("\n").map((l) => " ".repeat(indent$1) + `${l}`).join("\n") : this.body} }`; - } - compile() { - return new DynamicFunction(...this.argNames, this.body); - } -}; -const compileSerializedValue = (value$1) => hasDomain(value$1, "object") || typeof value$1 === "symbol" ? registeredReference(value$1) : serializePrimitive(value$1); -const compileLiteralPropAccess = (key, optional = false) => { - if (typeof key === "string" && isDotAccessible(key)) return `${optional ? "?" : ""}.${key}`; - return indexPropAccess(serializeLiteralKey(key), optional); -}; -const serializeLiteralKey = (key) => typeof key === "symbol" ? registeredReference(key) : JSON.stringify(key); -const indexPropAccess = (key, optional = false) => `${optional ? "?." : ""}[${key}]`; -var NodeCompiler = class extends CompiledFunction { - traversalKind; - optimistic; - constructor(ctx) { - super("data", "ctx"); - this.traversalKind = ctx.kind; - this.optimistic = ctx.optimistic === true; - } - invoke(node$1, opts) { - const arg = opts?.arg ?? this.data; - const requiresContext = typeof node$1 === "string" ? true : this.requiresContextFor(node$1); - const id = typeof node$1 === "string" ? node$1 : node$1.id; - if (requiresContext) return `${this.referenceToId(id, opts)}(${arg}, ${this.ctx})`; - return `${this.referenceToId(id, opts)}(${arg})`; - } - referenceToId(id, opts) { - const invokedKind = opts?.kind ?? this.traversalKind; - const base = `this.${id}${invokedKind}`; - return opts?.bind ? `${base}.bind(${opts?.bind})` : base; - } - requiresContextFor(node$1) { - return this.traversalKind === "Apply" || node$1.allowsRequiresContext; - } - initializeErrorCount() { - return this.const("errorCount", "ctx.currentErrorCount"); - } - returnIfFail() { - return this.if("ctx.currentErrorCount > errorCount", () => this.return()); - } - returnIfFailFast() { - return this.if("ctx.failFast && ctx.currentErrorCount > errorCount", () => this.return()); - } - traverseKey(keyExpression, accessExpression, node$1) { - const requiresContext = this.requiresContextFor(node$1); - if (requiresContext) this.line(`${this.ctx}.path.push(${keyExpression})`); - this.check(node$1, { arg: accessExpression }); - if (requiresContext) this.line(`${this.ctx}.path.pop()`); - return this; - } - check(node$1, opts) { - return this.traversalKind === "Allows" ? this.if(`!${this.invoke(node$1, opts)}`, () => this.return(false)) : this.line(this.invoke(node$1, opts)); - } -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/utils.js -const makeRootAndArrayPropertiesMutable = (o) => flatMorph(o, (k, v) => [k, isArray(v) ? 
[...v] : v]); -const arkKind = noSuggest("arkKind"); -const hasArkKind = (value$1, kind) => value$1?.[arkKind] === kind; -const isNode = (value$1) => hasArkKind(value$1, "root") || hasArkKind(value$1, "constraint"); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/implement.js -const basisKinds = [ - "unit", - "proto", - "domain" -]; -const structuralKinds = [ - "required", - "optional", - "index", - "sequence" -]; -const refinementKinds = [ - "pattern", - "divisor", - "exactLength", - "max", - "min", - "maxLength", - "minLength", - "before", - "after" -]; -const constraintKinds = [ - ...refinementKinds, - ...structuralKinds, - "structure", - "predicate" -]; -const rootKinds = [ - "alias", - "union", - "morph", - "unit", - "intersection", - "proto", - "domain" -]; -const nodeKinds = [...rootKinds, ...constraintKinds]; -const constraintKeys = flatMorph(constraintKinds, (i, kind) => [kind, 1]); -const structureKeys = flatMorph([...structuralKinds, "undeclared"], (i, k) => [k, 1]); -const precedenceByKind = flatMorph(nodeKinds, (i, kind) => [kind, i]); -const isNodeKind = (value$1) => typeof value$1 === "string" && value$1 in precedenceByKind; -const precedenceOfKind = (kind) => precedenceByKind[kind]; -const schemaKindsRightOf = (kind) => rootKinds.slice(precedenceOfKind(kind) + 1); -const unionChildKinds = [...schemaKindsRightOf("union"), "alias"]; -const morphChildKinds = [...schemaKindsRightOf("morph"), "alias"]; -const defaultValueSerializer = (v) => { - if (typeof v === "string" || typeof v === "boolean" || v === null) return v; - if (typeof v === "number") { - if (Number.isNaN(v)) return "NaN"; - if (v === Number.POSITIVE_INFINITY) return "Infinity"; - if (v === Number.NEGATIVE_INFINITY) return "-Infinity"; - return v; - } - return compileSerializedValue(v); -}; -const compileObjectLiteral = (ctx) => { - let result = "{ "; - for (const [k, v] of Object.entries(ctx)) result += `${k}: ${compileSerializedValue(v)}, `; - return result + " }"; -}; -const implementNode = (_) => { - const implementation$22 = _; - if (implementation$22.hasAssociatedError) { - implementation$22.defaults.expected ??= (ctx) => "description" in ctx ? ctx.description : implementation$22.defaults.description(ctx); - implementation$22.defaults.actual ??= (data) => printable(data); - implementation$22.defaults.problem ??= (ctx) => `must be ${ctx.expected}${ctx.actual ? 
` (was ${ctx.actual})` : ""}`; - implementation$22.defaults.message ??= (ctx) => { - if (ctx.path.length === 0) return ctx.problem; - const problemWithLocation = `${ctx.propString} ${ctx.problem}`; - if (problemWithLocation[0] === "[") return `value at ${problemWithLocation}`; - return problemWithLocation; - }; - } - return implementation$22; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/toJsonSchema.js -var ToJsonSchemaError = class extends Error { - name = "ToJsonSchemaError"; - code; - context; - constructor(code, context) { - super(printable(context, { - quoteKeys: false, - indent: 4 - })); - this.code = code; - this.context = context; - } - hasCode(code) { - return this.code === code; - } -}; -const defaultConfig = { - dialect: "https://json-schema.org/draft/2020-12/schema", - useRefs: false, - fallback: { - arrayObject: (ctx) => ToJsonSchema.throw("arrayObject", ctx), - arrayPostfix: (ctx) => ToJsonSchema.throw("arrayPostfix", ctx), - defaultValue: (ctx) => ToJsonSchema.throw("defaultValue", ctx), - domain: (ctx) => ToJsonSchema.throw("domain", ctx), - morph: (ctx) => ToJsonSchema.throw("morph", ctx), - patternIntersection: (ctx) => ToJsonSchema.throw("patternIntersection", ctx), - predicate: (ctx) => ToJsonSchema.throw("predicate", ctx), - proto: (ctx) => ToJsonSchema.throw("proto", ctx), - symbolKey: (ctx) => ToJsonSchema.throw("symbolKey", ctx), - unit: (ctx) => ToJsonSchema.throw("unit", ctx), - date: (ctx) => ToJsonSchema.throw("date", ctx) - } -}; -const ToJsonSchema = { - Error: ToJsonSchemaError, - throw: (...args$1) => { - throw new ToJsonSchema.Error(...args$1); - }, - throwInternalOperandError: (kind, schema$1) => throwInternalError(`Unexpected JSON Schema input for ${kind}: ${printable(schema$1)}`), - defaultConfig -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/config.js -$ark.config ??= {}; -const mergeConfigs = (base, merged) => { - if (!merged) return base; - const result = { ...base }; - let k; - for (k in merged) { - const keywords$1 = { ...base.keywords }; - if (k === "keywords") { - for (const flatAlias in merged[k]) { - const v = merged.keywords[flatAlias]; - if (v === void 0) continue; - keywords$1[flatAlias] = typeof v === "string" ? { description: v } : v; - } - result.keywords = keywords$1; - } else if (k === "toJsonSchema") result[k] = mergeToJsonSchemaConfigs(base.toJsonSchema, merged.toJsonSchema); - else if (isNodeKind(k)) result[k] = { - ...base[k], - ...merged[k] - }; - else result[k] = merged[k]; - } - return result; -}; -const mergeToJsonSchemaConfigs = (baseConfig, mergedConfig) => { - if (!baseConfig) return mergedConfig ?? {}; - if (!mergedConfig) return baseConfig; - const result = { ...baseConfig }; - let k; - for (k in mergedConfig) if (k === "fallback") result.fallback = mergeFallbacks(baseConfig.fallback, mergedConfig.fallback); - else result[k] = mergedConfig[k]; - return result; -}; -const mergeFallbacks = (base, merged) => { - base = normalizeFallback(base); - merged = normalizeFallback(merged); - const result = {}; - let code; - for (code in ToJsonSchema.defaultConfig.fallback) result[code] = merged[code] ?? merged.default ?? base[code] ?? base.default ?? ToJsonSchema.defaultConfig.fallback[code]; - return result; -}; -const normalizeFallback = (fallback) => typeof fallback === "function" ? { default: fallback } : fallback ?? 
{}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/errors.js -var ArkError = class ArkError extends CastableBase { - [arkKind] = "error"; - path; - data; - nodeConfig; - input; - ctx; - constructor({ prefixPath, relativePath,...input }, ctx) { - super(); - this.input = input; - this.ctx = ctx; - defineProperties(this, input); - const data = ctx.data; - if (input.code === "union") input.errors = input.errors.flatMap((innerError) => { - const flat = innerError.hasCode("union") ? innerError.errors : [innerError]; - if (!prefixPath && !relativePath) return flat; - return flat.map((e) => e.transform((e$1) => ({ - ...e$1, - path: conflatenateAll(prefixPath, e$1.path, relativePath) - }))); - }); - this.nodeConfig = ctx.config[this.code]; - const basePath = [...input.path ?? ctx.path]; - if (relativePath) basePath.push(...relativePath); - if (prefixPath) basePath.unshift(...prefixPath); - this.path = new ReadonlyPath(...basePath); - this.data = "data" in input ? input.data : data; - } - transform(f) { - return new ArkError(f({ - data: this.data, - path: this.path, - ...this.input - }), this.ctx); - } - hasCode(code) { - return this.code === code; - } - get propString() { - return stringifyPath(this.path); - } - get expected() { - if (this.input.expected) return this.input.expected; - const config = this.meta?.expected ?? this.nodeConfig.expected; - return typeof config === "function" ? config(this.input) : config; - } - get actual() { - if (this.input.actual) return this.input.actual; - const config = this.meta?.actual ?? this.nodeConfig.actual; - return typeof config === "function" ? config(this.data) : config; - } - get problem() { - if (this.input.problem) return this.input.problem; - const config = this.meta?.problem ?? this.nodeConfig.problem; - return typeof config === "function" ? config(this) : config; - } - get message() { - if (this.input.message) return this.input.message; - const config = this.meta?.message ?? this.nodeConfig.message; - return typeof config === "function" ? config(this) : config; - } - get flat() { - return this.hasCode("intersection") ? [...this.errors] : [this]; - } - toJSON() { - return { - data: this.data, - path: this.path, - ...this.input, - expected: this.expected, - actual: this.actual, - problem: this.problem, - message: this.message - }; - } - toString() { - return this.message; - } - throw() { - throw this; - } -}; -/** -* A ReadonlyArray of `ArkError`s returned by a Type on invalid input. -* -* Subsequent errors added at an existing path are merged into an -* ArkError intersection. -*/ -var ArkErrors = class ArkErrors extends ReadonlyArray { - [arkKind] = "errors"; - ctx; - constructor(ctx) { - super(); - this.ctx = ctx; - } - /** - * Errors by a pathString representing their location. - */ - byPath = Object.create(null); - /** - * {@link byPath} flattened so that each value is an array of ArkError instances at that path. - * - * ✅ Since "intersection" errors will be flattened to their constituent `.errors`, - * they will never be directly present in this representation. - */ - get flatByPath() { - return flatMorph(this.byPath, (k, v) => [k, v.flat]); - } - /** - * {@link byPath} flattened so that each value is an array of problem strings at that path. - */ - get flatProblemsByPath() { - return flatMorph(this.byPath, (k, v) => [k, v.flat.map((e) => e.problem)]); - } - /** - * All pathStrings at which errors are present mapped to the errors occuring - * at that path or any nested path within it. 
- */ - byAncestorPath = Object.create(null); - count = 0; - mutable = this; - /** - * Throw a TraversalError based on these errors. - */ - throw() { - throw this.toTraversalError(); - } - /** - * Converts ArkErrors to TraversalError, a subclass of `Error` suitable for throwing with nice - * formatting. - */ - toTraversalError() { - return new TraversalError(this); - } - /** - * Append an ArkError to this array, ignoring duplicates. - */ - add(error) { - if (this.includes(error)) return; - this._add(error); - } - transform(f) { - const result = new ArkErrors(this.ctx); - for (const e of this) result.add(f(e)); - return result; - } - /** - * Add all errors from an ArkErrors instance, ignoring duplicates and - * prefixing their paths with that of the current Traversal. - */ - merge(errors) { - for (const e of errors) { - if (this.includes(e)) continue; - this._add(new ArkError({ - ...e, - path: [...this.ctx.path, ...e.path] - }, this.ctx)); - } - } - /** - * @internal - */ - affectsPath(path$1) { - if (this.length === 0) return false; - return path$1.stringifyAncestors().some((s) => s in this.byPath) || path$1.stringify() in this.byAncestorPath; - } - /** - * A human-readable summary of all errors. - */ - get summary() { - return this.toString(); - } - /** - * Alias of this ArkErrors instance for StandardSchema compatibility. - */ - get issues() { - return this; - } - toJSON() { - return [...this.map((e) => e.toJSON())]; - } - toString() { - return this.join("\n"); - } - _add(error) { - const existing = this.byPath[error.propString]; - if (existing) { - if (existing.hasCode("union") && existing.errors.length === 0) return; - const errorIntersection = error.hasCode("union") && error.errors.length === 0 ? error : new ArkError({ - code: "intersection", - errors: existing.hasCode("intersection") ? [...existing.errors, error] : [existing, error] - }, this.ctx); - const existingIndex = this.indexOf(existing); - this.mutable[existingIndex === -1 ? 
this.length : existingIndex] = errorIntersection; - this.byPath[error.propString] = errorIntersection; - this.addAncestorPaths(error); - } else { - this.byPath[error.propString] = error; - this.addAncestorPaths(error); - this.mutable.push(error); - } - this.count++; - } - addAncestorPaths(error) { - for (const propString of error.path.stringifyAncestors()) this.byAncestorPath[propString] = append(this.byAncestorPath[propString], error); - } -}; -var TraversalError = class extends Error { - name = "TraversalError"; - constructor(errors) { - if (errors.length === 1) super(errors.summary); - else super("\n" + errors.map((error) => ` • ${indent(error)}`).join("\n")); - Object.defineProperty(this, "arkErrors", { - value: errors, - enumerable: false - }); - } -}; -const indent = (error) => error.toString().split("\n").join("\n "); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/traversal.js -var Traversal = class { - /** - * #### the path being validated or morphed - * - * ✅ array indices represented as numbers - * ⚠️ mutated during traversal - use `path.slice(0)` to snapshot - * 🔗 use {@link propString} for a stringified version - */ - path = []; - /** - * #### {@link ArkErrors} that will be part of this traversal's finalized result - * - * ✅ will always be an empty array for a valid traversal - */ - errors = new ArkErrors(this); - /** - * #### the original value being traversed - */ - root; - /** - * #### configuration for this traversal - * - * ✅ options can affect traversal results and error messages - * ✅ defaults < global config < scope config - * ✅ does not include options configured on individual types - */ - config; - queuedMorphs = []; - branches = []; - seen = {}; - constructor(root, config) { - this.root = root; - this.config = config; - } - /** - * #### the data being validated or morphed - * - * ✅ extracted from {@link root} at {@link path} - */ - get data() { - let result = this.root; - for (const segment of this.path) result = result?.[segment]; - return result; - } - /** - * #### a string representing {@link path} - * - * @propString - */ - get propString() { - return stringifyPath(this.path); - } - /** - * #### add an {@link ArkError} and return `false` - * - * ✅ useful for predicates like `.narrow` - */ - reject(input) { - this.error(input); - return false; - } - /** - * #### add an {@link ArkError} from a description and return `false` - * - * ✅ useful for predicates like `.narrow` - * 🔗 equivalent to {@link reject}({ expected }) - */ - mustBe(expected) { - this.error(expected); - return false; - } - error(input) { - const errCtx = typeof input === "object" ? input.code ? input : { - ...input, - code: "predicate" - } : { - code: "predicate", - expected: input - }; - return this.errorFromContext(errCtx); - } - /** - * #### whether {@link currentBranch} (or the traversal root, outside a union) has one or more errors - */ - hasError() { - return this.currentErrorCount !== 0; - } - get currentBranch() { - return this.branches.at(-1); - } - queueMorphs(morphs) { - const input = { - path: new ReadonlyPath(...this.path), - morphs - }; - if (this.currentBranch) this.currentBranch.queuedMorphs.push(input); - else this.queuedMorphs.push(input); - } - finalize(onFail) { - if (this.queuedMorphs.length) { - if (typeof this.root === "object" && this.root !== null && this.config.clone) this.root = this.config.clone(this.root); - this.applyQueuedMorphs(); - } - if (this.hasError()) return onFail ? 
onFail(this.errors) : this.errors; - return this.root; - } - get currentErrorCount() { - return this.currentBranch ? this.currentBranch.error ? 1 : 0 : this.errors.count; - } - get failFast() { - return this.branches.length !== 0; - } - pushBranch() { - this.branches.push({ - error: void 0, - queuedMorphs: [] - }); - } - popBranch() { - return this.branches.pop(); - } - /** - * @internal - * Convenience for casting from InternalTraversal to Traversal - * for cases where the extra methods on the external type are expected, e.g. - * a morph or predicate. - */ - get external() { - return this; - } - errorFromNodeContext(input) { - return this.errorFromContext(input); - } - errorFromContext(errCtx) { - const error = new ArkError(errCtx, this); - if (this.currentBranch) this.currentBranch.error = error; - else this.errors.add(error); - return error; - } - applyQueuedMorphs() { - while (this.queuedMorphs.length) { - const queuedMorphs = this.queuedMorphs; - this.queuedMorphs = []; - for (const { path: path$1, morphs } of queuedMorphs) { - if (this.errors.affectsPath(path$1)) continue; - this.applyMorphsAtPath(path$1, morphs); - } - } - } - applyMorphsAtPath(path$1, morphs) { - const key = path$1.at(-1); - let parent; - if (key !== void 0) { - parent = this.root; - for (let pathIndex = 0; pathIndex < path$1.length - 1; pathIndex++) parent = parent[path$1[pathIndex]]; - } - this.path = [...path$1]; - for (const morph of morphs) { - const morphIsNode = isNode(morph); - const result = morph(parent === void 0 ? this.root : parent[key], this); - if (result instanceof ArkError) { - this.errors.add(result); - break; - } - if (result instanceof ArkErrors) { - if (!morphIsNode) this.errors.merge(result); - break; - } - if (parent === void 0) this.root = result; - else parent[key] = result; - this.applyQueuedMorphs(); - } - } -}; -const traverseKey = (key, fn, ctx) => { - if (!ctx) return fn(); - ctx.path.push(key); - const result = fn(); - ctx.path.pop(); - return result; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/node.js -var BaseNode = class extends Callable { - attachments; - $; - onFail; - includesTransform; - includesContextualPredicate; - isCyclic; - allowsRequiresContext; - rootApplyStrategy; - contextFreeMorph; - rootApply; - referencesById; - shallowReferences; - flatRefs; - flatMorphs; - allows; - get shallowMorphs() { - return []; - } - constructor(attachments, $) { - super((data, pipedFromCtx, onFail = this.onFail) => { - if (pipedFromCtx) { - this.traverseApply(data, pipedFromCtx); - return pipedFromCtx.hasError() ? pipedFromCtx.errors : pipedFromCtx.data; - } - return this.rootApply(data, onFail); - }, { attach: attachments }); - this.attachments = attachments; - this.$ = $; - this.onFail = this.meta.onFail ?? this.$.resolvedConfig.onFail; - this.includesTransform = this.hasKind("morph") || this.hasKind("structure") && this.structuralMorph !== void 0; - this.includesContextualPredicate = this.hasKind("predicate") && this.inner.predicate.length !== 1; - this.isCyclic = this.kind === "alias"; - this.referencesById = { [this.id]: this }; - this.shallowReferences = this.hasKind("structure") ? 
[this, ...this.children] : this.children.reduce((acc, child) => appendUniqueNodes(acc, child.shallowReferences), [this]); - const isStructural = this.isStructural(); - this.flatRefs = []; - this.flatMorphs = []; - for (let i = 0; i < this.children.length; i++) { - this.includesTransform ||= this.children[i].includesTransform; - this.includesContextualPredicate ||= this.children[i].includesContextualPredicate; - this.isCyclic ||= this.children[i].isCyclic; - if (!isStructural) { - const childFlatRefs = this.children[i].flatRefs; - for (let j = 0; j < childFlatRefs.length; j++) { - const childRef = childFlatRefs[j]; - if (!this.flatRefs.some((existing) => flatRefsAreEqual(existing, childRef))) { - this.flatRefs.push(childRef); - for (const branch of childRef.node.branches) if (branch.hasKind("morph") || branch.hasKind("intersection") && branch.structure?.structuralMorph !== void 0) this.flatMorphs.push({ - path: childRef.path, - propString: childRef.propString, - node: branch - }); - } - } - } - Object.assign(this.referencesById, this.children[i].referencesById); - } - this.flatRefs.sort((l, r) => l.path.length > r.path.length ? 1 : l.path.length < r.path.length ? -1 : l.propString > r.propString ? 1 : l.propString < r.propString ? -1 : l.node.expression < r.node.expression ? -1 : 1); - this.allowsRequiresContext = this.includesContextualPredicate || this.isCyclic; - this.rootApplyStrategy = !this.allowsRequiresContext && this.flatMorphs.length === 0 ? this.shallowMorphs.length === 0 ? "allows" : this.shallowMorphs.every((morph) => morph.length === 1 || morph.name === "$arkStructuralMorph") ? this.hasKind("union") ? this.branches.some((branch) => branch.shallowMorphs.length > 1) ? "contextual" : "branchedOptimistic" : this.shallowMorphs.length > 1 ? "contextual" : "optimistic" : "contextual" : "contextual"; - this.rootApply = this.createRootApply(); - this.allows = this.allowsRequiresContext ? (data) => this.traverseAllows(data, new Traversal(data, this.$.resolvedConfig)) : (data) => this.traverseAllows(data); - } - createRootApply() { - switch (this.rootApplyStrategy) { - case "allows": return (data, onFail) => { - if (this.allows(data)) return data; - const ctx = new Traversal(data, this.$.resolvedConfig); - this.traverseApply(data, ctx); - return ctx.finalize(onFail); - }; - case "contextual": return (data, onFail) => { - const ctx = new Traversal(data, this.$.resolvedConfig); - this.traverseApply(data, ctx); - return ctx.finalize(onFail); - }; - case "optimistic": - this.contextFreeMorph = this.shallowMorphs[0]; - const clone = this.$.resolvedConfig.clone; - return (data, onFail) => { - if (this.allows(data)) return this.contextFreeMorph(clone && (typeof data === "object" && data !== null || typeof data === "function") ? clone(data) : data); - const ctx = new Traversal(data, this.$.resolvedConfig); - this.traverseApply(data, ctx); - return ctx.finalize(onFail); - }; - case "branchedOptimistic": return this.createBranchedOptimisticRootApply(); - default: - this.rootApplyStrategy; - return throwInternalError(`Unexpected rootApplyStrategy ${this.rootApplyStrategy}`); - } - } - compiledMeta = compileMeta(this.metaJson); - cacheGetter(name, value$1) { - Object.defineProperty(this, name, { value: value$1 }); - return value$1; - } - get description() { - return this.cacheGetter("description", this.meta?.description ?? 
this.$.resolvedConfig[this.kind].description(this)); - } - get references() { - return Object.values(this.referencesById); - } - precedence = precedenceOfKind(this.kind); - precompilation; - assert = (data, pipedFromCtx) => this(data, pipedFromCtx, (errors) => errors.throw()); - traverse(data, pipedFromCtx) { - return this(data, pipedFromCtx, null); - } - get in() { - return this.cacheGetter("in", this.getIo("in")); - } - get out() { - return this.cacheGetter("out", this.getIo("out")); - } - getIo(ioKind) { - if (!this.includesTransform) return this; - const ioInner = {}; - for (const [k, v] of this.innerEntries) { - const keySchemaImplementation = this.impl.keys[k]; - if (keySchemaImplementation.reduceIo) keySchemaImplementation.reduceIo(ioKind, ioInner, v); - else if (keySchemaImplementation.child) { - const childValue = v; - ioInner[k] = isArray(childValue) ? childValue.map((child) => child[ioKind]) : childValue[ioKind]; - } else ioInner[k] = v; - } - return this.$.node(this.kind, ioInner); - } - toJSON() { - return this.json; - } - toString() { - return `Type<${this.expression}>`; - } - equals(r) { - const rNode = isNode(r) ? r : this.$.parseDefinition(r); - return this.innerHash === rNode.innerHash; - } - ifEquals(r) { - return this.equals(r) ? this : void 0; - } - hasKind(kind) { - return this.kind === kind; - } - assertHasKind(kind) { - if (this.kind !== kind) throwError(`${this.kind} node was not of asserted kind ${kind}`); - return this; - } - hasKindIn(...kinds) { - return kinds.includes(this.kind); - } - assertHasKindIn(...kinds) { - if (!includes(kinds, this.kind)) throwError(`${this.kind} node was not one of asserted kinds ${kinds}`); - return this; - } - isBasis() { - return includes(basisKinds, this.kind); - } - isConstraint() { - return includes(constraintKinds, this.kind); - } - isStructural() { - return includes(structuralKinds, this.kind); - } - isRefinement() { - return includes(refinementKinds, this.kind); - } - isRoot() { - return includes(rootKinds, this.kind); - } - isUnknown() { - return this.hasKind("intersection") && this.children.length === 0; - } - isNever() { - return this.hasKind("union") && this.children.length === 0; - } - hasUnit(value$1) { - return this.hasKind("unit") && this.allows(value$1); - } - hasOpenIntersection() { - return this.impl.intersectionIsOpen; - } - get nestableExpression() { - return this.expression; - } - select(selector) { - const normalized = NodeSelector.normalize(selector); - return this._select(normalized); - } - _select(selector) { - let nodes = NodeSelector.applyBoundary[selector.boundary ?? "references"](this); - if (selector.kind) nodes = nodes.filter((n) => n.kind === selector.kind); - if (selector.where) nodes = nodes.filter(selector.where); - return NodeSelector.applyMethod[selector.method ?? "filter"](nodes, this, selector); - } - transform(mapper, opts) { - return this._transform(mapper, this._createTransformContext(opts)); - } - _createTransformContext(opts) { - return { - root: this, - selected: void 0, - seen: {}, - path: [], - parseOptions: { prereduced: opts?.prereduced ?? false }, - undeclaredKeyHandling: void 0, - ...opts - }; - } - _transform(mapper, ctx) { - const $ = ctx.bindScope ?? 
this.$; - if (ctx.seen[this.id]) return this.$.lazilyResolve(ctx.seen[this.id]); - if (ctx.shouldTransform?.(this, ctx) === false) return this; - let transformedNode; - ctx.seen[this.id] = () => transformedNode; - if (this.hasKind("structure") && this.undeclared !== ctx.undeclaredKeyHandling) ctx = { - ...ctx, - undeclaredKeyHandling: this.undeclared - }; - const innerWithTransformedChildren = flatMorph(this.inner, (k, v) => { - if (!this.impl.keys[k].child) return [k, v]; - const children = v; - if (!isArray(children)) { - const transformed$1 = children._transform(mapper, ctx); - return transformed$1 ? [k, transformed$1] : []; - } - if (children.length === 0) return [k, v]; - const transformed = children.flatMap((n) => { - const transformedChild = n._transform(mapper, ctx); - return transformedChild ?? []; - }); - return transformed.length ? [k, transformed] : []; - }); - delete ctx.seen[this.id]; - const innerWithMeta = Object.assign(innerWithTransformedChildren, { meta: this.meta }); - const transformedInner = ctx.selected && !ctx.selected.includes(this) ? innerWithMeta : mapper(this.kind, innerWithMeta, ctx); - if (transformedInner === null) return null; - if (isNode(transformedInner)) return transformedNode = transformedInner; - const transformedKeys = Object.keys(transformedInner); - const hasNoTypedKeys = transformedKeys.length === 0 || transformedKeys.length === 1 && transformedKeys[0] === "meta"; - if (hasNoTypedKeys && !isEmptyObject(this.inner)) return null; - if ((this.kind === "required" || this.kind === "optional" || this.kind === "index") && !("value" in transformedInner)) return ctx.undeclaredKeyHandling ? { - ...transformedInner, - value: $ark.intrinsic.unknown - } : null; - if (this.kind === "morph") transformedInner.in ??= $ark.intrinsic.unknown; - return transformedNode = $.node(this.kind, transformedInner, ctx.parseOptions); - } - configureReferences(meta, selector = "references") { - const normalized = NodeSelector.normalize(selector); - const mapper = typeof meta === "string" ? (kind, inner) => ({ - ...inner, - meta: { - ...inner.meta, - description: meta - } - }) : typeof meta === "function" ? (kind, inner) => ({ - ...inner, - meta: meta(inner.meta) - }) : (kind, inner) => ({ - ...inner, - meta: { - ...inner.meta, - ...meta - } - }); - if (normalized.boundary === "self") return this.$.node(this.kind, mapper(this.kind, { - ...this.inner, - meta: this.meta - })); - const rawSelected = this._select(normalized); - const selected = rawSelected && liftArray(rawSelected); - const shouldTransform = normalized.boundary === "child" ? (node$1, ctx) => ctx.root.children.includes(node$1) : normalized.boundary === "shallow" ? (node$1) => node$1.kind !== "structure" : () => true; - return this.$.finalize(this.transform(mapper, { - shouldTransform, - selected - })); - } -}; -const NodeSelector = { - applyBoundary: { - self: (node$1) => [node$1], - child: (node$1) => [...node$1.children], - shallow: (node$1) => [...node$1.shallowReferences], - references: (node$1) => [...node$1.references] - }, - applyMethod: { - filter: (nodes) => nodes, - assertFilter: (nodes, from, selector) => { - if (nodes.length === 0) throwError(writeSelectAssertionMessage(from, selector)); - return nodes; - }, - find: (nodes) => nodes[0], - assertFind: (nodes, from, selector) => { - if (nodes.length === 0) throwError(writeSelectAssertionMessage(from, selector)); - return nodes[0]; - } - }, - normalize: (selector) => typeof selector === "function" ? 
{ - boundary: "references", - method: "filter", - where: selector - } : typeof selector === "string" ? isKeyOf(selector, NodeSelector.applyBoundary) ? { - method: "filter", - boundary: selector - } : { - boundary: "references", - method: "filter", - kind: selector - } : { - boundary: "references", - method: "filter", - ...selector - } -}; -const writeSelectAssertionMessage = (from, selector) => `${from} had no references matching ${printable(selector)}.`; -const typePathToPropString = (path$1) => stringifyPath(path$1, { stringifyNonKey: (node$1) => node$1.expression }); -const referenceMatcher = /"(\$ark\.[^"]+)"/g; -const compileMeta = (metaJson) => JSON.stringify(metaJson).replaceAll(referenceMatcher, "$1"); -const flatRef = (path$1, node$1) => ({ - path: path$1, - node: node$1, - propString: typePathToPropString(path$1) -}); -const flatRefsAreEqual = (l, r) => l.propString === r.propString && l.node.equals(r.node); -const appendUniqueFlatRefs = (existing, refs) => appendUnique(existing, refs, { isEqual: flatRefsAreEqual }); -const appendUniqueNodes = (existing, refs) => appendUnique(existing, refs, { isEqual: (l, r) => l.equals(r) }); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/disjoint.js -var Disjoint = class Disjoint extends Array { - static init(kind, l, r, ctx) { - return new Disjoint({ - kind, - l, - r, - path: ctx?.path ?? [], - optional: ctx?.optional ?? false - }); - } - add(kind, l, r, ctx) { - this.push({ - kind, - l, - r, - path: ctx?.path ?? [], - optional: ctx?.optional ?? false - }); - return this; - } - get summary() { - return this.describeReasons(); - } - describeReasons() { - if (this.length === 1) { - const { path: path$1, l, r } = this[0]; - const pathString = stringifyPath(path$1); - return writeUnsatisfiableExpressionError(`Intersection${pathString && ` at ${pathString}`} of ${describeReasons(l, r)}`); - } - return `The following intersections result in unsatisfiable types:\n• ${this.map(({ path: path$1, l, r }) => `${path$1}: ${describeReasons(l, r)}`).join("\n• ")}`; - } - throw() { - return throwParseError(this.describeReasons()); - } - invert() { - const result = this.map((entry) => ({ - ...entry, - l: entry.r, - r: entry.l - })); - if (!(result instanceof Disjoint)) return new Disjoint(...result); - return result; - } - withPrefixKey(key, kind) { - return this.map((entry) => ({ - ...entry, - path: [key, ...entry.path], - optional: entry.optional || kind === "optional" - })); - } - toNeverIfDisjoint() { - return $ark.intrinsic.never; - } -}; -const describeReasons = (l, r) => `${describeReason(l)} and ${describeReason(r)}`; -const describeReason = (value$1) => isNode(value$1) ? value$1.expression : isArray(value$1) ? value$1.map(describeReason).join(" | ") || "never" : String(value$1); -const writeUnsatisfiableExpressionError = (expression) => `${expression} results in an unsatisfiable type`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/shared/intersections.js -const intersectionCache = {}; -const intersectNodesRoot = (l, r, $) => intersectOrPipeNodes(l, r, { - $, - invert: false, - pipe: false -}); -const pipeNodesRoot = (l, r, $) => intersectOrPipeNodes(l, r, { - $, - invert: false, - pipe: true -}); -const intersectOrPipeNodes = (l, r, ctx) => { - const operator = ctx.pipe ? 
"|>" : "&"; - const lrCacheKey = `${l.hash}${operator}${r.hash}`; - if (intersectionCache[lrCacheKey] !== void 0) return intersectionCache[lrCacheKey]; - if (!ctx.pipe) { - const rlCacheKey = `${r.hash}${operator}${l.hash}`; - if (intersectionCache[rlCacheKey] !== void 0) { - const rlResult = intersectionCache[rlCacheKey]; - const lrResult = rlResult instanceof Disjoint ? rlResult.invert() : rlResult; - intersectionCache[lrCacheKey] = lrResult; - return lrResult; - } - } - const isPureIntersection = !ctx.pipe || !l.includesTransform && !r.includesTransform; - if (isPureIntersection && l.equals(r)) return l; - let result = isPureIntersection ? _intersectNodes(l, r, ctx) : l.hasKindIn(...rootKinds) ? _pipeNodes(l, r, ctx) : _intersectNodes(l, r, ctx); - if (isNode(result)) { - if (l.equals(result)) result = l; - else if (r.equals(result)) result = r; - } - intersectionCache[lrCacheKey] = result; - return result; -}; -const _intersectNodes = (l, r, ctx) => { - const leftmostKind = l.precedence < r.precedence ? l.kind : r.kind; - const implementation$22 = l.impl.intersections[r.kind] ?? r.impl.intersections[l.kind]; - if (implementation$22 === void 0) return null; - else if (leftmostKind === l.kind) return implementation$22(l, r, ctx); - else { - let result = implementation$22(r, l, { - ...ctx, - invert: !ctx.invert - }); - if (result instanceof Disjoint) result = result.invert(); - return result; - } -}; -const _pipeNodes = (l, r, ctx) => l.includesTransform || r.includesTransform ? ctx.invert ? pipeMorphed(r, l, ctx) : pipeMorphed(l, r, ctx) : _intersectNodes(l, r, ctx); -const pipeMorphed = (from, to, ctx) => from.distribute((fromBranch) => _pipeMorphed(fromBranch, to, ctx), (results) => { - const viableBranches = results.filter(isNode); - if (viableBranches.length === 0) return Disjoint.init("union", from.branches, to.branches); - if (viableBranches.length < from.branches.length || !from.branches.every((branch, i) => branch.in.equals(viableBranches[i].in))) return ctx.$.parseSchema(viableBranches); - let meta; - if (viableBranches.length === 1) { - const onlyBranch = viableBranches[0]; - if (!meta) return onlyBranch; - return ctx.$.node("morph", { - ...onlyBranch.inner, - in: onlyBranch.in.configure(meta, "self") - }); - } - const schema$1 = { branches: viableBranches }; - if (meta) schema$1.meta = meta; - return ctx.$.parseSchema(schema$1); -}); -const _pipeMorphed = (from, to, ctx) => { - const fromIsMorph = from.hasKind("morph"); - if (fromIsMorph) { - const morphs = [...from.morphs]; - if (from.lastMorphIfNode) { - const outIntersection = intersectOrPipeNodes(from.lastMorphIfNode, to, ctx); - if (outIntersection instanceof Disjoint) return outIntersection; - morphs[morphs.length - 1] = outIntersection; - } else morphs.push(to); - return ctx.$.node("morph", { - morphs, - in: from.inner.in - }); - } - if (to.hasKind("morph")) { - const inTersection = intersectOrPipeNodes(from, to.in, ctx); - if (inTersection instanceof Disjoint) return inTersection; - return ctx.$.node("morph", { - morphs: [to], - in: inTersection - }); - } - return ctx.$.node("morph", { - morphs: [to], - in: from - }); -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/constraint.js -var BaseConstraint = class extends BaseNode { - constructor(attachments, $) { - super(attachments, $); - Object.defineProperty(this, arkKind, { - value: "constraint", - enumerable: false - }); - } - impliedSiblings; - intersect(r) { - return intersectNodesRoot(this, r, this.$); - } -}; -var 
InternalPrimitiveConstraint = class extends BaseConstraint { - traverseApply = (data, ctx) => { - if (!this.traverseAllows(data, ctx)) ctx.errorFromNodeContext(this.errorContext); - }; - compile(js) { - if (js.traversalKind === "Allows") js.return(this.compiledCondition); - else js.if(this.compiledNegation, () => js.line(`${js.ctx}.errorFromNodeContext(${this.compiledErrorContext})`)); - } - get errorContext() { - return { - code: this.kind, - description: this.description, - meta: this.meta, - ...this.inner - }; - } - get compiledErrorContext() { - return compileObjectLiteral(this.errorContext); - } -}; -const constraintKeyParser = (kind) => (schema$1, ctx) => { - if (isArray(schema$1)) { - if (schema$1.length === 0) return; - const nodes = schema$1.map((schema$2) => ctx.$.node(kind, schema$2)); - if (kind === "predicate") return nodes; - return nodes.sort((l, r) => l.hash < r.hash ? -1 : 1); - } - const child = ctx.$.node(kind, schema$1); - return child.hasOpenIntersection() ? [child] : child; -}; -const intersectConstraints = (s) => { - const head = s.r.shift(); - if (!head) { - let result = s.l.length === 0 && s.kind === "structure" ? $ark.intrinsic.unknown.internal : s.ctx.$.node(s.kind, Object.assign(s.baseInner, unflattenConstraints(s.l)), { prereduced: true }); - for (const root of s.roots) { - if (result instanceof Disjoint) return result; - result = intersectOrPipeNodes(root, result, s.ctx); - } - return result; - } - let matched = false; - for (let i = 0; i < s.l.length; i++) { - const result = intersectOrPipeNodes(s.l[i], head, s.ctx); - if (result === null) continue; - if (result instanceof Disjoint) return result; - if (!matched) { - if (result.isRoot()) { - s.roots.push(result); - s.l.splice(i); - return intersectConstraints(s); - } - s.l[i] = result; - matched = true; - } else if (!s.l.includes(result)) return throwInternalError(`Unexpectedly encountered multiple distinct intersection results for refinement ${result}`); - } - if (!matched) s.l.push(head); - if (s.kind === "intersection") { - if (head.impliedSiblings) for (const node$1 of head.impliedSiblings) appendUnique(s.r, node$1); - } - return intersectConstraints(s); -}; -const flattenConstraints = (inner) => { - const result = Object.entries(inner).flatMap(([k, v]) => k in constraintKeys ? v : []).sort((l, r) => l.precedence < r.precedence ? -1 : l.precedence > r.precedence ? 1 : l.kind === "predicate" && r.kind === "predicate" ? 0 : l.hash < r.hash ? -1 : 1); - return result; -}; -const unflattenConstraints = (constraints) => { - const inner = {}; - for (const constraint of constraints) if (constraint.hasOpenIntersection()) inner[constraint.kind] = append(inner[constraint.kind], constraint); - else { - if (inner[constraint.kind]) return throwInternalError(`Unexpected intersection of closed refinements of kind ${constraint.kind}`); - inner[constraint.kind] = constraint; - } - return inner; -}; -const throwInvalidOperandError = (...args$1) => throwParseError(writeInvalidOperandMessage(...args$1)); -const writeInvalidOperandMessage = (kind, expected, actual) => { - const actualDescription = actual.hasKind("morph") ? "a morph" : actual.isUnknown() ? 
"unknown" : actual.exclude(expected).defaultShortDescription; - return `${capitalize$1(kind)} operand must be ${expected.description} (was ${actualDescription})`; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/generic.js -const parseGeneric = (paramDefs, bodyDef, $) => new GenericRoot(paramDefs, bodyDef, $, $, null); -var LazyGenericBody = class extends Callable {}; -var GenericRoot = class extends Callable { - [arkKind] = "generic"; - paramDefs; - bodyDef; - $; - arg$; - baseInstantiation; - hkt; - description; - constructor(paramDefs, bodyDef, $, arg$, hkt) { - super((...args$1) => { - const argNodes = flatMorph(this.names, (i, name) => { - const arg = this.arg$.parse(args$1[i]); - if (!arg.extends(this.constraints[i])) throwParseError(writeUnsatisfiedParameterConstraintMessage(name, this.constraints[i].expression, arg.expression)); - return [name, arg]; - }); - if (this.defIsLazy()) { - const def = this.bodyDef(argNodes); - return this.$.parse(def); - } - return this.$.parse(bodyDef, { args: argNodes }); - }); - this.paramDefs = paramDefs; - this.bodyDef = bodyDef; - this.$ = $; - this.arg$ = arg$; - this.hkt = hkt; - this.description = hkt ? new hkt().description ?? `a generic type for ${hkt.constructor.name}` : "a generic type"; - this.baseInstantiation = this(...this.constraints); - } - defIsLazy() { - return this.bodyDef instanceof LazyGenericBody; - } - cacheGetter(name, value$1) { - Object.defineProperty(this, name, { value: value$1 }); - return value$1; - } - get json() { - return this.cacheGetter("json", { - params: this.params.map((param) => param[1].isUnknown() ? param[0] : [param[0], param[1].json]), - body: snapshot(this.bodyDef) - }); - } - get params() { - return this.cacheGetter("params", this.paramDefs.map((param) => typeof param === "string" ? [param, $ark.intrinsic.unknown] : [param[0], this.$.parse(param[1])])); - } - get names() { - return this.cacheGetter("names", this.params.map((e) => e[0])); - } - get constraints() { - return this.cacheGetter("constraints", this.params.map((e) => e[1])); - } - get internal() { - return this; - } - get referencesById() { - return this.baseInstantiation.internal.referencesById; - } - get references() { - return this.baseInstantiation.internal.references; - } -}; -const writeUnsatisfiedParameterConstraintMessage = (name, constraint, arg) => `${name} must be assignable to ${constraint} (was ${arg})`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/predicate.js -const implementation$21 = implementNode({ - kind: "predicate", - hasAssociatedError: true, - collapsibleKey: "predicate", - keys: { predicate: {} }, - normalize: (schema$1) => typeof schema$1 === "function" ? 
{ predicate: schema$1 } : schema$1, - defaults: { description: (node$1) => `valid according to ${node$1.predicate.name || "an anonymous predicate"}` }, - intersectionIsOpen: true, - intersections: { predicate: () => null } -}); -var PredicateNode = class extends BaseConstraint { - serializedPredicate = registeredReference(this.predicate); - compiledCondition = `${this.serializedPredicate}(data, ctx)`; - compiledNegation = `!${this.compiledCondition}`; - impliedBasis = null; - expression = this.serializedPredicate; - traverseAllows = this.predicate; - errorContext = { - code: "predicate", - description: this.description, - meta: this.meta - }; - compiledErrorContext = compileObjectLiteral(this.errorContext); - traverseApply = (data, ctx) => { - if (!this.predicate(data, ctx.external) && !ctx.hasError()) ctx.errorFromNodeContext(this.errorContext); - }; - compile(js) { - if (js.traversalKind === "Allows") { - js.return(this.compiledCondition); - return; - } - js.if(`${this.compiledNegation} && !ctx.hasError()`, () => js.line(`ctx.errorFromNodeContext(${this.compiledErrorContext})`)); - } - reduceJsonSchema(base, ctx) { - return ctx.fallback.predicate({ - code: "predicate", - base, - predicate: this.predicate - }); - } -}; -const Predicate = { - implementation: implementation$21, - Node: PredicateNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/divisor.js -const implementation$20 = implementNode({ - kind: "divisor", - collapsibleKey: "rule", - keys: { rule: { parse: (divisor) => Number.isInteger(divisor) ? divisor : throwParseError(writeNonIntegerDivisorMessage(divisor)) } }, - normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, - hasAssociatedError: true, - defaults: { description: (node$1) => node$1.rule === 1 ? "an integer" : node$1.rule === 2 ? "even" : `a multiple of ${node$1.rule}` }, - intersections: { divisor: (l, r, ctx) => ctx.$.node("divisor", { rule: Math.abs(l.rule * r.rule / greatestCommonDivisor(l.rule, r.rule)) }) }, - obviatesBasisDescription: true -}); -var DivisorNode = class extends InternalPrimitiveConstraint { - traverseAllows = (data) => data % this.rule === 0; - compiledCondition = `data % ${this.rule} === 0`; - compiledNegation = `data % ${this.rule} !== 0`; - impliedBasis = $ark.intrinsic.number.internal; - expression = `% ${this.rule}`; - reduceJsonSchema(schema$1) { - schema$1.type = "integer"; - if (this.rule === 1) return schema$1; - schema$1.multipleOf = this.rule; - return schema$1; - } -}; -const Divisor = { - implementation: implementation$20, - Node: DivisorNode -}; -const writeNonIntegerDivisorMessage = (divisor) => `divisor must be an integer (was ${divisor})`; -const greatestCommonDivisor = (l, r) => { - let previous; - let greatestCommonDivisor$1 = l; - let current = r; - while (current !== 0) { - previous = current; - current = greatestCommonDivisor$1 % current; - greatestCommonDivisor$1 = previous; - } - return greatestCommonDivisor$1; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/range.js -var BaseRange = class extends InternalPrimitiveConstraint { - boundOperandKind = operandKindsByBoundKind[this.kind]; - compiledActual = this.boundOperandKind === "value" ? `data` : this.boundOperandKind === "length" ? 
`data.length` : `data.valueOf()`; - comparator = compileComparator(this.kind, this.exclusive); - numericLimit = this.rule.valueOf(); - expression = `${this.comparator} ${this.rule}`; - compiledCondition = `${this.compiledActual} ${this.comparator} ${this.numericLimit}`; - compiledNegation = `${this.compiledActual} ${negatedComparators[this.comparator]} ${this.numericLimit}`; - stringLimit = this.boundOperandKind === "date" ? dateLimitToString(this.numericLimit) : `${this.numericLimit}`; - limitKind = this.comparator["0"] === "<" ? "upper" : "lower"; - isStricterThan(r) { - const thisLimitIsStricter = this.limitKind === "upper" ? this.numericLimit < r.numericLimit : this.numericLimit > r.numericLimit; - return thisLimitIsStricter || this.numericLimit === r.numericLimit && this.exclusive === true && !r.exclusive; - } - overlapsRange(r) { - if (this.isStricterThan(r)) return false; - if (this.numericLimit === r.numericLimit && (this.exclusive || r.exclusive)) return false; - return true; - } - overlapIsUnit(r) { - return this.numericLimit === r.numericLimit && !this.exclusive && !r.exclusive; - } -}; -const negatedComparators = { - "<": ">=", - "<=": ">", - ">": "<=", - ">=": "<" -}; -const boundKindPairsByLower = { - min: "max", - minLength: "maxLength", - after: "before" -}; -const parseExclusiveKey = { parse: (flag) => flag || void 0 }; -const createLengthSchemaNormalizer = (kind) => (schema$1) => { - if (typeof schema$1 === "number") return { rule: schema$1 }; - const { exclusive,...normalized } = schema$1; - return exclusive ? { - ...normalized, - rule: kind === "minLength" ? normalized.rule + 1 : normalized.rule - 1 - } : normalized; -}; -const createDateSchemaNormalizer = (kind) => (schema$1) => { - if (typeof schema$1 === "number" || typeof schema$1 === "string" || schema$1 instanceof Date) return { rule: schema$1 }; - const { exclusive,...normalized } = schema$1; - if (!exclusive) return normalized; - const numericLimit = typeof normalized.rule === "number" ? normalized.rule : typeof normalized.rule === "string" ? new Date(normalized.rule).valueOf() : normalized.rule.valueOf(); - return exclusive ? { - ...normalized, - rule: kind === "after" ? numericLimit + 1 : numericLimit - 1 - } : normalized; -}; -const parseDateLimit = (limit) => typeof limit === "string" || typeof limit === "number" ? new Date(limit) : limit; -const writeInvalidLengthBoundMessage = (kind, limit) => `${kind} bound must be a positive integer (was ${limit})`; -const createLengthRuleParser = (kind) => (limit) => { - if (!Number.isInteger(limit) || limit < 0) throwParseError(writeInvalidLengthBoundMessage(kind, limit)); - return limit; -}; -const operandKindsByBoundKind = { - min: "value", - max: "value", - minLength: "length", - maxLength: "length", - after: "date", - before: "date" -}; -const compileComparator = (kind, exclusive) => `${isKeyOf(kind, boundKindPairsByLower) ? ">" : "<"}${exclusive ? "" : "="}`; -const dateLimitToString = (limit) => typeof limit === "string" ? 
limit : new Date(limit).toLocaleString(); -const writeUnboundableMessage = (root) => `Bounded expression ${root} must be exactly one of number, string, Array, or Date`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/after.js -const implementation$19 = implementNode({ - kind: "after", - collapsibleKey: "rule", - hasAssociatedError: true, - keys: { rule: { - parse: parseDateLimit, - serialize: (schema$1) => schema$1.toISOString() - } }, - normalize: createDateSchemaNormalizer("after"), - defaults: { - description: (node$1) => `${node$1.collapsibleLimitString} or later`, - actual: describeCollapsibleDate - }, - intersections: { after: (l, r) => l.isStricterThan(r) ? l : r } -}); -var AfterNode = class extends BaseRange { - impliedBasis = $ark.intrinsic.Date.internal; - collapsibleLimitString = describeCollapsibleDate(this.rule); - traverseAllows = (data) => data >= this.rule; - reduceJsonSchema(base, ctx) { - return ctx.fallback.date({ - code: "date", - base, - after: this.rule - }); - } -}; -const After = { - implementation: implementation$19, - Node: AfterNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/before.js -const implementation$18 = implementNode({ - kind: "before", - collapsibleKey: "rule", - hasAssociatedError: true, - keys: { rule: { - parse: parseDateLimit, - serialize: (schema$1) => schema$1.toISOString() - } }, - normalize: createDateSchemaNormalizer("before"), - defaults: { - description: (node$1) => `${node$1.collapsibleLimitString} or earlier`, - actual: describeCollapsibleDate - }, - intersections: { - before: (l, r) => l.isStricterThan(r) ? l : r, - after: (before, after, ctx) => before.overlapsRange(after) ? before.overlapIsUnit(after) ? ctx.$.node("unit", { unit: before.rule }) : null : Disjoint.init("range", before, after) - } -}); -var BeforeNode = class extends BaseRange { - collapsibleLimitString = describeCollapsibleDate(this.rule); - traverseAllows = (data) => data <= this.rule; - impliedBasis = $ark.intrinsic.Date.internal; - reduceJsonSchema(base, ctx) { - return ctx.fallback.date({ - code: "date", - base, - before: this.rule - }); - } -}; -const Before = { - implementation: implementation$18, - Node: BeforeNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/exactLength.js -const implementation$17 = implementNode({ - kind: "exactLength", - collapsibleKey: "rule", - keys: { rule: { parse: createLengthRuleParser("exactLength") } }, - normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, - hasAssociatedError: true, - defaults: { - description: (node$1) => `exactly length ${node$1.rule}`, - actual: (data) => `${data.length}` - }, - intersections: { - exactLength: (l, r, ctx) => Disjoint.init("unit", ctx.$.node("unit", { unit: l.rule }), ctx.$.node("unit", { unit: r.rule }), { path: ["length"] }), - minLength: (exactLength, minLength) => exactLength.rule >= minLength.rule ? exactLength : Disjoint.init("range", exactLength, minLength), - maxLength: (exactLength, maxLength) => exactLength.rule <= maxLength.rule ? 
exactLength : Disjoint.init("range", exactLength, maxLength) - } -}); -var ExactLengthNode = class extends InternalPrimitiveConstraint { - traverseAllows = (data) => data.length === this.rule; - compiledCondition = `data.length === ${this.rule}`; - compiledNegation = `data.length !== ${this.rule}`; - impliedBasis = $ark.intrinsic.lengthBoundable.internal; - expression = `== ${this.rule}`; - reduceJsonSchema(schema$1) { - switch (schema$1.type) { - case "string": - schema$1.minLength = this.rule; - schema$1.maxLength = this.rule; - return schema$1; - case "array": - schema$1.minItems = this.rule; - schema$1.maxItems = this.rule; - return schema$1; - default: return ToJsonSchema.throwInternalOperandError("exactLength", schema$1); - } - } -}; -const ExactLength = { - implementation: implementation$17, - Node: ExactLengthNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/max.js -const implementation$16 = implementNode({ - kind: "max", - collapsibleKey: "rule", - hasAssociatedError: true, - keys: { - rule: {}, - exclusive: parseExclusiveKey - }, - normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, - defaults: { description: (node$1) => { - if (node$1.rule === 0) return node$1.exclusive ? "negative" : "non-positive"; - return `${node$1.exclusive ? "less than" : "at most"} ${node$1.rule}`; - } }, - intersections: { - max: (l, r) => l.isStricterThan(r) ? l : r, - min: (max, min, ctx) => max.overlapsRange(min) ? max.overlapIsUnit(min) ? ctx.$.node("unit", { unit: max.rule }) : null : Disjoint.init("range", max, min) - }, - obviatesBasisDescription: true -}); -var MaxNode = class extends BaseRange { - impliedBasis = $ark.intrinsic.number.internal; - traverseAllows = this.exclusive ? (data) => data < this.rule : (data) => data <= this.rule; - reduceJsonSchema(schema$1) { - if (this.exclusive) schema$1.exclusiveMaximum = this.rule; - else schema$1.maximum = this.rule; - return schema$1; - } -}; -const Max = { - implementation: implementation$16, - Node: MaxNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/maxLength.js -const implementation$15 = implementNode({ - kind: "maxLength", - collapsibleKey: "rule", - hasAssociatedError: true, - keys: { rule: { parse: createLengthRuleParser("maxLength") } }, - reduce: (inner, $) => inner.rule === 0 ? $.node("exactLength", inner) : void 0, - normalize: createLengthSchemaNormalizer("maxLength"), - defaults: { - description: (node$1) => `at most length ${node$1.rule}`, - actual: (data) => `${data.length}` - }, - intersections: { - maxLength: (l, r) => l.isStricterThan(r) ? l : r, - minLength: (max, min, ctx) => max.overlapsRange(min) ? max.overlapIsUnit(min) ? 
ctx.$.node("exactLength", { rule: max.rule }) : null : Disjoint.init("range", max, min) - } -}); -var MaxLengthNode = class extends BaseRange { - impliedBasis = $ark.intrinsic.lengthBoundable.internal; - traverseAllows = (data) => data.length <= this.rule; - reduceJsonSchema(schema$1) { - switch (schema$1.type) { - case "string": - schema$1.maxLength = this.rule; - return schema$1; - case "array": - schema$1.maxItems = this.rule; - return schema$1; - default: return ToJsonSchema.throwInternalOperandError("maxLength", schema$1); - } - } -}; -const MaxLength = { - implementation: implementation$15, - Node: MaxLengthNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/min.js -const implementation$14 = implementNode({ - kind: "min", - collapsibleKey: "rule", - hasAssociatedError: true, - keys: { - rule: {}, - exclusive: parseExclusiveKey - }, - normalize: (schema$1) => typeof schema$1 === "number" ? { rule: schema$1 } : schema$1, - defaults: { description: (node$1) => { - if (node$1.rule === 0) return node$1.exclusive ? "positive" : "non-negative"; - return `${node$1.exclusive ? "more than" : "at least"} ${node$1.rule}`; - } }, - intersections: { min: (l, r) => l.isStricterThan(r) ? l : r }, - obviatesBasisDescription: true -}); -var MinNode = class extends BaseRange { - impliedBasis = $ark.intrinsic.number.internal; - traverseAllows = this.exclusive ? (data) => data > this.rule : (data) => data >= this.rule; - reduceJsonSchema(schema$1) { - if (this.exclusive) schema$1.exclusiveMinimum = this.rule; - else schema$1.minimum = this.rule; - return schema$1; - } -}; -const Min = { - implementation: implementation$14, - Node: MinNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/minLength.js -const implementation$13 = implementNode({ - kind: "minLength", - collapsibleKey: "rule", - hasAssociatedError: true, - keys: { rule: { parse: createLengthRuleParser("minLength") } }, - reduce: (inner) => inner.rule === 0 ? $ark.intrinsic.unknown : void 0, - normalize: createLengthSchemaNormalizer("minLength"), - defaults: { - description: (node$1) => node$1.rule === 1 ? "non-empty" : `at least length ${node$1.rule}`, - actual: (data) => data.length === 0 ? "" : `${data.length}` - }, - intersections: { minLength: (l, r) => l.isStricterThan(r) ? 
l : r } -}); -var MinLengthNode = class extends BaseRange { - impliedBasis = $ark.intrinsic.lengthBoundable.internal; - traverseAllows = (data) => data.length >= this.rule; - reduceJsonSchema(schema$1) { - switch (schema$1.type) { - case "string": - schema$1.minLength = this.rule; - return schema$1; - case "array": - schema$1.minItems = this.rule; - return schema$1; - default: return ToJsonSchema.throwInternalOperandError("minLength", schema$1); - } - } -}; -const MinLength = { - implementation: implementation$13, - Node: MinLengthNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/kinds.js -const boundImplementationsByKind = { - min: Min.implementation, - max: Max.implementation, - minLength: MinLength.implementation, - maxLength: MaxLength.implementation, - exactLength: ExactLength.implementation, - after: After.implementation, - before: Before.implementation -}; -const boundClassesByKind = { - min: Min.Node, - max: Max.Node, - minLength: MinLength.Node, - maxLength: MaxLength.Node, - exactLength: ExactLength.Node, - after: After.Node, - before: Before.Node -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/refinements/pattern.js -const implementation$12 = implementNode({ - kind: "pattern", - collapsibleKey: "rule", - keys: { - rule: {}, - flags: {} - }, - normalize: (schema$1) => typeof schema$1 === "string" ? { rule: schema$1 } : schema$1 instanceof RegExp ? schema$1.flags ? { - rule: schema$1.source, - flags: schema$1.flags - } : { rule: schema$1.source } : schema$1, - obviatesBasisDescription: true, - obviatesBasisExpression: true, - hasAssociatedError: true, - intersectionIsOpen: true, - defaults: { description: (node$1) => `matched by ${node$1.rule}` }, - intersections: { pattern: () => null } -}); -var PatternNode = class extends InternalPrimitiveConstraint { - instance = new RegExp(this.rule, this.flags); - expression = `${this.instance}`; - traverseAllows = this.instance.test.bind(this.instance); - compiledCondition = `${this.expression}.test(data)`; - compiledNegation = `!${this.compiledCondition}`; - impliedBasis = $ark.intrinsic.string.internal; - reduceJsonSchema(base, ctx) { - if (base.pattern) return ctx.fallback.patternIntersection({ - code: "patternIntersection", - base, - pattern: this.rule - }); - base.pattern = this.rule; - return base; - } -}; -const Pattern = { - implementation: implementation$12, - Node: PatternNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/parse.js -const schemaKindOf = (schema$1, allowedKinds) => { - const kind = discriminateRootKind(schema$1); - if (allowedKinds && !allowedKinds.includes(kind)) return throwParseError(`Root of kind ${kind} should be one of ${allowedKinds}`); - return kind; -}; -const discriminateRootKind = (schema$1) => { - if (hasArkKind(schema$1, "root")) return schema$1.kind; - if (typeof schema$1 === "string") return schema$1[0] === "$" ? "alias" : schema$1 in domainDescriptions ? 
"domain" : "proto"; - if (typeof schema$1 === "function") return "proto"; - if (typeof schema$1 !== "object" || schema$1 === null) return throwParseError(writeInvalidSchemaMessage(schema$1)); - if ("morphs" in schema$1) return "morph"; - if ("branches" in schema$1 || isArray(schema$1)) return "union"; - if ("unit" in schema$1) return "unit"; - if ("reference" in schema$1) return "alias"; - const schemaKeys = Object.keys(schema$1); - if (schemaKeys.length === 0 || schemaKeys.some((k) => k in constraintKeys)) return "intersection"; - if ("proto" in schema$1) return "proto"; - if ("domain" in schema$1) return "domain"; - return throwParseError(writeInvalidSchemaMessage(schema$1)); -}; -const writeInvalidSchemaMessage = (schema$1) => `${printable(schema$1)} is not a valid type schema`; -const nodeCountsByPrefix = {}; -const serializeListableChild = (listableNode) => isArray(listableNode) ? listableNode.map((node$1) => node$1.collapsibleJson) : listableNode.collapsibleJson; -const nodesByRegisteredId = {}; -$ark.nodesByRegisteredId = nodesByRegisteredId; -const registerNodeId = (prefix) => { - nodeCountsByPrefix[prefix] ??= 0; - return `${prefix}${++nodeCountsByPrefix[prefix]}`; -}; -const parseNode = (ctx) => { - const impl = nodeImplementationsByKind[ctx.kind]; - const configuredSchema = impl.applyConfig?.(ctx.def, ctx.$.resolvedConfig) ?? ctx.def; - const inner = {}; - const { meta: metaSchema,...innerSchema } = configuredSchema; - const meta = metaSchema === void 0 ? {} : typeof metaSchema === "string" ? { description: metaSchema } : metaSchema; - const innerSchemaEntries = entriesOf(innerSchema).sort(([lKey], [rKey]) => isNodeKind(lKey) ? isNodeKind(rKey) ? precedenceOfKind(lKey) - precedenceOfKind(rKey) : 1 : isNodeKind(rKey) ? -1 : lKey < rKey ? -1 : 1).filter(([k, v]) => { - if (k.startsWith("meta.")) { - const metaKey = k.slice(5); - meta[metaKey] = v; - return false; - } - return true; - }); - for (const entry of innerSchemaEntries) { - const k = entry[0]; - const keyImpl = impl.keys[k]; - if (!keyImpl) return throwParseError(`Key ${k} is not valid on ${ctx.kind} schema`); - const v = keyImpl.parse ? keyImpl.parse(entry[1], ctx) : entry[1]; - if (v !== unset && (v !== void 0 || keyImpl.preserveUndefined)) inner[k] = v; - } - if (impl.reduce && !ctx.prereduced) { - const reduced = impl.reduce(inner, ctx.$); - if (reduced) { - if (reduced instanceof Disjoint) return reduced.throw(); - return withMeta(reduced, meta); - } - } - const node$1 = createNode({ - id: ctx.id, - kind: ctx.kind, - inner, - meta, - $: ctx.$ - }); - return node$1; -}; -const createNode = ({ id, kind, inner, meta, $, ignoreCache }) => { - const impl = nodeImplementationsByKind[kind]; - const innerEntries = entriesOf(inner); - const children = []; - let innerJson = {}; - for (const [k, v] of innerEntries) { - const keyImpl = impl.keys[k]; - const serialize = keyImpl.serialize ?? (keyImpl.child ? serializeListableChild : defaultValueSerializer); - innerJson[k] = serialize(v); - if (keyImpl.child === true) { - const listableNode = v; - if (isArray(listableNode)) children.push(...listableNode); - else children.push(listableNode); - } else if (typeof keyImpl.child === "function") children.push(...keyImpl.child(v)); - } - if (impl.finalizeInnerJson) innerJson = impl.finalizeInnerJson(innerJson); - let json$2 = { ...innerJson }; - let metaJson = {}; - if (!isEmptyObject(meta)) { - metaJson = flatMorph(meta, (k, v) => [k, k === "examples" ? 
v : defaultValueSerializer(v)]); - json$2.meta = possiblyCollapse(metaJson, "description", true); - } - innerJson = possiblyCollapse(innerJson, impl.collapsibleKey, false); - const innerHash = JSON.stringify({ - kind, - ...innerJson - }); - json$2 = possiblyCollapse(json$2, impl.collapsibleKey, false); - const collapsibleJson = possiblyCollapse(json$2, impl.collapsibleKey, true); - const hash = JSON.stringify({ - kind, - ...json$2 - }); - if ($.nodesByHash[hash] && !ignoreCache) return $.nodesByHash[hash]; - const attachments = { - id, - kind, - impl, - inner, - innerEntries, - innerJson, - innerHash, - meta, - metaJson, - json: json$2, - hash, - collapsibleJson, - children - }; - if (kind !== "intersection") { - for (const k in inner) if (k !== "in" && k !== "out") attachments[k] = inner[k]; - } - const node$1 = new nodeClassesByKind[kind](attachments, $); - return $.nodesByHash[hash] = node$1; -}; -const withId = (node$1, id) => { - if (node$1.id === id) return node$1; - if (isNode(nodesByRegisteredId[id])) throwInternalError(`Unexpected attempt to overwrite node id ${id}`); - return createNode({ - id, - kind: node$1.kind, - inner: node$1.inner, - meta: node$1.meta, - $: node$1.$, - ignoreCache: true - }); -}; -const withMeta = (node$1, meta, id) => { - if (id && isNode(nodesByRegisteredId[id])) throwInternalError(`Unexpected attempt to overwrite node id ${id}`); - return createNode({ - id: id ?? registerNodeId(meta.alias ?? node$1.kind), - kind: node$1.kind, - inner: node$1.inner, - meta, - $: node$1.$ - }); -}; -const possiblyCollapse = (json$2, toKey, allowPrimitive) => { - const collapsibleKeys = Object.keys(json$2); - if (collapsibleKeys.length === 1 && collapsibleKeys[0] === toKey) { - const collapsed = json$2[toKey]; - if (allowPrimitive) return collapsed; - if (hasDomain(collapsed, "object") && (Object.keys(collapsed).length === 1 || Array.isArray(collapsed))) return collapsed; - } - return json$2; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/prop.js -const intersectProps = (l, r, ctx) => { - if (l.key !== r.key) return null; - const key = l.key; - let value$1 = intersectOrPipeNodes(l.value, r.value, ctx); - const kind = l.required || r.required ? "required" : "optional"; - if (value$1 instanceof Disjoint) if (kind === "optional") value$1 = $ark.intrinsic.never.internal; - else return value$1.withPrefixKey(l.key, l.required && r.required ? "required" : "optional"); - if (kind === "required") return ctx.$.node("required", { - key, - value: value$1 - }); - const defaultIntersection = l.hasDefault() ? r.hasDefault() ? l.default === r.default ? l.default : throwParseError(writeDefaultIntersectionMessage(l.default, r.default)) : l.default : r.hasDefault() ? r.default : unset; - return ctx.$.node("optional", { - key, - value: value$1, - default: defaultIntersection - }); -}; -var BaseProp = class extends BaseConstraint { - required = this.kind === "required"; - optional = this.kind === "optional"; - impliedBasis = $ark.intrinsic.object.internal; - serializedKey = compileSerializedValue(this.key); - compiledKey = typeof this.key === "string" ? 
this.key : this.serializedKey; - flatRefs = append(this.value.flatRefs.map((ref) => flatRef([this.key, ...ref.path], ref.node)), flatRef([this.key], this.value)); - _transform(mapper, ctx) { - ctx.path.push(this.key); - const result = super._transform(mapper, ctx); - ctx.path.pop(); - return result; - } - hasDefault() { - return "default" in this.inner; - } - traverseAllows = (data, ctx) => { - if (this.key in data) return traverseKey(this.key, () => this.value.traverseAllows(data[this.key], ctx), ctx); - return this.optional; - }; - traverseApply = (data, ctx) => { - if (this.key in data) traverseKey(this.key, () => this.value.traverseApply(data[this.key], ctx), ctx); - else if (this.hasKind("required")) ctx.errorFromNodeContext(this.errorContext); - }; - compile(js) { - js.if(`${this.serializedKey} in data`, () => js.traverseKey(this.serializedKey, `data${js.prop(this.key)}`, this.value)); - if (this.hasKind("required")) js.else(() => js.traversalKind === "Apply" ? js.line(`ctx.errorFromNodeContext(${this.compiledErrorContext})`) : js.return(false)); - if (js.traversalKind === "Allows") js.return(true); - } -}; -const writeDefaultIntersectionMessage = (lValue, rValue) => `Invalid intersection of default values ${printable(lValue)} & ${printable(rValue)}`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/optional.js -const implementation$11 = implementNode({ - kind: "optional", - hasAssociatedError: false, - intersectionIsOpen: true, - keys: { - key: {}, - value: { - child: true, - parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) - }, - default: { preserveUndefined: true } - }, - normalize: (schema$1) => schema$1, - reduce: (inner, $) => { - if ($.resolvedConfig.exactOptionalPropertyTypes === false) { - if (!inner.value.allows(void 0)) return $.node("optional", { - ...inner, - value: inner.value.or(intrinsic.undefined) - }, { prereduced: true }); - } - }, - defaults: { description: (node$1) => `${node$1.compiledKey}?: ${node$1.value.description}` }, - intersections: { optional: intersectProps } -}); -var OptionalNode = class extends BaseProp { - constructor(...args$1) { - super(...args$1); - if ("default" in this.inner) assertDefaultValueAssignability(this.value, this.inner.default, this.key); - } - get outProp() { - if (!this.hasDefault()) return this; - const { default: defaultValue,...requiredInner } = this.inner; - return this.cacheGetter("outProp", this.$.node("required", requiredInner, { prereduced: true })); - } - expression = this.hasDefault() ? `${this.compiledKey}: ${this.value.expression} = ${printable(this.inner.default)}` : `${this.compiledKey}?: ${this.value.expression}`; - defaultValueMorph = getDefaultableMorph(this); - defaultValueMorphRef = this.defaultValueMorph && registeredReference(this.defaultValueMorph); -}; -const Optional = { - implementation: implementation$11, - Node: OptionalNode -}; -const defaultableMorphCache = {}; -const getDefaultableMorph = (node$1) => { - if (!node$1.hasDefault()) return; - const cacheKey = `{${node$1.compiledKey}: ${node$1.value.id} = ${defaultValueSerializer(node$1.default)}}`; - return defaultableMorphCache[cacheKey] ??= computeDefaultValueMorph(node$1.key, node$1.value, node$1.default); -}; -const computeDefaultValueMorph = (key, value$1, defaultInput) => { - if (typeof defaultInput === "function") return value$1.includesTransform ? 
(data, ctx) => { - traverseKey(key, () => value$1(data[key] = defaultInput(), ctx), ctx); - return data; - } : (data) => { - data[key] = defaultInput(); - return data; - }; - const precomputedMorphedDefault = value$1.includesTransform ? value$1.assert(defaultInput) : defaultInput; - return hasDomain(precomputedMorphedDefault, "object") ? (data, ctx) => { - traverseKey(key, () => value$1(data[key] = defaultInput, ctx), ctx); - return data; - } : (data) => { - data[key] = precomputedMorphedDefault; - return data; - }; -}; -const assertDefaultValueAssignability = (node$1, value$1, key) => { - const wrapped = isThunk(value$1); - if (hasDomain(value$1, "object") && !wrapped) throwParseError(writeNonPrimitiveNonFunctionDefaultValueMessage(key)); - const out = node$1.in(wrapped ? value$1() : value$1); - if (out instanceof ArkErrors) { - if (key === null) throwParseError(`Default ${out.summary}`); - const atPath = out.transform((e) => e.transform((input) => ({ - ...input, - prefixPath: [key] - }))); - throwParseError(`Default for ${atPath.summary}`); - } - return value$1; -}; -const writeNonPrimitiveNonFunctionDefaultValueMessage = (key) => { - const keyDescription = key === null ? "" : typeof key === "number" ? `for value at [${key}] ` : `for ${compileSerializedValue(key)} `; - return `Non-primitive default ${keyDescription}must be specified as a function like () => ({my: 'object'})`; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/root.js -var BaseRoot = class extends BaseNode { - constructor(attachments, $) { - super(attachments, $); - Object.defineProperty(this, arkKind, { - value: "root", - enumerable: false - }); - } - get internal() { - return this; - } - get "~standard"() { - return { - vendor: "arktype", - version: 1, - validate: (input) => { - const out = this(input); - if (out instanceof ArkErrors) return out; - return { value: out }; - } - }; - } - as() { - return this; - } - brand(name) { - if (name === "") return throwParseError(emptyBrandNameMessage); - return this; - } - readonly() { - return this; - } - branches = this.hasKind("union") ? this.inner.branches : [this]; - distribute(mapBranch, reduceMapped) { - const mappedBranches = this.branches.map(mapBranch); - return reduceMapped?.(mappedBranches) ?? mappedBranches; - } - get shortDescription() { - return this.meta.description ?? this.defaultShortDescription; - } - toJsonSchema(opts = {}) { - const ctx = mergeToJsonSchemaConfigs(this.$.resolvedConfig.toJsonSchema, opts); - ctx.useRefs ||= this.isCyclic; - const schema$1 = typeof ctx.dialect === "string" ? { $schema: ctx.dialect } : {}; - Object.assign(schema$1, this.toJsonSchemaRecurse(ctx)); - if (ctx.useRefs) schema$1.$defs = flatMorph(this.references, (i, ref) => ref.isRoot() && !ref.alwaysExpandJsonSchema ? 
[ref.id, ref.toResolvedJsonSchema(ctx)] : []); - return schema$1; - } - toJsonSchemaRecurse(ctx) { - if (ctx.useRefs && !this.alwaysExpandJsonSchema) return { $ref: `#/$defs/${this.id}` }; - return this.toResolvedJsonSchema(ctx); - } - get alwaysExpandJsonSchema() { - return this.isBasis() || this.kind === "alias" || this.hasKind("union") && this.isBoolean; - } - toResolvedJsonSchema(ctx) { - const result = this.innerToJsonSchema(ctx); - return Object.assign(result, this.metaJson); - } - intersect(r) { - const rNode = this.$.parseDefinition(r); - const result = this.rawIntersect(rNode); - if (result instanceof Disjoint) return result; - return this.$.finalize(result); - } - rawIntersect(r) { - return intersectNodesRoot(this, r, this.$); - } - toNeverIfDisjoint() { - return this; - } - and(r) { - const result = this.intersect(r); - return result instanceof Disjoint ? result.throw() : result; - } - rawAnd(r) { - const result = this.rawIntersect(r); - return result instanceof Disjoint ? result.throw() : result; - } - or(r) { - const rNode = this.$.parseDefinition(r); - return this.$.finalize(this.rawOr(rNode)); - } - rawOr(r) { - const branches = [...this.branches, ...r.branches]; - return this.$.node("union", branches); - } - map(flatMapEntry) { - return this.$.schema(this.applyStructuralOperation("map", [flatMapEntry])); - } - pick(...keys) { - return this.$.schema(this.applyStructuralOperation("pick", keys)); - } - omit(...keys) { - return this.$.schema(this.applyStructuralOperation("omit", keys)); - } - required() { - return this.$.schema(this.applyStructuralOperation("required", [])); - } - partial() { - return this.$.schema(this.applyStructuralOperation("partial", [])); - } - _keyof; - keyof() { - if (this._keyof) return this._keyof; - const result = this.applyStructuralOperation("keyof", []).reduce((result$1, branch) => result$1.intersect(branch).toNeverIfDisjoint(), $ark.intrinsic.unknown.internal); - if (result.branches.length === 0) throwParseError(writeUnsatisfiableExpressionError(`keyof ${this.expression}`)); - return this._keyof = this.$.finalize(result); - } - get props() { - if (this.branches.length !== 1) return throwParseError(writeLiteralUnionEntriesMessage(this.expression)); - return [...this.applyStructuralOperation("props", [])[0]]; - } - merge(r) { - const rNode = this.$.parseDefinition(r); - return this.$.schema(rNode.distribute((branch) => this.applyStructuralOperation("merge", [structureOf(branch) ?? throwParseError(writeNonStructuralOperandMessage("merge", branch.expression))]))); - } - applyStructuralOperation(operation, args$1) { - return this.distribute((branch) => { - if (branch.equals($ark.intrinsic.object) && operation !== "merge") return branch; - const structure = structureOf(branch); - if (!structure) throwParseError(writeNonStructuralOperandMessage(operation, branch.expression)); - if (operation === "keyof") return structure.keyof(); - if (operation === "get") return structure.get(...args$1); - if (operation === "props") return structure.props; - const structuralMethodName = operation === "required" ? "require" : operation === "partial" ? 
"optionalize" : operation; - return this.$.node("intersection", { - ...branch.inner, - structure: structure[structuralMethodName](...args$1) - }); - }); - } - get(...path$1) { - if (path$1[0] === void 0) return this; - return this.$.schema(this.applyStructuralOperation("get", path$1)); - } - extract(r) { - const rNode = this.$.parseDefinition(r); - return this.$.schema(this.branches.filter((branch) => branch.extends(rNode))); - } - exclude(r) { - const rNode = this.$.parseDefinition(r); - return this.$.schema(this.branches.filter((branch) => !branch.extends(rNode))); - } - array() { - return this.$.schema(this.isUnknown() ? { proto: Array } : { - proto: Array, - sequence: this - }, { prereduced: true }); - } - overlaps(r) { - const intersection = this.intersect(r); - return !(intersection instanceof Disjoint); - } - extends(r) { - const intersection = this.intersect(r); - return !(intersection instanceof Disjoint) && this.equals(intersection); - } - ifExtends(r) { - return this.extends(r) ? this : void 0; - } - subsumes(r) { - const rNode = this.$.parseDefinition(r); - return rNode.extends(this); - } - configure(meta, selector = "shallow") { - return this.configureReferences(meta, selector); - } - describe(description, selector = "shallow") { - return this.configure({ description }, selector); - } - optional() { - return [this, "?"]; - } - default(thunkableValue) { - assertDefaultValueAssignability(this, thunkableValue, null); - return [ - this, - "=", - thunkableValue - ]; - } - from(input) { - return this.assert(input); - } - _pipe(...morphs) { - const result = morphs.reduce((acc, morph) => acc.rawPipeOnce(morph), this); - return this.$.finalize(result); - } - tryPipe(...morphs) { - const result = morphs.reduce((acc, morph) => acc.rawPipeOnce(hasArkKind(morph, "root") ? morph : (In, ctx) => { - try { - return morph(In, ctx); - } catch (e) { - return ctx.error({ - code: "predicate", - predicate: morph, - actual: `aborted due to error:\n ${e}\n` - }); - } - }), this); - return this.$.finalize(result); - } - pipe = Object.assign(this._pipe.bind(this), { try: this.tryPipe.bind(this) }); - to(def) { - return this.$.finalize(this.toNode(this.$.parseDefinition(def))); - } - toNode(root) { - const result = pipeNodesRoot(this, root, this.$); - if (result instanceof Disjoint) return result.throw(); - return result; - } - rawPipeOnce(morph) { - if (hasArkKind(morph, "root")) return this.toNode(morph); - return this.distribute((branch) => branch.hasKind("morph") ? this.$.node("morph", { - in: branch.inner.in, - morphs: [...branch.morphs, morph] - }) : this.$.node("morph", { - in: branch, - morphs: [morph] - }), this.$.parseSchema); - } - narrow(predicate) { - return this.constrainOut("predicate", predicate); - } - constrain(kind, schema$1) { - return this._constrain("root", kind, schema$1); - } - constrainIn(kind, schema$1) { - return this._constrain("in", kind, schema$1); - } - constrainOut(kind, schema$1) { - return this._constrain("out", kind, schema$1); - } - _constrain(io, kind, schema$1) { - const constraint = this.$.node(kind, schema$1); - if (constraint.isRoot()) return constraint.isUnknown() ? this : throwInternalError(`Unexpected constraint node ${constraint}`); - const operand = io === "root" ? 
this : this[io]; - if (operand.hasKind("morph") || constraint.impliedBasis && !operand.extends(constraint.impliedBasis)) return throwInvalidOperandError(kind, constraint.impliedBasis, this); - const partialIntersection = this.$.node("intersection", { [constraint.kind]: constraint }); - const result = io === "out" ? pipeNodesRoot(this, partialIntersection, this.$) : intersectNodesRoot(this, partialIntersection, this.$); - if (result instanceof Disjoint) result.throw(); - return this.$.finalize(result); - } - onUndeclaredKey(cfg) { - const rule = typeof cfg === "string" ? cfg : cfg.rule; - const deep = typeof cfg === "string" ? false : cfg.deep; - return this.$.finalize(this.transform((kind, inner) => kind === "structure" ? rule === "ignore" ? omit(inner, { undeclared: 1 }) : { - ...inner, - undeclared: rule - } : inner, deep ? void 0 : { shouldTransform: (node$1) => !includes(structuralKinds, node$1.kind) })); - } - hasEqualMorphs(r) { - if (!this.includesTransform && !r.includesTransform) return true; - if (!arrayEquals(this.shallowMorphs, r.shallowMorphs)) return false; - if (!arrayEquals(this.flatMorphs, r.flatMorphs, { isEqual: (l, r$1) => l.propString === r$1.propString && (l.node.hasKind("morph") && r$1.node.hasKind("morph") ? l.node.hasEqualMorphs(r$1.node) : l.node.hasKind("intersection") && r$1.node.hasKind("intersection") ? l.node.structure?.structuralMorphRef === r$1.node.structure?.structuralMorphRef : false) })) return false; - return true; - } - onDeepUndeclaredKey(behavior) { - return this.onUndeclaredKey({ - rule: behavior, - deep: true - }); - } - filter(predicate) { - return this.constrainIn("predicate", predicate); - } - divisibleBy(schema$1) { - return this.constrain("divisor", schema$1); - } - matching(schema$1) { - return this.constrain("pattern", schema$1); - } - atLeast(schema$1) { - return this.constrain("min", schema$1); - } - atMost(schema$1) { - return this.constrain("max", schema$1); - } - moreThan(schema$1) { - return this.constrain("min", exclusivizeRangeSchema(schema$1)); - } - lessThan(schema$1) { - return this.constrain("max", exclusivizeRangeSchema(schema$1)); - } - atLeastLength(schema$1) { - return this.constrain("minLength", schema$1); - } - atMostLength(schema$1) { - return this.constrain("maxLength", schema$1); - } - moreThanLength(schema$1) { - return this.constrain("minLength", exclusivizeRangeSchema(schema$1)); - } - lessThanLength(schema$1) { - return this.constrain("maxLength", exclusivizeRangeSchema(schema$1)); - } - exactlyLength(schema$1) { - return this.constrain("exactLength", schema$1); - } - atOrAfter(schema$1) { - return this.constrain("after", schema$1); - } - atOrBefore(schema$1) { - return this.constrain("before", schema$1); - } - laterThan(schema$1) { - return this.constrain("after", exclusivizeRangeSchema(schema$1)); - } - earlierThan(schema$1) { - return this.constrain("before", exclusivizeRangeSchema(schema$1)); - } -}; -const emptyBrandNameMessage = `Expected a non-empty brand name after #`; -const exclusivizeRangeSchema = (schema$1) => typeof schema$1 === "object" && !(schema$1 instanceof Date) ? { - ...schema$1, - exclusive: true -} : { - rule: schema$1, - exclusive: true -}; -const typeOrTermExtends = (t, base) => hasArkKind(base, "root") ? hasArkKind(t, "root") ? t.extends(base) : base.allows(t) : hasArkKind(t, "root") ? t.hasUnit(base) : base === t; -const structureOf = (branch) => { - if (branch.hasKind("morph")) return null; - if (branch.hasKind("intersection")) return branch.inner.structure ?? 
(branch.basis?.domain === "object" ? branch.$.bindReference($ark.intrinsic.emptyStructure) : null); - if (branch.isBasis() && branch.domain === "object") return branch.$.bindReference($ark.intrinsic.emptyStructure); - return null; -}; -const writeLiteralUnionEntriesMessage = (expression) => `Props cannot be extracted from a union. Use .distribute to extract props from each branch instead. Received: -${expression}`; -const writeNonStructuralOperandMessage = (operation, operand) => `${operation} operand must be an object (was ${operand})`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/utils.js -const defineRightwardIntersections = (kind, implementation$22) => flatMorph(schemaKindsRightOf(kind), (i, kind$1) => [kind$1, implementation$22]); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/alias.js -const normalizeAliasSchema = (schema$1) => typeof schema$1 === "string" ? { reference: schema$1 } : schema$1; -const neverIfDisjoint = (result) => result instanceof Disjoint ? $ark.intrinsic.never.internal : result; -const implementation$10 = implementNode({ - kind: "alias", - hasAssociatedError: false, - collapsibleKey: "reference", - keys: { - reference: { serialize: (s) => s.startsWith("$") ? s : `$ark.${s}` }, - resolve: {} - }, - normalize: normalizeAliasSchema, - defaults: { description: (node$1) => node$1.reference }, - intersections: { - alias: (l, r, ctx) => ctx.$.lazilyResolve(() => neverIfDisjoint(intersectOrPipeNodes(l.resolution, r.resolution, ctx)), `${l.reference}${ctx.pipe ? "=>" : "&"}${r.reference}`), - ...defineRightwardIntersections("alias", (l, r, ctx) => { - if (r.isUnknown()) return l; - if (r.isNever()) return r; - if (r.isBasis() && !r.overlaps($ark.intrinsic.object)) return Disjoint.init("assignability", $ark.intrinsic.object, r); - return ctx.$.lazilyResolve(() => neverIfDisjoint(intersectOrPipeNodes(l.resolution, r, ctx)), `${l.reference}${ctx.pipe ? 
"=>" : "&"}${r.id}`); - }) - } -}); -var AliasNode = class extends BaseRoot { - expression = this.reference; - structure = void 0; - get resolution() { - const result = this._resolve(); - return nodesByRegisteredId[this.id] = result; - } - _resolve() { - if (this.resolve) return this.resolve(); - if (this.reference[0] === "$") return this.$.resolveRoot(this.reference.slice(1)); - const id = this.reference; - let resolution = nodesByRegisteredId[id]; - const seen = []; - while (hasArkKind(resolution, "context")) { - if (seen.includes(resolution.id)) return throwParseError(writeShallowCycleErrorMessage(resolution.id, seen)); - seen.push(resolution.id); - resolution = nodesByRegisteredId[resolution.id]; - } - if (!hasArkKind(resolution, "root")) return throwInternalError(`Unexpected resolution for reference ${this.reference} -Seen: [${seen.join("->")}] -Resolution: ${printable(resolution)}`); - return resolution; - } - get resolutionId() { - if (this.reference.includes("&") || this.reference.includes("=>")) return this.resolution.id; - if (this.reference[0] !== "$") return this.reference; - const alias = this.reference.slice(1); - const resolution = this.$.resolutions[alias]; - if (typeof resolution === "string") return resolution; - if (hasArkKind(resolution, "root")) return resolution.id; - return throwInternalError(`Unexpected resolution for reference ${this.reference}: ${printable(resolution)}`); - } - get defaultShortDescription() { - return domainDescriptions.object; - } - innerToJsonSchema(ctx) { - return this.resolution.toJsonSchemaRecurse(ctx); - } - traverseAllows = (data, ctx) => { - const seen = ctx.seen[this.reference]; - if (seen?.includes(data)) return true; - ctx.seen[this.reference] = append(seen, data); - return this.resolution.traverseAllows(data, ctx); - }; - traverseApply = (data, ctx) => { - const seen = ctx.seen[this.reference]; - if (seen?.includes(data)) return; - ctx.seen[this.reference] = append(seen, data); - this.resolution.traverseApply(data, ctx); - }; - compile(js) { - const id = this.resolutionId; - js.if(`ctx.seen.${id} && ctx.seen.${id}.includes(data)`, () => js.return(true)); - js.if(`!ctx.seen.${id}`, () => js.line(`ctx.seen.${id} = []`)); - js.line(`ctx.seen.${id}.push(data)`); - js.return(js.invoke(id)); - } -}; -const writeShallowCycleErrorMessage = (name, seen) => `Alias '${name}' has a shallow resolution cycle: ${[...seen, name].join("->")}`; -const Alias = { - implementation: implementation$10, - Node: AliasNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/basis.js -var InternalBasis = class extends BaseRoot { - traverseApply = (data, ctx) => { - if (!this.traverseAllows(data, ctx)) ctx.errorFromNodeContext(this.errorContext); - }; - get errorContext() { - return { - code: this.kind, - description: this.description, - meta: this.meta, - ...this.inner - }; - } - get compiledErrorContext() { - return compileObjectLiteral(this.errorContext); - } - compile(js) { - if (js.traversalKind === "Allows") js.return(this.compiledCondition); - else js.if(this.compiledNegation, () => js.line(`${js.ctx}.errorFromNodeContext(${this.compiledErrorContext})`)); - } -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/domain.js -const implementation$9 = implementNode({ - kind: "domain", - hasAssociatedError: true, - collapsibleKey: "domain", - keys: { - domain: {}, - numberAllowsNaN: {} - }, - normalize: (schema$1) => typeof schema$1 === "string" ? 
{ domain: schema$1 } : hasKey(schema$1, "numberAllowsNaN") && schema$1.domain !== "number" ? throwParseError(Domain.writeBadAllowNanMessage(schema$1.domain)) : schema$1, - applyConfig: (schema$1, config) => schema$1.numberAllowsNaN === void 0 && schema$1.domain === "number" && config.numberAllowsNaN ? { - ...schema$1, - numberAllowsNaN: true - } : schema$1, - defaults: { - description: (node$1) => domainDescriptions[node$1.domain], - actual: (data) => Number.isNaN(data) ? "NaN" : domainDescriptions[domainOf(data)] - }, - intersections: { domain: (l, r) => l.domain === "number" && r.domain === "number" ? l.numberAllowsNaN ? r : l : Disjoint.init("domain", l, r) } -}); -var DomainNode = class extends InternalBasis { - requiresNaNCheck = this.domain === "number" && !this.numberAllowsNaN; - traverseAllows = this.requiresNaNCheck ? (data) => typeof data === "number" && !Number.isNaN(data) : (data) => domainOf(data) === this.domain; - compiledCondition = this.domain === "object" ? `((typeof data === "object" && data !== null) || typeof data === "function")` : `typeof data === "${this.domain}"${this.requiresNaNCheck ? " && !Number.isNaN(data)" : ""}`; - compiledNegation = this.domain === "object" ? `((typeof data !== "object" || data === null) && typeof data !== "function")` : `typeof data !== "${this.domain}"${this.requiresNaNCheck ? " || Number.isNaN(data)" : ""}`; - expression = this.numberAllowsNaN ? "number | NaN" : this.domain; - get nestableExpression() { - return this.numberAllowsNaN ? `(${this.expression})` : this.expression; - } - get defaultShortDescription() { - return domainDescriptions[this.domain]; - } - innerToJsonSchema(ctx) { - if (this.domain === "bigint" || this.domain === "symbol") return ctx.fallback.domain({ - code: "domain", - base: {}, - domain: this.domain - }); - return { type: this.domain }; - } -}; -const Domain = { - implementation: implementation$9, - Node: DomainNode, - writeBadAllowNanMessage: (actual) => `numberAllowsNaN may only be specified with domain "number" (was ${actual})` -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/intersection.js -const implementation$8 = implementNode({ - kind: "intersection", - hasAssociatedError: true, - normalize: (rawSchema) => { - if (isNode(rawSchema)) return rawSchema; - const { structure,...schema$1 } = rawSchema; - const hasRootStructureKey = !!structure; - const normalizedStructure = structure ?? {}; - const normalized = flatMorph(schema$1, (k, v) => { - if (isKeyOf(k, structureKeys)) { - if (hasRootStructureKey) throwParseError(`Flattened structure key ${k} cannot be specified alongside a root 'structure' key.`); - normalizedStructure[k] = v; - return []; - } - return [k, v]; - }); - if (hasArkKind(normalizedStructure, "constraint") || !isEmptyObject(normalizedStructure)) normalized.structure = normalizedStructure; - return normalized; - }, - finalizeInnerJson: ({ structure,...rest }) => hasDomain(structure, "object") ? 
{ - ...structure, - ...rest - } : rest, - keys: { - domain: { - child: true, - parse: (schema$1, ctx) => ctx.$.node("domain", schema$1) - }, - proto: { - child: true, - parse: (schema$1, ctx) => ctx.$.node("proto", schema$1) - }, - structure: { - child: true, - parse: (schema$1, ctx) => ctx.$.node("structure", schema$1), - serialize: (node$1) => { - if (!node$1.sequence?.minLength) return node$1.collapsibleJson; - const { sequence,...structureJson } = node$1.collapsibleJson; - const { minVariadicLength,...sequenceJson } = sequence; - const collapsibleSequenceJson = sequenceJson.variadic && Object.keys(sequenceJson).length === 1 ? sequenceJson.variadic : sequenceJson; - return { - ...structureJson, - sequence: collapsibleSequenceJson - }; - } - }, - divisor: { - child: true, - parse: constraintKeyParser("divisor") - }, - max: { - child: true, - parse: constraintKeyParser("max") - }, - min: { - child: true, - parse: constraintKeyParser("min") - }, - maxLength: { - child: true, - parse: constraintKeyParser("maxLength") - }, - minLength: { - child: true, - parse: constraintKeyParser("minLength") - }, - exactLength: { - child: true, - parse: constraintKeyParser("exactLength") - }, - before: { - child: true, - parse: constraintKeyParser("before") - }, - after: { - child: true, - parse: constraintKeyParser("after") - }, - pattern: { - child: true, - parse: constraintKeyParser("pattern") - }, - predicate: { - child: true, - parse: constraintKeyParser("predicate") - } - }, - reduce: (inner, $) => intersectIntersections({}, inner, { - $, - invert: false, - pipe: false - }), - defaults: { - description: (node$1) => { - if (node$1.children.length === 0) return "unknown"; - if (node$1.structure) return node$1.structure.description; - const childDescriptions = []; - if (node$1.basis && !node$1.refinements.some((r) => r.impl.obviatesBasisDescription)) childDescriptions.push(node$1.basis.description); - if (node$1.refinements.length) { - const sortedRefinementDescriptions = node$1.refinements.toSorted((l, r) => l.kind === "min" && r.kind === "max" ? -1 : 0).map((r) => r.description); - childDescriptions.push(...sortedRefinementDescriptions); - } - if (node$1.inner.predicate) childDescriptions.push(...node$1.inner.predicate.map((p) => p.description)); - return childDescriptions.join(" and "); - }, - expected: (source) => ` ◦ ${source.errors.map((e) => e.expected).join("\n ◦ ")}`, - problem: (ctx) => `(${ctx.actual}) must be...\n${ctx.expected}` - }, - intersections: { - intersection: (l, r, ctx) => intersectIntersections(l.inner, r.inner, ctx), - ...defineRightwardIntersections("intersection", (l, r, ctx) => { - if (l.children.length === 0) return r; - const { domain, proto,...lInnerConstraints } = l.inner; - const lBasis = proto ?? domain; - const basis = lBasis ? intersectOrPipeNodes(lBasis, r, ctx) : r; - return basis instanceof Disjoint ? basis : l?.basis?.equals(basis) ? l : l.$.node("intersection", { - ...lInnerConstraints, - [basis.kind]: basis - }, { prereduced: true }); - }) - } -}); -var IntersectionNode = class extends BaseRoot { - basis = this.inner.domain ?? this.inner.proto ?? null; - refinements = this.children.filter((node$1) => node$1.isRefinement()); - structure = this.inner.structure; - expression = writeIntersectionExpression(this); - get shallowMorphs() { - return this.inner.structure?.structuralMorph ? [this.inner.structure.structuralMorph] : []; - } - get defaultShortDescription() { - return this.basis?.defaultShortDescription ?? 
"present"; - } - innerToJsonSchema(ctx) { - return this.children.reduce((schema$1, child) => child.isBasis() ? child.toJsonSchemaRecurse(ctx) : child.reduceJsonSchema(schema$1, ctx), {}); - } - traverseAllows = (data, ctx) => this.children.every((child) => child.traverseAllows(data, ctx)); - traverseApply = (data, ctx) => { - const errorCount = ctx.currentErrorCount; - if (this.basis) { - this.basis.traverseApply(data, ctx); - if (ctx.currentErrorCount > errorCount) return; - } - if (this.refinements.length) { - for (let i = 0; i < this.refinements.length - 1; i++) { - this.refinements[i].traverseApply(data, ctx); - if (ctx.failFast && ctx.currentErrorCount > errorCount) return; - } - this.refinements.at(-1).traverseApply(data, ctx); - if (ctx.currentErrorCount > errorCount) return; - } - if (this.structure) { - this.structure.traverseApply(data, ctx); - if (ctx.currentErrorCount > errorCount) return; - } - if (this.inner.predicate) { - for (let i = 0; i < this.inner.predicate.length - 1; i++) { - this.inner.predicate[i].traverseApply(data, ctx); - if (ctx.failFast && ctx.currentErrorCount > errorCount) return; - } - this.inner.predicate.at(-1).traverseApply(data, ctx); - } - }; - compile(js) { - if (js.traversalKind === "Allows") { - for (const child of this.children) js.check(child); - js.return(true); - return; - } - js.initializeErrorCount(); - if (this.basis) { - js.check(this.basis); - if (this.children.length > 1) js.returnIfFail(); - } - if (this.refinements.length) { - for (let i = 0; i < this.refinements.length - 1; i++) { - js.check(this.refinements[i]); - js.returnIfFailFast(); - } - js.check(this.refinements.at(-1)); - if (this.structure || this.inner.predicate) js.returnIfFail(); - } - if (this.structure) { - js.check(this.structure); - if (this.inner.predicate) js.returnIfFail(); - } - if (this.inner.predicate) { - for (let i = 0; i < this.inner.predicate.length - 1; i++) { - js.check(this.inner.predicate[i]); - js.returnIfFail(); - } - js.check(this.inner.predicate.at(-1)); - } - } -}; -const Intersection = { - implementation: implementation$8, - Node: IntersectionNode -}; -const writeIntersectionExpression = (node$1) => { - let expression = node$1.structure?.expression || `${node$1.basis && !node$1.refinements.some((n) => n.impl.obviatesBasisExpression) ? node$1.basis.nestableExpression + " " : ""}${node$1.refinements.map((n) => n.expression).join(" & ")}` || "unknown"; - if (expression === "Array == 0") expression = "[]"; - return expression; -}; -const intersectIntersections = (l, r, ctx) => { - const baseInner = {}; - const lBasis = l.proto ?? l.domain; - const rBasis = r.proto ?? r.domain; - const basisResult = lBasis ? rBasis ? intersectOrPipeNodes(lBasis, rBasis, ctx) : lBasis : rBasis; - if (basisResult instanceof Disjoint) return basisResult; - if (basisResult) baseInner[basisResult.kind] = basisResult; - return intersectConstraints({ - kind: "intersection", - baseInner, - l: flattenConstraints(l), - r: flattenConstraints(r), - roots: [], - ctx - }); -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/morph.js -const implementation$7 = implementNode({ - kind: "morph", - hasAssociatedError: false, - keys: { - in: { - child: true, - parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) - }, - morphs: { - parse: liftArray, - serialize: (morphs) => morphs.map((m) => hasArkKind(m, "root") ? 
m.json : registeredReference(m)) - }, - declaredIn: { - child: false, - serialize: (node$1) => node$1.json - }, - declaredOut: { - child: false, - serialize: (node$1) => node$1.json - } - }, - normalize: (schema$1) => schema$1, - defaults: { description: (node$1) => `a morph from ${node$1.in.description} to ${node$1.out?.description ?? "unknown"}` }, - intersections: { - morph: (l, r, ctx) => { - if (!l.hasEqualMorphs(r)) return throwParseError(writeMorphIntersectionMessage(l.expression, r.expression)); - const inTersection = intersectOrPipeNodes(l.in, r.in, ctx); - if (inTersection instanceof Disjoint) return inTersection; - const baseInner = { morphs: l.morphs }; - if (l.declaredIn || r.declaredIn) { - const declaredIn = intersectOrPipeNodes(l.in, r.in, ctx); - if (declaredIn instanceof Disjoint) return declaredIn.throw(); - else baseInner.declaredIn = declaredIn; - } - if (l.declaredOut || r.declaredOut) { - const declaredOut = intersectOrPipeNodes(l.out, r.out, ctx); - if (declaredOut instanceof Disjoint) return declaredOut.throw(); - else baseInner.declaredOut = declaredOut; - } - return inTersection.distribute((inBranch) => ctx.$.node("morph", { - ...baseInner, - in: inBranch - }), ctx.$.parseSchema); - }, - ...defineRightwardIntersections("morph", (l, r, ctx) => { - const inTersection = l.inner.in ? intersectOrPipeNodes(l.inner.in, r, ctx) : r; - return inTersection instanceof Disjoint ? inTersection : inTersection.equals(l.inner.in) ? l : ctx.$.node("morph", { - ...l.inner, - in: inTersection - }); - }) - } -}); -var MorphNode = class extends BaseRoot { - serializedMorphs = this.morphs.map(registeredReference); - compiledMorphs = `[${this.serializedMorphs}]`; - lastMorph = this.inner.morphs.at(-1); - lastMorphIfNode = hasArkKind(this.lastMorph, "root") ? this.lastMorph : void 0; - introspectableIn = this.inner.in; - introspectableOut = this.lastMorphIfNode ? Object.assign(this.referencesById, this.lastMorphIfNode.referencesById) && this.lastMorphIfNode.out : void 0; - get shallowMorphs() { - return Array.isArray(this.inner.in?.shallowMorphs) ? [...this.inner.in.shallowMorphs, ...this.morphs] : this.morphs; - } - get in() { - return this.declaredIn ?? this.inner.in?.in ?? $ark.intrinsic.unknown.internal; - } - get out() { - return this.declaredOut ?? this.introspectableOut ?? $ark.intrinsic.unknown.internal; - } - declareIn(declaredIn) { - return this.$.node("morph", { - ...this.inner, - declaredIn - }); - } - declareOut(declaredOut) { - return this.$.node("morph", { - ...this.inner, - declaredOut - }); - } - expression = `(In: ${this.in.expression}) => ${this.lastMorphIfNode ? "To" : "Out"}<${this.out.expression}>`; - get defaultShortDescription() { - return this.in.meta.description ?? this.in.defaultShortDescription; - } - innerToJsonSchema(ctx) { - return ctx.fallback.morph({ - code: "morph", - base: this.in.toJsonSchemaRecurse(ctx), - out: this.introspectableOut?.toJsonSchemaRecurse(ctx) ?? 
null - }); - } - compile(js) { - if (js.traversalKind === "Allows") { - if (!this.introspectableIn) return; - js.return(js.invoke(this.introspectableIn)); - return; - } - if (this.introspectableIn) js.line(js.invoke(this.introspectableIn)); - js.line(`ctx.queueMorphs(${this.compiledMorphs})`); - } - traverseAllows = (data, ctx) => !this.introspectableIn || this.introspectableIn.traverseAllows(data, ctx); - traverseApply = (data, ctx) => { - if (this.introspectableIn) this.introspectableIn.traverseApply(data, ctx); - ctx.queueMorphs(this.morphs); - }; - /** Check if the morphs of r are equal to those of this node */ - hasEqualMorphs(r) { - return arrayEquals(this.morphs, r.morphs, { isEqual: (lMorph, rMorph) => lMorph === rMorph || hasArkKind(lMorph, "root") && hasArkKind(rMorph, "root") && lMorph.equals(rMorph) }); - } -}; -const Morph = { - implementation: implementation$7, - Node: MorphNode -}; -const writeMorphIntersectionMessage = (lDescription, rDescription) => `The intersection of distinct morphs at a single path is indeterminate: -Left: ${lDescription} -Right: ${rDescription}`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/proto.js -const implementation$6 = implementNode({ - kind: "proto", - hasAssociatedError: true, - collapsibleKey: "proto", - keys: { - proto: { serialize: (ctor) => getBuiltinNameOfConstructor(ctor) ?? defaultValueSerializer(ctor) }, - dateAllowsInvalid: {} - }, - normalize: (schema$1) => { - const normalized = typeof schema$1 === "string" ? { proto: builtinConstructors[schema$1] } : typeof schema$1 === "function" ? isNode(schema$1) ? schema$1 : { proto: schema$1 } : typeof schema$1.proto === "string" ? { - ...schema$1, - proto: builtinConstructors[schema$1.proto] - } : schema$1; - if (typeof normalized.proto !== "function") throwParseError(Proto.writeInvalidSchemaMessage(normalized.proto)); - if (hasKey(normalized, "dateAllowsInvalid") && normalized.proto !== Date) throwParseError(Proto.writeBadInvalidDateMessage(normalized.proto)); - return normalized; - }, - applyConfig: (schema$1, config) => { - if (schema$1.dateAllowsInvalid === void 0 && schema$1.proto === Date && config.dateAllowsInvalid) return { - ...schema$1, - dateAllowsInvalid: true - }; - return schema$1; - }, - defaults: { - description: (node$1) => node$1.builtinName ? objectKindDescriptions[node$1.builtinName] : `an instance of ${node$1.proto.name}`, - actual: (data) => data instanceof Date && data.toString() === "Invalid Date" ? "an invalid Date" : objectKindOrDomainOf(data) - }, - intersections: { - proto: (l, r) => l.proto === Date && r.proto === Date ? l.dateAllowsInvalid ? r : l : constructorExtends(l.proto, r.proto) ? l : constructorExtends(r.proto, l.proto) ? r : Disjoint.init("proto", l, r), - domain: (proto, domain) => domain.domain === "object" ? proto : Disjoint.init("domain", $ark.intrinsic.object.internal, domain) - } -}); -var ProtoNode = class extends InternalBasis { - builtinName = getBuiltinNameOfConstructor(this.proto); - serializedConstructor = this.json.proto; - requiresInvalidDateCheck = this.proto === Date && !this.dateAllowsInvalid; - traverseAllows = this.requiresInvalidDateCheck ? (data) => data instanceof Date && data.toString() !== "Invalid Date" : (data) => data instanceof this.proto; - compiledCondition = `data instanceof ${this.serializedConstructor}${this.requiresInvalidDateCheck ? 
` && data.toString() !== "Invalid Date"` : ""}`; - compiledNegation = `!(${this.compiledCondition})`; - innerToJsonSchema(ctx) { - switch (this.builtinName) { - case "Array": return { type: "array" }; - case "Date": return ctx.fallback.date?.({ - code: "date", - base: {} - }) ?? ctx.fallback.proto({ - code: "proto", - base: {}, - proto: this.proto - }); - default: return ctx.fallback.proto({ - code: "proto", - base: {}, - proto: this.proto - }); - } - } - expression = this.dateAllowsInvalid ? "Date | InvalidDate" : this.proto.name; - get nestableExpression() { - return this.dateAllowsInvalid ? `(${this.expression})` : this.expression; - } - domain = "object"; - get defaultShortDescription() { - return this.description; - } -}; -const Proto = { - implementation: implementation$6, - Node: ProtoNode, - writeBadInvalidDateMessage: (actual) => `dateAllowsInvalid may only be specified with constructor Date (was ${actual.name})`, - writeInvalidSchemaMessage: (actual) => `instanceOf operand must be a function (was ${domainOf(actual)})` -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/union.js -const implementation$5 = implementNode({ - kind: "union", - hasAssociatedError: true, - collapsibleKey: "branches", - keys: { - ordered: {}, - branches: { - child: true, - parse: (schema$1, ctx) => { - const branches = []; - for (const branchSchema of schema$1) { - const branchNodes = hasArkKind(branchSchema, "root") ? branchSchema.branches : ctx.$.parseSchema(branchSchema).branches; - for (const node$1 of branchNodes) if (node$1.hasKind("morph")) { - const matchingMorphIndex = branches.findIndex((matching) => matching.hasKind("morph") && matching.hasEqualMorphs(node$1)); - if (matchingMorphIndex === -1) branches.push(node$1); - else { - const matchingMorph = branches[matchingMorphIndex]; - branches[matchingMorphIndex] = ctx.$.node("morph", { - ...matchingMorph.inner, - in: matchingMorph.in.rawOr(node$1.in) - }); - } - } else branches.push(node$1); - } - if (!ctx.def.ordered) branches.sort((l, r) => l.hash < r.hash ? -1 : 1); - return branches; - } - } - }, - normalize: (schema$1) => isArray(schema$1) ? { branches: schema$1 } : schema$1, - reduce: (inner, $) => { - const reducedBranches = reduceBranches(inner); - if (reducedBranches.length === 1) return reducedBranches[0]; - if (reducedBranches.length === inner.branches.length) return; - return $.node("union", { - ...inner, - branches: reducedBranches - }, { prereduced: true }); - }, - defaults: { - description: (node$1) => node$1.distribute((branch) => branch.description, describeBranches), - expected: (ctx) => { - const byPath = groupBy(ctx.errors, "propString"); - const pathDescriptions = Object.entries(byPath).map(([path$1, errors]) => { - const branchesAtPath = []; - for (const errorAtPath of errors) appendUnique(branchesAtPath, errorAtPath.expected); - const expected = describeBranches(branchesAtPath); - const actual = errors.every((e) => e.actual === errors[0].actual) ? 
errors[0].actual : printable(errors[0].data); - return `${path$1 && `${path$1} `}must be ${expected}${actual && ` (was ${actual})`}`; - }); - return describeBranches(pathDescriptions); - }, - problem: (ctx) => ctx.expected, - message: (ctx) => ctx.problem - }, - intersections: { - union: (l, r, ctx) => { - if (l.isNever !== r.isNever) return Disjoint.init("presence", l, r); - let resultBranches; - if (l.ordered) { - if (r.ordered) throwParseError(writeOrderedIntersectionMessage(l.expression, r.expression)); - resultBranches = intersectBranches(r.branches, l.branches, ctx); - if (resultBranches instanceof Disjoint) resultBranches.invert(); - } else resultBranches = intersectBranches(l.branches, r.branches, ctx); - if (resultBranches instanceof Disjoint) return resultBranches; - return ctx.$.parseSchema(l.ordered || r.ordered ? { - branches: resultBranches, - ordered: true - } : { branches: resultBranches }); - }, - ...defineRightwardIntersections("union", (l, r, ctx) => { - const branches = intersectBranches(l.branches, [r], ctx); - if (branches instanceof Disjoint) return branches; - if (branches.length === 1) return branches[0]; - return ctx.$.parseSchema(l.ordered ? { - branches, - ordered: true - } : { branches }); - }) - } -}); -var UnionNode = class extends BaseRoot { - isBoolean = this.branches.length === 2 && this.branches[0].hasUnit(false) && this.branches[1].hasUnit(true); - get branchGroups() { - const branchGroups = []; - let firstBooleanIndex = -1; - for (const branch of this.branches) { - if (branch.hasKind("unit") && branch.domain === "boolean") { - if (firstBooleanIndex === -1) { - firstBooleanIndex = branchGroups.length; - branchGroups.push(branch); - } else branchGroups[firstBooleanIndex] = $ark.intrinsic.boolean; - continue; - } - branchGroups.push(branch); - } - return branchGroups; - } - unitBranches = this.branches.filter((n) => n.in.hasKind("unit")); - discriminant = this.discriminate(); - discriminantJson = this.discriminant ? 
discriminantToJson(this.discriminant) : null; - expression = this.distribute((n) => n.nestableExpression, expressBranches); - createBranchedOptimisticRootApply() { - return (data, onFail) => { - const optimisticResult = this.traverseOptimistic(data); - if (optimisticResult !== unset) return optimisticResult; - const ctx = new Traversal(data, this.$.resolvedConfig); - this.traverseApply(data, ctx); - return ctx.finalize(onFail); - }; - } - get shallowMorphs() { - return this.branches.reduce((morphs, branch) => appendUnique(morphs, branch.shallowMorphs), []); - } - get defaultShortDescription() { - return this.distribute((branch) => branch.defaultShortDescription, describeBranches); - } - innerToJsonSchema(ctx) { - if (this.branchGroups.length === 1 && this.branchGroups[0].equals($ark.intrinsic.boolean)) return { type: "boolean" }; - const jsonSchemaBranches = this.branchGroups.map((group) => group.toJsonSchemaRecurse(ctx)); - if (jsonSchemaBranches.every((branch) => Object.keys(branch).length === 1 && hasKey(branch, "const"))) return { enum: jsonSchemaBranches.map((branch) => branch.const) }; - return { anyOf: jsonSchemaBranches }; - } - traverseAllows = (data, ctx) => this.branches.some((b) => b.traverseAllows(data, ctx)); - traverseApply = (data, ctx) => { - const errors = []; - for (let i = 0; i < this.branches.length; i++) { - ctx.pushBranch(); - this.branches[i].traverseApply(data, ctx); - if (!ctx.hasError()) { - if (this.branches[i].includesTransform) return ctx.queuedMorphs.push(...ctx.popBranch().queuedMorphs); - return ctx.popBranch(); - } - errors.push(ctx.popBranch().error); - } - ctx.errorFromNodeContext({ - code: "union", - errors, - meta: this.meta - }); - }; - traverseOptimistic = (data) => { - for (let i = 0; i < this.branches.length; i++) { - const branch = this.branches[i]; - if (branch.traverseAllows(data)) { - if (branch.contextFreeMorph) return branch.contextFreeMorph(data); - return data; - } - } - return unset; - }; - compile(js) { - if (!this.discriminant || this.unitBranches.length === this.branches.length && this.branches.length === 2) return this.compileIndiscriminable(js); - let condition = this.discriminant.optionallyChainedPropString; - if (this.discriminant.kind === "domain") condition = `typeof ${condition} === "object" ? ${condition} === null ? "null" : "object" : typeof ${condition} === "function" ? "object" : typeof ${condition}`; - const cases = this.discriminant.cases; - const caseKeys = Object.keys(cases); - const { optimistic } = js; - js.optimistic = false; - js.block(`switch(${condition})`, () => { - for (const k in cases) { - const v = cases[k]; - const caseCondition = k === "default" ? k : `case ${k}`; - js.line(`${caseCondition}: return ${v === true ? optimistic ? js.data : v : optimistic ? `${js.invoke(v)} ? ${v.contextFreeMorph ? `${registeredReference(v.contextFreeMorph)}(${js.data})` : js.data} : "${unset}"` : js.invoke(v)}`); - } - return js; - }); - if (js.traversalKind === "Allows") { - js.return(optimistic ? `"${unset}"` : false); - return; - } - const expected = describeBranches(this.discriminant.kind === "domain" ? caseKeys.map((k) => { - const jsTypeOf = k.slice(1, -1); - return jsTypeOf === "function" ? domainDescriptions.object : domainDescriptions[jsTypeOf]; - }) : caseKeys); - const serializedPathSegments = this.discriminant.path.map((k) => typeof k === "symbol" ? registeredReference(k) : JSON.stringify(k)); - const serializedExpected = JSON.stringify(expected); - const serializedActual = this.discriminant.kind === "domain" ? 
`${serializedTypeOfDescriptions}[${condition}]` : `${serializedPrintable}(${condition})`; - js.line(`ctx.errorFromNodeContext({ - code: "predicate", - expected: ${serializedExpected}, - actual: ${serializedActual}, - relativePath: [${serializedPathSegments}], - meta: ${this.compiledMeta} -})`); - } - compileIndiscriminable(js) { - if (js.traversalKind === "Apply") { - js.const("errors", "[]"); - for (const branch of this.branches) js.line("ctx.pushBranch()").line(js.invoke(branch)).if("!ctx.hasError()", () => js.return(branch.includesTransform ? "ctx.queuedMorphs.push(...ctx.popBranch().queuedMorphs)" : "ctx.popBranch()")).line("errors.push(ctx.popBranch().error)"); - js.line(`ctx.errorFromNodeContext({ code: "union", errors, meta: ${this.compiledMeta} })`); - } else { - const { optimistic } = js; - js.optimistic = false; - for (const branch of this.branches) js.if(`${js.invoke(branch)}`, () => js.return(optimistic ? branch.contextFreeMorph ? `${registeredReference(branch.contextFreeMorph)}(${js.data})` : js.data : true)); - js.return(optimistic ? `"${unset}"` : false); - } - } - get nestableExpression() { - return this.isBoolean ? "boolean" : `(${this.expression})`; - } - discriminate() { - if (this.branches.length < 2 || this.isCyclic) return null; - if (this.unitBranches.length === this.branches.length) { - const cases$1 = flatMorph(this.unitBranches, (i, n) => [`${n.in.serializedValue}`, n.hasKind("morph") ? n : true]); - return { - kind: "unit", - path: [], - optionallyChainedPropString: "data", - cases: cases$1 - }; - } - const candidates = []; - for (let lIndex = 0; lIndex < this.branches.length - 1; lIndex++) { - const l = this.branches[lIndex]; - for (let rIndex = lIndex + 1; rIndex < this.branches.length; rIndex++) { - const r = this.branches[rIndex]; - const result = intersectNodesRoot(l.in, r.in, l.$); - if (!(result instanceof Disjoint)) continue; - for (const entry of result) { - if (!entry.kind || entry.optional) continue; - let lSerialized; - let rSerialized; - if (entry.kind === "domain") { - const lValue = entry.l; - const rValue = entry.r; - lSerialized = `"${typeof lValue === "string" ? lValue : lValue.domain}"`; - rSerialized = `"${typeof rValue === "string" ? rValue : rValue.domain}"`; - } else if (entry.kind === "unit") { - lSerialized = entry.l.serializedValue; - rSerialized = entry.r.serializedValue; - } else continue; - const matching = candidates.find((d) => arrayEquals(d.path, entry.path) && d.kind === entry.kind); - if (!matching) candidates.push({ - kind: entry.kind, - cases: { - [lSerialized]: { - branchIndices: [lIndex], - condition: entry.l - }, - [rSerialized]: { - branchIndices: [rIndex], - condition: entry.r - } - }, - path: entry.path - }); - else { - if (matching.cases[lSerialized]) matching.cases[lSerialized].branchIndices = appendUnique(matching.cases[lSerialized].branchIndices, lIndex); - else matching.cases[lSerialized] ??= { - branchIndices: [lIndex], - condition: entry.l - }; - if (matching.cases[rSerialized]) matching.cases[rSerialized].branchIndices = appendUnique(matching.cases[rSerialized].branchIndices, rIndex); - else matching.cases[rSerialized] ??= { - branchIndices: [rIndex], - condition: entry.r - }; - } - } - } - } - const orderedCandidates = this.ordered ? 
orderCandidates(candidates, this.branches) : candidates; - if (!orderedCandidates.length) return null; - const ctx = createCaseResolutionContext(orderedCandidates, this); - const cases = {}; - for (const k in ctx.best.cases) { - const resolution = resolveCase(ctx, k); - if (resolution === null) { - cases[k] = true; - continue; - } - if (resolution.length === this.branches.length) return null; - if (this.ordered) resolution.sort((l, r) => l.originalIndex - r.originalIndex); - const branches = resolution.map((entry) => entry.branch); - const caseNode = branches.length === 1 ? branches[0] : this.$.node("union", this.ordered ? { - branches, - ordered: true - } : branches); - Object.assign(this.referencesById, caseNode.referencesById); - cases[k] = caseNode; - } - if (ctx.defaultEntries.length) { - const branches = ctx.defaultEntries.map((entry) => entry.branch); - cases.default = this.$.node("union", this.ordered ? { - branches, - ordered: true - } : branches, { prereduced: true }); - Object.assign(this.referencesById, cases.default.referencesById); - } - return Object.assign(ctx.location, { cases }); - } -}; -const createCaseResolutionContext = (orderedCandidates, node$1) => { - const best = orderedCandidates.sort((l, r) => Object.keys(r.cases).length - Object.keys(l.cases).length)[0]; - const location = { - kind: best.kind, - path: best.path, - optionallyChainedPropString: optionallyChainPropString(best.path) - }; - const defaultEntries = node$1.branches.map((branch, originalIndex) => ({ - originalIndex, - branch - })); - return { - best, - location, - defaultEntries, - node: node$1 - }; -}; -const resolveCase = (ctx, key) => { - const caseCtx = ctx.best.cases[key]; - const discriminantNode = discriminantCaseToNode(caseCtx.condition, ctx.location.path, ctx.node.$); - let resolvedEntries = []; - const nextDefaults = []; - for (let i = 0; i < ctx.defaultEntries.length; i++) { - const entry = ctx.defaultEntries[i]; - if (caseCtx.branchIndices.includes(entry.originalIndex)) { - const pruned = pruneDiscriminant(ctx.node.branches[entry.originalIndex], ctx.location); - if (pruned === null) resolvedEntries = null; - else resolvedEntries?.push({ - originalIndex: entry.originalIndex, - branch: pruned - }); - } else if (entry.branch.hasKind("alias") && discriminantNode.hasKind("domain") && discriminantNode.domain === "object") resolvedEntries?.push(entry); - else { - if (entry.branch.in.overlaps(discriminantNode)) { - const overlapping = pruneDiscriminant(entry.branch, ctx.location); - resolvedEntries?.push({ - originalIndex: entry.originalIndex, - branch: overlapping - }); - } - nextDefaults.push(entry); - } - } - ctx.defaultEntries = nextDefaults; - return resolvedEntries; -}; -const orderCandidates = (candidates, originalBranches) => { - const viableCandidates = candidates.filter((candidate) => { - const caseGroups = Object.values(candidate.cases).map((caseCtx) => caseCtx.branchIndices); - for (let i = 0; i < caseGroups.length - 1; i++) { - const currentGroup = caseGroups[i]; - for (let j = i + 1; j < caseGroups.length; j++) { - const nextGroup = caseGroups[j]; - for (const currentIndex of currentGroup) for (const nextIndex of nextGroup) if (currentIndex > nextIndex) { - if (originalBranches[currentIndex].overlaps(originalBranches[nextIndex])) return false; - } - } - } - return true; - }); - return viableCandidates; -}; -const discriminantCaseToNode = (caseDiscriminant, path$1, $) => { - let node$1 = caseDiscriminant === "undefined" ? $.node("unit", { unit: void 0 }) : caseDiscriminant === "null" ? 
$.node("unit", { unit: null }) : caseDiscriminant === "boolean" ? $.units([true, false]) : caseDiscriminant; - for (let i = path$1.length - 1; i >= 0; i--) { - const key = path$1[i]; - node$1 = $.node("intersection", typeof key === "number" ? { - proto: "Array", - sequence: [...range(key).map((_) => ({})), node$1] - } : { - domain: "object", - required: [{ - key, - value: node$1 - }] - }); - } - return node$1; -}; -const optionallyChainPropString = (path$1) => path$1.reduce((acc, k) => acc + compileLiteralPropAccess(k, true), "data"); -const serializedTypeOfDescriptions = registeredReference(jsTypeOfDescriptions); -const serializedPrintable = registeredReference(printable); -const Union = { - implementation: implementation$5, - Node: UnionNode -}; -const discriminantToJson = (discriminant) => ({ - kind: discriminant.kind, - path: discriminant.path.map((k) => typeof k === "string" ? k : compileSerializedValue(k)), - cases: flatMorph(discriminant.cases, (k, node$1) => [k, node$1 === true ? node$1 : node$1.hasKind("union") && node$1.discriminantJson ? node$1.discriminantJson : node$1.json]) -}); -const describeExpressionOptions = { - delimiter: " | ", - finalDelimiter: " | " -}; -const expressBranches = (expressions) => describeBranches(expressions, describeExpressionOptions); -const describeBranches = (descriptions, opts) => { - const delimiter = opts?.delimiter ?? ", "; - const finalDelimiter = opts?.finalDelimiter ?? " or "; - if (descriptions.length === 0) return "never"; - if (descriptions.length === 1) return descriptions[0]; - if (descriptions.length === 2 && descriptions[0] === "false" && descriptions[1] === "true" || descriptions[0] === "true" && descriptions[1] === "false") return "boolean"; - const seen = {}; - const unique = descriptions.filter((s) => seen[s] ? false : seen[s] = true); - const last = unique.pop(); - return `${unique.join(delimiter)}${unique.length ? finalDelimiter : ""}${last}`; -}; -const intersectBranches = (l, r, ctx) => { - const batchesByR = r.map(() => []); - for (let lIndex = 0; lIndex < l.length; lIndex++) { - let candidatesByR = {}; - for (let rIndex = 0; rIndex < r.length; rIndex++) { - if (batchesByR[rIndex] === null) continue; - if (l[lIndex].equals(r[rIndex])) { - batchesByR[rIndex] = null; - candidatesByR = {}; - break; - } - const branchIntersection = intersectOrPipeNodes(l[lIndex], r[rIndex], ctx); - if (branchIntersection instanceof Disjoint) continue; - if (branchIntersection.equals(l[lIndex])) { - batchesByR[rIndex].push(l[lIndex]); - candidatesByR = {}; - break; - } - if (branchIntersection.equals(r[rIndex])) batchesByR[rIndex] = null; - else candidatesByR[rIndex] = branchIntersection; - } - for (const rIndex in candidatesByR) batchesByR[rIndex][lIndex] = candidatesByR[rIndex]; - } - const resultBranches = batchesByR.flatMap((batch, i) => batch?.flatMap((branch) => branch.branches) ?? r[i]); - return resultBranches.length === 0 ? 
Disjoint.init("union", l, r) : resultBranches; -}; -const reduceBranches = ({ branches, ordered }) => { - if (branches.length < 2) return branches; - const uniquenessByIndex = branches.map(() => true); - for (let i = 0; i < branches.length; i++) for (let j = i + 1; j < branches.length && uniquenessByIndex[i] && uniquenessByIndex[j]; j++) { - if (branches[i].equals(branches[j])) { - uniquenessByIndex[j] = false; - continue; - } - const intersection = intersectNodesRoot(branches[i].in, branches[j].in, branches[0].$); - if (intersection instanceof Disjoint) continue; - if (!ordered) assertDeterminateOverlap(branches[i], branches[j]); - if (intersection.equals(branches[i].in)) uniquenessByIndex[i] = !!ordered; - else if (intersection.equals(branches[j].in)) uniquenessByIndex[j] = false; - } - return branches.filter((_, i) => uniquenessByIndex[i]); -}; -const assertDeterminateOverlap = (l, r) => { - if (!l.includesTransform && !r.includesTransform) return; - if (!arrayEquals(l.shallowMorphs, r.shallowMorphs)) throwParseError(writeIndiscriminableMorphMessage(l.expression, r.expression)); - if (!arrayEquals(l.flatMorphs, r.flatMorphs, { isEqual: (l$1, r$1) => l$1.propString === r$1.propString && (l$1.node.hasKind("morph") && r$1.node.hasKind("morph") ? l$1.node.hasEqualMorphs(r$1.node) : l$1.node.hasKind("intersection") && r$1.node.hasKind("intersection") ? l$1.node.structure?.structuralMorphRef === r$1.node.structure?.structuralMorphRef : false) })) throwParseError(writeIndiscriminableMorphMessage(l.expression, r.expression)); -}; -const pruneDiscriminant = (discriminantBranch, discriminantCtx) => discriminantBranch.transform((nodeKind, inner) => { - if (nodeKind === "domain" || nodeKind === "unit") return null; - return inner; -}, { shouldTransform: (node$1, ctx) => { - const propString = optionallyChainPropString(ctx.path); - if (!discriminantCtx.optionallyChainedPropString.startsWith(propString)) return false; - if (node$1.hasKind("domain") && node$1.domain === "object") return true; - if ((node$1.hasKind("domain") || discriminantCtx.kind === "unit") && propString === discriminantCtx.optionallyChainedPropString) return true; - return node$1.children.length !== 0 && node$1.kind !== "index"; -} }); -const writeIndiscriminableMorphMessage = (lDescription, rDescription) => `An unordered union of a type including a morph and a type with overlapping input is indeterminate: -Left: ${lDescription} -Right: ${rDescription}`; -const writeOrderedIntersectionMessage = (lDescription, rDescription) => `The intersection of two ordered unions is indeterminate: -Left: ${lDescription} -Right: ${rDescription}`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/roots/unit.js -const implementation$4 = implementNode({ - kind: "unit", - hasAssociatedError: true, - keys: { unit: { - preserveUndefined: true, - serialize: (schema$1) => schema$1 instanceof Date ? schema$1.toISOString() : defaultValueSerializer(schema$1) - } }, - normalize: (schema$1) => schema$1, - defaults: { - description: (node$1) => printable(node$1.unit), - problem: ({ expected, actual }) => `${expected === actual ? `must be reference equal to ${expected} (serialized to the same value)` : `must be ${expected} (was ${actual})`}` - }, - intersections: { - unit: (l, r) => Disjoint.init("unit", l, r), - ...defineRightwardIntersections("unit", (l, r) => { - if (r.allows(l.unit)) return l; - const rBasis = r.hasKind("intersection") ? r.basis : r; - if (rBasis) { - const rDomain = rBasis.hasKind("domain") ? 
rBasis : $ark.intrinsic.object; - if (l.domain !== rDomain.domain) { - const lDomainDisjointValue = l.domain === "undefined" || l.domain === "null" || l.domain === "boolean" ? l.domain : $ark.intrinsic[l.domain]; - return Disjoint.init("domain", lDomainDisjointValue, rDomain); - } - } - return Disjoint.init("assignability", l, r.hasKind("intersection") ? r.children.find((rConstraint) => !rConstraint.allows(l.unit)) : r); - }) - } -}); -var UnitNode = class extends InternalBasis { - compiledValue = this.json.unit; - serializedValue = typeof this.unit === "string" || this.unit instanceof Date ? JSON.stringify(this.compiledValue) : `${this.compiledValue}`; - compiledCondition = compileEqualityCheck(this.unit, this.serializedValue); - compiledNegation = compileEqualityCheck(this.unit, this.serializedValue, "negated"); - expression = printable(this.unit); - domain = domainOf(this.unit); - get defaultShortDescription() { - return this.domain === "object" ? domainDescriptions.object : this.description; - } - innerToJsonSchema(ctx) { - return this.unit === null ? { type: "null" } : $ark.intrinsic.jsonPrimitive.allows(this.unit) ? { const: this.unit } : ctx.fallback.unit({ - code: "unit", - base: {}, - unit: this.unit - }); - } - traverseAllows = this.unit instanceof Date ? (data) => data instanceof Date && data.toISOString() === this.compiledValue : Number.isNaN(this.unit) ? (data) => Number.isNaN(data) : (data) => data === this.unit; -}; -const Unit = { - implementation: implementation$4, - Node: UnitNode -}; -const compileEqualityCheck = (unit, serializedValue, negated) => { - if (unit instanceof Date) { - const condition = `data instanceof Date && data.toISOString() === ${serializedValue}`; - return negated ? `!(${condition})` : condition; - } - if (Number.isNaN(unit)) return `${negated ? "!" : ""}Number.isNaN(data)`; - return `data ${negated ? "!" : "="}== ${serializedValue}`; -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/index.js -const implementation$3 = implementNode({ - kind: "index", - hasAssociatedError: false, - intersectionIsOpen: true, - keys: { - signature: { - child: true, - parse: (schema$1, ctx) => { - const key = ctx.$.parseSchema(schema$1); - if (!key.extends($ark.intrinsic.key)) return throwParseError(writeInvalidPropertyKeyMessage(key.expression)); - const enumerableBranches = key.branches.filter((b) => b.hasKind("unit")); - if (enumerableBranches.length) return throwParseError(writeEnumerableIndexBranches(enumerableBranches.map((b) => printable(b.unit)))); - return key; - } - }, - value: { - child: true, - parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) - } - }, - normalize: (schema$1) => schema$1, - defaults: { description: (node$1) => `[${node$1.signature.expression}]: ${node$1.value.description}` }, - intersections: { index: (l, r, ctx) => { - if (l.signature.equals(r.signature)) { - const valueIntersection = intersectOrPipeNodes(l.value, r.value, ctx); - const value$1 = valueIntersection instanceof Disjoint ? 
$ark.intrinsic.never.internal : valueIntersection; - return ctx.$.node("index", { - signature: l.signature, - value: value$1 - }); - } - if (l.signature.extends(r.signature) && l.value.subsumes(r.value)) return r; - if (r.signature.extends(l.signature) && r.value.subsumes(l.value)) return l; - return null; - } } -}); -var IndexNode = class extends BaseConstraint { - impliedBasis = $ark.intrinsic.object.internal; - expression = `[${this.signature.expression}]: ${this.value.expression}`; - flatRefs = append(this.value.flatRefs.map((ref) => flatRef([this.signature, ...ref.path], ref.node)), flatRef([this.signature], this.value)); - traverseAllows = (data, ctx) => stringAndSymbolicEntriesOf(data).every((entry) => { - if (this.signature.traverseAllows(entry[0], ctx)) return traverseKey(entry[0], () => this.value.traverseAllows(entry[1], ctx), ctx); - return true; - }); - traverseApply = (data, ctx) => { - for (const entry of stringAndSymbolicEntriesOf(data)) if (this.signature.traverseAllows(entry[0], ctx)) traverseKey(entry[0], () => this.value.traverseApply(entry[1], ctx), ctx); - }; - _transform(mapper, ctx) { - ctx.path.push(this.signature); - const result = super._transform(mapper, ctx); - ctx.path.pop(); - return result; - } - compile() {} -}; -const Index = { - implementation: implementation$3, - Node: IndexNode -}; -const writeEnumerableIndexBranches = (keys) => `Index keys ${keys.join(", ")} should be specified as named props.`; -const writeInvalidPropertyKeyMessage = (indexSchema) => `Indexed key definition '${indexSchema}' must be a string or symbol`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/required.js -const implementation$2 = implementNode({ - kind: "required", - hasAssociatedError: true, - intersectionIsOpen: true, - keys: { - key: {}, - value: { - child: true, - parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1) - } - }, - normalize: (schema$1) => schema$1, - defaults: { - description: (node$1) => `${node$1.compiledKey}: ${node$1.value.description}`, - expected: (ctx) => ctx.missingValueDescription, - actual: () => "missing" - }, - intersections: { - required: intersectProps, - optional: intersectProps - } -}); -var RequiredNode = class extends BaseProp { - expression = `${this.compiledKey}: ${this.value.expression}`; - errorContext = Object.freeze({ - code: "required", - missingValueDescription: this.value.defaultShortDescription, - relativePath: [this.key], - meta: this.meta - }); - compiledErrorContext = compileObjectLiteral(this.errorContext); -}; -const Required$1 = { - implementation: implementation$2, - Node: RequiredNode -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/sequence.js -const implementation$1 = implementNode({ - kind: "sequence", - hasAssociatedError: false, - collapsibleKey: "variadic", - keys: { - prefix: { - child: true, - parse: (schema$1, ctx) => { - if (schema$1.length === 0) return void 0; - return schema$1.map((element) => ctx.$.parseSchema(element)); - } - }, - optionals: { - child: true, - parse: (schema$1, ctx) => { - if (schema$1.length === 0) return void 0; - return schema$1.map((element) => ctx.$.parseSchema(element)); - } - }, - defaultables: { - child: (defaultables) => defaultables.map((element) => element[0]), - parse: (defaultables, ctx) => { - if (defaultables.length === 0) return void 0; - return defaultables.map((element) => { - const node$1 = ctx.$.parseSchema(element[0]); - assertDefaultValueAssignability(node$1, 
element[1], null); - return [node$1, element[1]]; - }); - }, - serialize: (defaults) => defaults.map((element) => [element[0].collapsibleJson, defaultValueSerializer(element[1])]) - }, - variadic: { - child: true, - parse: (schema$1, ctx) => ctx.$.parseSchema(schema$1, ctx) - }, - minVariadicLength: { parse: (min) => min === 0 ? void 0 : min }, - postfix: { - child: true, - parse: (schema$1, ctx) => { - if (schema$1.length === 0) return void 0; - return schema$1.map((element) => ctx.$.parseSchema(element)); - } - } - }, - normalize: (schema$1) => { - if (typeof schema$1 === "string") return { variadic: schema$1 }; - if ("variadic" in schema$1 || "prefix" in schema$1 || "defaultables" in schema$1 || "optionals" in schema$1 || "postfix" in schema$1 || "minVariadicLength" in schema$1) { - if (schema$1.postfix?.length) { - if (!schema$1.variadic) return throwParseError(postfixWithoutVariadicMessage); - if (schema$1.optionals?.length || schema$1.defaultables?.length) return throwParseError(postfixAfterOptionalOrDefaultableMessage); - } - if (schema$1.minVariadicLength && !schema$1.variadic) return throwParseError("minVariadicLength may not be specified without a variadic element"); - return schema$1; - } - return { variadic: schema$1 }; - }, - reduce: (raw, $) => { - let minVariadicLength = raw.minVariadicLength ?? 0; - const prefix = raw.prefix?.slice() ?? []; - const defaultables = raw.defaultables?.slice() ?? []; - const optionals = raw.optionals?.slice() ?? []; - const postfix = raw.postfix?.slice() ?? []; - if (raw.variadic) { - while (optionals.at(-1)?.equals(raw.variadic)) optionals.pop(); - if (optionals.length === 0 && defaultables.length === 0) while (prefix.at(-1)?.equals(raw.variadic)) { - prefix.pop(); - minVariadicLength++; - } - while (postfix[0]?.equals(raw.variadic)) { - postfix.shift(); - minVariadicLength++; - } - } else if (optionals.length === 0 && defaultables.length === 0) prefix.push(...postfix.splice(0)); - if (minVariadicLength !== raw.minVariadicLength || raw.prefix && raw.prefix.length !== prefix.length) return $.node("sequence", { - ...raw, - prefix, - defaultables, - optionals, - postfix, - minVariadicLength - }, { prereduced: true }); - }, - defaults: { description: (node$1) => { - if (node$1.isVariadicOnly) return `${node$1.variadic.nestableExpression}[]`; - const innerDescription = node$1.tuple.map((element) => element.kind === "defaultables" ? `${element.node.nestableExpression} = ${printable(element.default)}` : element.kind === "optionals" ? `${element.node.nestableExpression}?` : element.kind === "variadic" ? `...${element.node.nestableExpression}[]` : element.node.expression).join(", "); - return `[${innerDescription}]`; - } }, - intersections: { sequence: (l, r, ctx) => { - const rootState = _intersectSequences({ - l: l.tuple, - r: r.tuple, - disjoint: new Disjoint(), - result: [], - fixedVariants: [], - ctx - }); - const viableBranches = rootState.disjoint.length === 0 ? [rootState, ...rootState.fixedVariants] : rootState.fixedVariants; - return viableBranches.length === 0 ? rootState.disjoint : viableBranches.length === 1 ? ctx.$.node("sequence", sequenceTupleToInner(viableBranches[0].result)) : ctx.$.node("union", viableBranches.map((state) => ({ - proto: Array, - sequence: sequenceTupleToInner(state.result) - }))); - } } -}); -var SequenceNode = class extends BaseConstraint { - impliedBasis = $ark.intrinsic.Array.internal; - tuple = sequenceInnerToTuple(this.inner); - prefixLength = this.prefix?.length ?? 
0; - defaultablesLength = this.defaultables?.length ?? 0; - optionalsLength = this.optionals?.length ?? 0; - postfixLength = this.postfix?.length ?? 0; - defaultablesAndOptionals = []; - prevariadic = this.tuple.filter((el) => { - if (el.kind === "defaultables" || el.kind === "optionals") { - this.defaultablesAndOptionals.push(el.node); - return true; - } - return el.kind === "prefix"; - }); - variadicOrPostfix = conflatenate(this.variadic && [this.variadic], this.postfix); - flatRefs = this.addFlatRefs(); - addFlatRefs() { - appendUniqueFlatRefs(this.flatRefs, this.prevariadic.flatMap((element, i) => append(element.node.flatRefs.map((ref) => flatRef([`${i}`, ...ref.path], ref.node)), flatRef([`${i}`], element.node)))); - appendUniqueFlatRefs(this.flatRefs, this.variadicOrPostfix.flatMap((element) => append(element.flatRefs.map((ref) => flatRef([$ark.intrinsic.nonNegativeIntegerString.internal, ...ref.path], ref.node)), flatRef([$ark.intrinsic.nonNegativeIntegerString.internal], element)))); - return this.flatRefs; - } - isVariadicOnly = this.prevariadic.length + this.postfixLength === 0; - minVariadicLength = this.inner.minVariadicLength ?? 0; - minLength = this.prefixLength + this.minVariadicLength + this.postfixLength; - minLengthNode = this.minLength === 0 ? null : this.$.node("minLength", this.minLength); - maxLength = this.variadic ? null : this.tuple.length; - maxLengthNode = this.maxLength === null ? null : this.$.node("maxLength", this.maxLength); - impliedSiblings = this.minLengthNode ? this.maxLengthNode ? [this.minLengthNode, this.maxLengthNode] : [this.minLengthNode] : this.maxLengthNode ? [this.maxLengthNode] : []; - defaultValueMorphs = getDefaultableMorphs(this); - defaultValueMorphsReference = this.defaultValueMorphs.length ? registeredReference(this.defaultValueMorphs) : void 0; - elementAtIndex(data, index) { - if (index < this.prevariadic.length) return this.tuple[index]; - const firstPostfixIndex = data.length - this.postfixLength; - if (index >= firstPostfixIndex) return { - kind: "postfix", - node: this.postfix[index - firstPostfixIndex] - }; - return { - kind: "variadic", - node: this.variadic ?? throwInternalError(`Unexpected attempt to access index ${index} on ${this}`) - }; - } - traverseAllows = (data, ctx) => { - for (let i = 0; i < data.length; i++) if (!this.elementAtIndex(data, i).node.traverseAllows(data[i], ctx)) return false; - return true; - }; - traverseApply = (data, ctx) => { - let i = 0; - for (; i < data.length; i++) traverseKey(i, () => this.elementAtIndex(data, i).node.traverseApply(data[i], ctx), ctx); - }; - get element() { - return this.cacheGetter("element", this.$.node("union", this.children)); - } - compile(js) { - if (this.prefix) for (const [i, node$1] of this.prefix.entries()) js.traverseKey(`${i}`, `data[${i}]`, node$1); - for (const [i, node$1] of this.defaultablesAndOptionals.entries()) { - const dataIndex = `${i + this.prefixLength}`; - js.if(`${dataIndex} >= ${js.data}.length`, () => js.traversalKind === "Allows" ? js.return(true) : js.return()); - js.traverseKey(dataIndex, `data[${dataIndex}]`, node$1); - } - if (this.variadic) { - if (this.postfix) js.const("firstPostfixIndex", `${js.data}.length${this.postfix ? `- ${this.postfix.length}` : ""}`); - js.for(`i < ${this.postfix ? 
"firstPostfixIndex" : "data.length"}`, () => js.traverseKey("i", "data[i]", this.variadic), this.prevariadic.length); - if (this.postfix) for (const [i, node$1] of this.postfix.entries()) { - const keyExpression = `firstPostfixIndex + ${i}`; - js.traverseKey(keyExpression, `data[${keyExpression}]`, node$1); - } - } - if (js.traversalKind === "Allows") js.return(true); - } - _transform(mapper, ctx) { - ctx.path.push($ark.intrinsic.nonNegativeIntegerString.internal); - const result = super._transform(mapper, ctx); - ctx.path.pop(); - return result; - } - expression = this.description; - reduceJsonSchema(schema$1, ctx) { - if (this.prevariadic.length) schema$1.prefixItems = this.prevariadic.map((el) => { - const valueSchema = el.node.toJsonSchemaRecurse(ctx); - if (el.kind === "defaultables") { - const value$1 = typeof el.default === "function" ? el.default() : el.default; - valueSchema.default = $ark.intrinsic.jsonData.allows(value$1) ? value$1 : ctx.fallback.defaultValue({ - code: "defaultValue", - base: valueSchema, - value: value$1 - }); - } - return valueSchema; - }); - if (this.minLength) schema$1.minItems = this.minLength; - if (this.variadic) { - const variadicSchema = Object.assign(schema$1, { items: this.variadic.toJsonSchemaRecurse(ctx) }); - if (this.maxLength) variadicSchema.maxItems = this.maxLength; - if (this.postfix) { - const elements = this.postfix.map((el) => el.toJsonSchemaRecurse(ctx)); - schema$1 = ctx.fallback.arrayPostfix({ - code: "arrayPostfix", - base: variadicSchema, - elements - }); - } - } else { - schema$1.items = false; - delete schema$1.maxItems; - } - return schema$1; - } -}; -const defaultableMorphsCache$1 = {}; -const getDefaultableMorphs = (node$1) => { - if (!node$1.defaultables) return []; - const morphs = []; - let cacheKey = "["; - const lastDefaultableIndex = node$1.prefixLength + node$1.defaultablesLength - 1; - for (let i = node$1.prefixLength; i <= lastDefaultableIndex; i++) { - const [elementNode, defaultValue] = node$1.defaultables[i - node$1.prefixLength]; - morphs.push(computeDefaultValueMorph(i, elementNode, defaultValue)); - cacheKey += `${i}: ${elementNode.id} = ${defaultValueSerializer(defaultValue)}, `; - } - cacheKey += "]"; - return defaultableMorphsCache$1[cacheKey] ??= morphs; -}; -const Sequence = { - implementation: implementation$1, - Node: SequenceNode -}; -const sequenceInnerToTuple = (inner) => { - const tuple = []; - if (inner.prefix) for (const node$1 of inner.prefix) tuple.push({ - kind: "prefix", - node: node$1 - }); - if (inner.defaultables) for (const [node$1, defaultValue] of inner.defaultables) tuple.push({ - kind: "defaultables", - node: node$1, - default: defaultValue - }); - if (inner.optionals) for (const node$1 of inner.optionals) tuple.push({ - kind: "optionals", - node: node$1 - }); - if (inner.variadic) tuple.push({ - kind: "variadic", - node: inner.variadic - }); - if (inner.postfix) for (const node$1 of inner.postfix) tuple.push({ - kind: "postfix", - node: node$1 - }); - return tuple; -}; -const sequenceTupleToInner = (tuple) => tuple.reduce((result, element) => { - if (element.kind === "variadic") result.variadic = element.node; - else if (element.kind === "defaultables") result.defaultables = append(result.defaultables, [[element.node, element.default]]); - else result[element.kind] = append(result[element.kind], element.node); - return result; -}, {}); -const postfixAfterOptionalOrDefaultableMessage = "A postfix required element cannot follow an optional or defaultable element"; -const 
postfixWithoutVariadicMessage = "A postfix element requires a variadic element"; -const _intersectSequences = (s) => { - const [lHead, ...lTail] = s.l; - const [rHead, ...rTail] = s.r; - if (!lHead || !rHead) return s; - const lHasPostfix = lTail.at(-1)?.kind === "postfix"; - const rHasPostfix = rTail.at(-1)?.kind === "postfix"; - const kind = lHead.kind === "prefix" || rHead.kind === "prefix" ? "prefix" : lHead.kind === "postfix" || rHead.kind === "postfix" ? "postfix" : lHead.kind === "variadic" && rHead.kind === "variadic" ? "variadic" : lHasPostfix || rHasPostfix ? "prefix" : lHead.kind === "defaultables" || rHead.kind === "defaultables" ? "defaultables" : "optionals"; - if (lHead.kind === "prefix" && rHead.kind === "variadic" && rHasPostfix) { - const postfixBranchResult = _intersectSequences({ - ...s, - fixedVariants: [], - r: rTail.map((element) => ({ - ...element, - kind: "prefix" - })) - }); - if (postfixBranchResult.disjoint.length === 0) s.fixedVariants.push(postfixBranchResult); - } else if (rHead.kind === "prefix" && lHead.kind === "variadic" && lHasPostfix) { - const postfixBranchResult = _intersectSequences({ - ...s, - fixedVariants: [], - l: lTail.map((element) => ({ - ...element, - kind: "prefix" - })) - }); - if (postfixBranchResult.disjoint.length === 0) s.fixedVariants.push(postfixBranchResult); - } - const result = intersectOrPipeNodes(lHead.node, rHead.node, s.ctx); - if (result instanceof Disjoint) if (kind === "prefix" || kind === "postfix") { - s.disjoint.push(...result.withPrefixKey(kind === "prefix" ? s.result.length : `-${lTail.length + 1}`, "required")); - s.result = [...s.result, { - kind, - node: $ark.intrinsic.never.internal - }]; - } else if (kind === "optionals" || kind === "defaultables") return s; - else return _intersectSequences({ - ...s, - fixedVariants: [], - l: lTail.map((element) => ({ - ...element, - kind: "prefix" - })), - r: lTail.map((element) => ({ - ...element, - kind: "prefix" - })) - }); - else if (kind === "defaultables") { - if (lHead.kind === "defaultables" && rHead.kind === "defaultables" && lHead.default !== rHead.default) throwParseError(writeDefaultIntersectionMessage(lHead.default, rHead.default)); - s.result = [...s.result, { - kind, - node: result, - default: lHead.kind === "defaultables" ? lHead.default : rHead.kind === "defaultables" ? rHead.default : throwInternalError(`Unexpected defaultable intersection from ${lHead.kind} and ${rHead.kind} elements.`) - }]; - } else s.result = [...s.result, { - kind, - node: result - }]; - const lRemaining = s.l.length; - const rRemaining = s.r.length; - if (lHead.kind !== "variadic" || lRemaining >= rRemaining && (rHead.kind === "variadic" || rRemaining === 1)) s.l = lTail; - if (rHead.kind !== "variadic" || rRemaining >= lRemaining && (lHead.kind === "variadic" || lRemaining === 1)) s.r = rTail; - return _intersectSequences(s); -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/structure.js -const createStructuralWriter = (childStringProp) => (node$1) => { - if (node$1.props.length || node$1.index) { - const parts = node$1.index?.map((index) => index[childStringProp]) ?? []; - for (const prop of node$1.props) parts.push(prop[childStringProp]); - if (node$1.undeclared) parts.push(`+ (undeclared): ${node$1.undeclared}`); - const objectLiteralDescription = `{ ${parts.join(", ")} }`; - return node$1.sequence ? 
`${objectLiteralDescription} & ${node$1.sequence.description}` : objectLiteralDescription; - } - return node$1.sequence?.description ?? "{}"; -}; -const structuralDescription = createStructuralWriter("description"); -const structuralExpression = createStructuralWriter("expression"); -const intersectPropsAndIndex = (l, r, $) => { - const kind = l.required ? "required" : "optional"; - if (!r.signature.allows(l.key)) return null; - const value$1 = intersectNodesRoot(l.value, r.value, $); - if (value$1 instanceof Disjoint) return kind === "optional" ? $.node("optional", { - key: l.key, - value: $ark.intrinsic.never.internal - }) : value$1.withPrefixKey(l.key, l.kind); - return null; -}; -const implementation = implementNode({ - kind: "structure", - hasAssociatedError: false, - normalize: (schema$1) => schema$1, - applyConfig: (schema$1, config) => { - if (!schema$1.undeclared && config.onUndeclaredKey !== "ignore") return { - ...schema$1, - undeclared: config.onUndeclaredKey - }; - return schema$1; - }, - keys: { - required: { - child: true, - parse: constraintKeyParser("required"), - reduceIo: (ioKind, inner, nodes) => { - inner.required = append(inner.required, nodes.map((node$1) => node$1[ioKind])); - return; - } - }, - optional: { - child: true, - parse: constraintKeyParser("optional"), - reduceIo: (ioKind, inner, nodes) => { - if (ioKind === "in") { - inner.optional = nodes.map((node$1) => node$1.in); - return; - } - for (const node$1 of nodes) inner[node$1.outProp.kind] = append(inner[node$1.outProp.kind], node$1.outProp.out); - } - }, - index: { - child: true, - parse: constraintKeyParser("index") - }, - sequence: { - child: true, - parse: constraintKeyParser("sequence") - }, - undeclared: { - parse: (behavior) => behavior === "ignore" ? void 0 : behavior, - reduceIo: (ioKind, inner, value$1) => { - if (value$1 !== "delete") return; - if (ioKind === "in") delete inner.undeclared; - else inner.undeclared = "reject"; - } - } - }, - defaults: { description: structuralDescription }, - intersections: { structure: (l, r, ctx) => { - const lInner = { ...l.inner }; - const rInner = { ...r.inner }; - const disjointResult = new Disjoint(); - if (l.undeclared) { - const lKey = l.keyof(); - for (const k of r.requiredKeys) if (!lKey.allows(k)) disjointResult.add("presence", $ark.intrinsic.never.internal, r.propsByKey[k].value, { path: [k] }); - if (rInner.optional) rInner.optional = rInner.optional.filter((n) => lKey.allows(n.key)); - if (rInner.index) rInner.index = rInner.index.flatMap((n) => { - if (n.signature.extends(lKey)) return n; - const indexOverlap = intersectNodesRoot(lKey, n.signature, ctx.$); - if (indexOverlap instanceof Disjoint) return []; - const normalized = normalizeIndex(indexOverlap, n.value, ctx.$); - if (normalized.required) rInner.required = conflatenate(rInner.required, normalized.required); - if (normalized.optional) rInner.optional = conflatenate(rInner.optional, normalized.optional); - return normalized.index ?? 
[]; - }); - } - if (r.undeclared) { - const rKey = r.keyof(); - for (const k of l.requiredKeys) if (!rKey.allows(k)) disjointResult.add("presence", l.propsByKey[k].value, $ark.intrinsic.never.internal, { path: [k] }); - if (lInner.optional) lInner.optional = lInner.optional.filter((n) => rKey.allows(n.key)); - if (lInner.index) lInner.index = lInner.index.flatMap((n) => { - if (n.signature.extends(rKey)) return n; - const indexOverlap = intersectNodesRoot(rKey, n.signature, ctx.$); - if (indexOverlap instanceof Disjoint) return []; - const normalized = normalizeIndex(indexOverlap, n.value, ctx.$); - if (normalized.required) lInner.required = conflatenate(lInner.required, normalized.required); - if (normalized.optional) lInner.optional = conflatenate(lInner.optional, normalized.optional); - return normalized.index ?? []; - }); - } - const baseInner = {}; - if (l.undeclared || r.undeclared) baseInner.undeclared = l.undeclared === "reject" || r.undeclared === "reject" ? "reject" : "delete"; - const childIntersectionResult = intersectConstraints({ - kind: "structure", - baseInner, - l: flattenConstraints(lInner), - r: flattenConstraints(rInner), - roots: [], - ctx - }); - if (childIntersectionResult instanceof Disjoint) disjointResult.push(...childIntersectionResult); - if (disjointResult.length) return disjointResult; - return childIntersectionResult; - } }, - reduce: (inner, $) => { - if (inner.index) { - if (!(inner.required || inner.optional)) return; - let updated = false; - const requiredProps = inner.required ?? []; - const optionalProps = inner.optional ?? []; - const newOptionalProps = [...optionalProps]; - for (const index of inner.index) { - for (const requiredProp of requiredProps) { - const intersection = intersectPropsAndIndex(requiredProp, index, $); - if (intersection instanceof Disjoint) return intersection; - } - for (const [indx, optionalProp] of optionalProps.entries()) { - const intersection = intersectPropsAndIndex(optionalProp, index, $); - if (intersection instanceof Disjoint) return intersection; - if (intersection === null) continue; - newOptionalProps[indx] = intersection; - updated = true; - } - } - if (updated) return $.node("structure", { - ...inner, - optional: newOptionalProps - }, { prereduced: true }); - } - } -}); -var StructureNode = class extends BaseConstraint { - impliedBasis = $ark.intrinsic.object.internal; - impliedSiblings = this.children.flatMap((n) => n.impliedSiblings ?? []); - props = conflatenate(this.required, this.optional); - propsByKey = flatMorph(this.props, (i, node$1) => [node$1.key, node$1]); - propsByKeyReference = registeredReference(this.propsByKey); - expression = structuralExpression(this); - requiredKeys = this.required?.map((node$1) => node$1.key) ?? []; - optionalKeys = this.optional?.map((node$1) => node$1.key) ?? 
[]; - literalKeys = [...this.requiredKeys, ...this.optionalKeys]; - _keyof; - keyof() { - if (this._keyof) return this._keyof; - let branches = this.$.units(this.literalKeys).branches; - if (this.index) for (const { signature } of this.index) branches = branches.concat(signature.branches); - return this._keyof = this.$.node("union", branches); - } - map(flatMapProp) { - return this.$.node("structure", this.props.flatMap(flatMapProp).reduce((structureInner, mapped) => { - const originalProp = this.propsByKey[mapped.key]; - if (isNode(mapped)) { - if (mapped.kind !== "required" && mapped.kind !== "optional") return throwParseError(`Map result must have kind "required" or "optional" (was ${mapped.kind})`); - structureInner[mapped.kind] = append(structureInner[mapped.kind], mapped); - return structureInner; - } - const mappedKind = mapped.kind ?? originalProp?.kind ?? "required"; - const mappedPropInner = flatMorph(mapped, (k, v) => k in Optional.implementation.keys ? [k, v] : []); - structureInner[mappedKind] = append(structureInner[mappedKind], this.$.node(mappedKind, mappedPropInner)); - return structureInner; - }, {})); - } - assertHasKeys(keys) { - const invalidKeys = keys.filter((k) => !typeOrTermExtends(k, this.keyof())); - if (invalidKeys.length) return throwParseError(writeInvalidKeysMessage(this.expression, invalidKeys)); - } - get(indexer, ...path$1) { - let value$1; - let required = false; - const key = indexerToKey(indexer); - if ((typeof key === "string" || typeof key === "symbol") && this.propsByKey[key]) { - value$1 = this.propsByKey[key].value; - required = this.propsByKey[key].required; - } - if (this.index) { - for (const n of this.index) if (typeOrTermExtends(key, n.signature)) value$1 = value$1?.and(n.value) ?? n.value; - } - if (this.sequence && typeOrTermExtends(key, $ark.intrinsic.nonNegativeIntegerString)) if (hasArkKind(key, "root")) { - if (this.sequence.variadic) value$1 = value$1?.and(this.sequence.element) ?? this.sequence.element; - } else { - const index = Number.parseInt(key); - if (index < this.sequence.prevariadic.length) { - const fixedElement = this.sequence.prevariadic[index].node; - value$1 = value$1?.and(fixedElement) ?? fixedElement; - required ||= index < this.sequence.prefixLength; - } else if (this.sequence.variadic) { - const nonFixedElement = this.$.node("union", this.sequence.variadicOrPostfix); - value$1 = value$1?.and(nonFixedElement) ?? nonFixedElement; - } - } - if (!value$1) { - if (this.sequence?.variadic && hasArkKind(key, "root") && key.extends($ark.intrinsic.number)) return throwParseError(writeNumberIndexMessage(key.expression, this.sequence.expression)); - return throwParseError(writeInvalidKeysMessage(this.expression, [key])); - } - const result = value$1.get(...path$1); - return required ? result : result.or($ark.intrinsic.undefined); - } - pick(...keys) { - this.assertHasKeys(keys); - return this.$.node("structure", this.filterKeys("pick", keys)); - } - omit(...keys) { - this.assertHasKeys(keys); - return this.$.node("structure", this.filterKeys("omit", keys)); - } - optionalize() { - const { required,...inner } = this.inner; - return this.$.node("structure", { - ...inner, - optional: this.props.map((prop) => prop.hasKind("required") ? this.$.node("optional", prop.inner) : prop) - }); - } - require() { - const { optional,...inner } = this.inner; - return this.$.node("structure", { - ...inner, - required: this.props.map((prop) => prop.hasKind("optional") ? 
{ - key: prop.key, - value: prop.value - } : prop) - }); - } - merge(r) { - const inner = this.filterKeys("omit", [r.keyof()]); - if (r.required) inner.required = append(inner.required, r.required); - if (r.optional) inner.optional = append(inner.optional, r.optional); - if (r.index) inner.index = append(inner.index, r.index); - if (r.sequence) inner.sequence = r.sequence; - if (r.undeclared) inner.undeclared = r.undeclared; - else delete inner.undeclared; - return this.$.node("structure", inner); - } - filterKeys(operation, keys) { - const result = makeRootAndArrayPropertiesMutable(this.inner); - const shouldKeep = (key) => { - const matchesKey = keys.some((k) => typeOrTermExtends(key, k)); - return operation === "pick" ? matchesKey : !matchesKey; - }; - if (result.required) result.required = result.required.filter((prop) => shouldKeep(prop.key)); - if (result.optional) result.optional = result.optional.filter((prop) => shouldKeep(prop.key)); - if (result.index) result.index = result.index.filter((index) => shouldKeep(index.signature)); - return result; - } - traverseAllows = (data, ctx) => this._traverse("Allows", data, ctx); - traverseApply = (data, ctx) => this._traverse("Apply", data, ctx); - _traverse = (traversalKind, data, ctx) => { - const errorCount = ctx?.currentErrorCount ?? 0; - for (let i = 0; i < this.props.length; i++) if (traversalKind === "Allows") { - if (!this.props[i].traverseAllows(data, ctx)) return false; - } else { - this.props[i].traverseApply(data, ctx); - if (ctx.failFast && ctx.currentErrorCount > errorCount) return false; - } - if (this.sequence) if (traversalKind === "Allows") { - if (!this.sequence.traverseAllows(data, ctx)) return false; - } else { - this.sequence.traverseApply(data, ctx); - if (ctx.failFast && ctx.currentErrorCount > errorCount) return false; - } - if (this.index || this.undeclared === "reject") { - const keys = Object.keys(data); - keys.push(...Object.getOwnPropertySymbols(data)); - for (let i = 0; i < keys.length; i++) { - const k = keys[i]; - if (this.index) { - for (const node$1 of this.index) if (node$1.signature.traverseAllows(k, ctx)) if (traversalKind === "Allows") { - const result = traverseKey(k, () => node$1.value.traverseAllows(data[k], ctx), ctx); - if (!result) return false; - } else { - traverseKey(k, () => node$1.value.traverseApply(data[k], ctx), ctx); - if (ctx.failFast && ctx.currentErrorCount > errorCount) return false; - } - } - if (this.undeclared === "reject" && !this.declaresKey(k)) { - if (traversalKind === "Allows") return false; - ctx.errorFromNodeContext({ - code: "predicate", - expected: "removed", - actual: "", - relativePath: [k], - meta: this.meta - }); - if (ctx.failFast) return false; - } - } - } - if (this.structuralMorph && ctx && !ctx.hasError()) ctx.queueMorphs([this.structuralMorph]); - return true; - }; - get defaultable() { - return this.cacheGetter("defaultable", this.optional?.filter((o) => o.hasDefault()) ?? 
[]); - } - declaresKey = (k) => k in this.propsByKey || this.index?.some((n) => n.signature.allows(k)) || this.sequence !== void 0 && $ark.intrinsic.nonNegativeIntegerString.allows(k); - _compileDeclaresKey(js) { - const parts = []; - if (this.props.length) parts.push(`k in ${this.propsByKeyReference}`); - if (this.index) for (const index of this.index) parts.push(js.invoke(index.signature, { - kind: "Allows", - arg: "k" - })); - if (this.sequence) parts.push("$ark.intrinsic.nonNegativeIntegerString.allows(k)"); - return parts.join(" || ") || "false"; - } - get structuralMorph() { - return this.cacheGetter("structuralMorph", getPossibleMorph(this)); - } - structuralMorphRef = this.structuralMorph && registeredReference(this.structuralMorph); - compile(js) { - if (js.traversalKind === "Apply") js.initializeErrorCount(); - for (const prop of this.props) { - js.check(prop); - if (js.traversalKind === "Apply") js.returnIfFailFast(); - } - if (this.sequence) { - js.check(this.sequence); - if (js.traversalKind === "Apply") js.returnIfFailFast(); - } - if (this.index || this.undeclared === "reject") { - js.const("keys", "Object.keys(data)"); - js.line("keys.push(...Object.getOwnPropertySymbols(data))"); - js.for("i < keys.length", () => this.compileExhaustiveEntry(js)); - } - if (js.traversalKind === "Allows") return js.return(true); - if (this.structuralMorphRef) js.if("ctx && !ctx.hasError()", () => { - js.line(`ctx.queueMorphs([`); - precompileMorphs(js, this); - return js.line("])"); - }); - } - compileExhaustiveEntry(js) { - js.const("k", "keys[i]"); - if (this.index) for (const node$1 of this.index) js.if(`${js.invoke(node$1.signature, { - arg: "k", - kind: "Allows" - })}`, () => js.traverseKey("k", "data[k]", node$1.value)); - if (this.undeclared === "reject") js.if(`!(${this._compileDeclaresKey(js)})`, () => { - if (js.traversalKind === "Allows") return js.return(false); - return js.line(`ctx.errorFromNodeContext({ code: "predicate", expected: "removed", actual: "", relativePath: [k], meta: ${this.compiledMeta} })`).if("ctx.failFast", () => js.return()); - }); - return js; - } - reduceJsonSchema(schema$1, ctx) { - switch (schema$1.type) { - case "object": return this.reduceObjectJsonSchema(schema$1, ctx); - case "array": - const arraySchema = this.sequence?.reduceJsonSchema(schema$1, ctx) ?? schema$1; - if (this.props.length || this.index) return ctx.fallback.arrayObject({ - code: "arrayObject", - base: arraySchema, - object: this.reduceObjectJsonSchema({ type: "object" }, ctx) - }); - return arraySchema; - default: return ToJsonSchema.throwInternalOperandError("structure", schema$1); - } - } - reduceObjectJsonSchema(schema$1, ctx) { - if (this.props.length) { - schema$1.properties = {}; - for (const prop of this.props) { - const valueSchema = prop.value.toJsonSchemaRecurse(ctx); - if (typeof prop.key === "symbol") { - ctx.fallback.symbolKey({ - code: "symbolKey", - base: schema$1, - key: prop.key, - value: valueSchema, - optional: prop.optional - }); - continue; - } - if (prop.hasDefault()) { - const value$1 = typeof prop.default === "function" ? prop.default() : prop.default; - valueSchema.default = $ark.intrinsic.jsonData.allows(value$1) ? 
value$1 : ctx.fallback.defaultValue({ - code: "defaultValue", - base: valueSchema, - value: value$1 - }); - } - schema$1.properties[prop.key] = valueSchema; - } - if (this.requiredKeys.length && schema$1.properties) schema$1.required = this.requiredKeys.filter((k) => typeof k === "string" && k in schema$1.properties); - } - if (this.index) for (const index of this.index) { - const valueJsonSchema = index.value.toJsonSchemaRecurse(ctx); - if (index.signature.equals($ark.intrinsic.string)) { - schema$1.additionalProperties = valueJsonSchema; - continue; - } - for (const keyBranch of index.signature.branches) { - if (!keyBranch.extends($ark.intrinsic.string)) { - schema$1 = ctx.fallback.symbolKey({ - code: "symbolKey", - base: schema$1, - key: null, - value: valueJsonSchema, - optional: false - }); - continue; - } - let keySchema = { type: "string" }; - if (keyBranch.hasKind("morph")) keySchema = ctx.fallback.morph({ - code: "morph", - base: keyBranch.in.toJsonSchemaRecurse(ctx), - out: keyBranch.out.toJsonSchemaRecurse(ctx) - }); - if (!keyBranch.hasKind("intersection")) return throwInternalError(`Unexpected index branch kind ${keyBranch.kind}.`); - const { pattern } = keyBranch.inner; - if (pattern) { - const keySchemaWithPattern = Object.assign(keySchema, { pattern: pattern[0].rule }); - for (let i = 1; i < pattern.length; i++) keySchema = ctx.fallback.patternIntersection({ - code: "patternIntersection", - base: keySchemaWithPattern, - pattern: pattern[i].rule - }); - schema$1.patternProperties ??= {}; - schema$1.patternProperties[keySchemaWithPattern.pattern] = valueJsonSchema; - } - } - } - if (this.undeclared && !schema$1.additionalProperties) schema$1.additionalProperties = false; - return schema$1; - } -}; -const defaultableMorphsCache = {}; -const constructStructuralMorphCacheKey = (node$1) => { - let cacheKey = ""; - for (let i = 0; i < node$1.defaultable.length; i++) cacheKey += node$1.defaultable[i].defaultValueMorphRef; - if (node$1.sequence?.defaultValueMorphsReference) cacheKey += node$1.sequence?.defaultValueMorphsReference; - if (node$1.undeclared === "delete") { - cacheKey += "delete !("; - if (node$1.required) for (const n of node$1.required) cacheKey += n.compiledKey + " | "; - if (node$1.optional) for (const n of node$1.optional) cacheKey += n.compiledKey + " | "; - if (node$1.index) for (const index of node$1.index) cacheKey += index.signature.id + " | "; - if (node$1.sequence) if (node$1.sequence.maxLength === null) cacheKey += intrinsic.nonNegativeIntegerString.id; - else for (let i = 0; i < node$1.sequence.tuple.length; i++) cacheKey += i + " | "; - cacheKey += ")"; - } - return cacheKey; -}; -const getPossibleMorph = (node$1) => { - const cacheKey = constructStructuralMorphCacheKey(node$1); - if (!cacheKey) return void 0; - if (defaultableMorphsCache[cacheKey]) return defaultableMorphsCache[cacheKey]; - const $arkStructuralMorph = (data, ctx) => { - for (let i = 0; i < node$1.defaultable.length; i++) if (!(node$1.defaultable[i].key in data)) node$1.defaultable[i].defaultValueMorph(data, ctx); - if (node$1.sequence?.defaultables) for (let i = data.length - node$1.sequence.prefixLength; i < node$1.sequence.defaultables.length; i++) node$1.sequence.defaultValueMorphs[i](data, ctx); - if (node$1.undeclared === "delete") { - for (const k in data) if (!node$1.declaresKey(k)) delete data[k]; - } - return data; - }; - return defaultableMorphsCache[cacheKey] = $arkStructuralMorph; -}; -const precompileMorphs = (js, node$1) => { - const requiresContext = 
node$1.defaultable.some((node$2) => node$2.defaultValueMorph.length === 2) || node$1.sequence?.defaultValueMorphs.some((morph) => morph.length === 2); - const args$1 = `(data${requiresContext ? ", ctx" : ""})`; - return js.block(`${args$1} => `, (js$1) => { - for (let i = 0; i < node$1.defaultable.length; i++) { - const { serializedKey, defaultValueMorphRef } = node$1.defaultable[i]; - js$1.if(`!(${serializedKey} in data)`, (js$2) => js$2.line(`${defaultValueMorphRef}${args$1}`)); - } - if (node$1.sequence?.defaultables) js$1.for(`i < ${node$1.sequence.defaultables.length}`, (js$2) => js$2.set(`data[i]`, 5), `data.length - ${node$1.sequence.prefixLength}`); - if (node$1.undeclared === "delete") js$1.forIn("data", (js$2) => js$2.if(`!(${node$1._compileDeclaresKey(js$2)})`, (js$3) => js$3.line(`delete data[k]`))); - return js$1.return("data"); - }); -}; -const Structure = { - implementation, - Node: StructureNode -}; -const indexerToKey = (indexable) => { - if (hasArkKind(indexable, "root") && indexable.hasKind("unit")) indexable = indexable.unit; - if (typeof indexable === "number") indexable = `${indexable}`; - return indexable; -}; -const writeNumberIndexMessage = (indexExpression, sequenceExpression) => `${indexExpression} is not allowed as an array index on ${sequenceExpression}. Use the 'nonNegativeIntegerString' keyword instead.`; -/** extract enumerable named props from an index signature */ -const normalizeIndex = (signature, value$1, $) => { - const [enumerableBranches, nonEnumerableBranches] = spliterate(signature.branches, (k) => k.hasKind("unit")); - if (!enumerableBranches.length) return { index: $.node("index", { - signature, - value: value$1 - }) }; - const normalized = {}; - for (const n of enumerableBranches) { - const prop = $.node("required", { - key: n.unit, - value: value$1 - }); - normalized[prop.kind] = append(normalized[prop.kind], prop); - } - if (nonEnumerableBranches.length) normalized.index = $.node("index", { - signature: nonEnumerableBranches, - value: value$1 - }); - return normalized; -}; -const typeKeyToString = (k) => hasArkKind(k, "root") ? k.expression : printable(k); -const writeInvalidKeysMessage = (o, keys) => `Key${keys.length === 1 ? "" : "s"} ${keys.map(typeKeyToString).join(", ")} ${keys.length === 1 ? 
"does" : "do"} not exist on ${o}`; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/kinds.js -const nodeImplementationsByKind = { - ...boundImplementationsByKind, - alias: Alias.implementation, - domain: Domain.implementation, - unit: Unit.implementation, - proto: Proto.implementation, - union: Union.implementation, - morph: Morph.implementation, - intersection: Intersection.implementation, - divisor: Divisor.implementation, - pattern: Pattern.implementation, - predicate: Predicate.implementation, - required: Required$1.implementation, - optional: Optional.implementation, - index: Index.implementation, - sequence: Sequence.implementation, - structure: Structure.implementation -}; -$ark.defaultConfig = withAlphabetizedKeys(Object.assign(flatMorph(nodeImplementationsByKind, (kind, implementation$22) => [kind, implementation$22.defaults]), { - jitless: envHasCsp(), - clone: deepClone, - onUndeclaredKey: "ignore", - exactOptionalPropertyTypes: true, - numberAllowsNaN: false, - dateAllowsInvalid: false, - onFail: null, - keywords: {}, - toJsonSchema: ToJsonSchema.defaultConfig -})); -$ark.resolvedConfig = mergeConfigs($ark.defaultConfig, $ark.config); -const nodeClassesByKind = { - ...boundClassesByKind, - alias: Alias.Node, - domain: Domain.Node, - unit: Unit.Node, - proto: Proto.Node, - union: Union.Node, - morph: Morph.Node, - intersection: Intersection.Node, - divisor: Divisor.Node, - pattern: Pattern.Node, - predicate: Predicate.Node, - required: Required$1.Node, - optional: Optional.Node, - index: Index.Node, - sequence: Sequence.Node, - structure: Structure.Node -}; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/module.js -var RootModule = class extends DynamicBase { - get [arkKind]() { - return "module"; - } -}; -const bindModule = (module, $) => new RootModule(flatMorph(module, (alias, value$1) => [alias, hasArkKind(value$1, "module") ? bindModule(value$1, $) : $.bindReference(value$1)])); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/scope.js -const schemaBranchesOf = (schema$1) => isArray(schema$1) ? schema$1 : "branches" in schema$1 && isArray(schema$1.branches) ? 
schema$1.branches : void 0; -const throwMismatchedNodeRootError = (expected, actual) => throwParseError(`Node of kind ${actual} is not valid as a ${expected} definition`); -const writeDuplicateAliasError = (alias) => `#${alias} duplicates public alias ${alias}`; -const scopesByName = {}; -$ark.ambient ??= {}; -let rawUnknownUnion; -const rootScopeFnName = "function $"; -const precompile = (references) => bindPrecompilation(references, precompileReferences(references)); -const bindPrecompilation = (references, precompiler) => { - const precompilation = precompiler.write(rootScopeFnName, 4); - const compiledTraversals = precompiler.compile()(); - for (const node$1 of references) { - if (node$1.precompilation) continue; - node$1.traverseAllows = compiledTraversals[`${node$1.id}Allows`].bind(compiledTraversals); - if (node$1.isRoot() && !node$1.allowsRequiresContext) node$1.allows = node$1.traverseAllows; - node$1.traverseApply = compiledTraversals[`${node$1.id}Apply`].bind(compiledTraversals); - if (compiledTraversals[`${node$1.id}Optimistic`]) node$1.traverseOptimistic = compiledTraversals[`${node$1.id}Optimistic`].bind(compiledTraversals); - node$1.precompilation = precompilation; - } -}; -const precompileReferences = (references) => new CompiledFunction().return(references.reduce((js, node$1) => { - const allowsCompiler = new NodeCompiler({ kind: "Allows" }).indent(); - node$1.compile(allowsCompiler); - const allowsJs = allowsCompiler.write(`${node$1.id}Allows`); - const applyCompiler = new NodeCompiler({ kind: "Apply" }).indent(); - node$1.compile(applyCompiler); - const applyJs = applyCompiler.write(`${node$1.id}Apply`); - const result = `${js}${allowsJs},\n${applyJs},\n`; - if (!node$1.hasKind("union")) return result; - const optimisticCompiler = new NodeCompiler({ - kind: "Allows", - optimistic: true - }).indent(); - node$1.compile(optimisticCompiler); - const optimisticJs = optimisticCompiler.write(`${node$1.id}Optimistic`); - return `${result}${optimisticJs},\n`; -}, "{\n") + "}"); -var BaseScope = class { - config; - resolvedConfig; - name; - get [arkKind]() { - return "scope"; - } - referencesById = {}; - references = []; - resolutions = {}; - exportedNames = []; - aliases = {}; - resolved = false; - nodesByHash = {}; - intrinsic; - constructor(def, config) { - this.config = mergeConfigs($ark.config, config); - this.resolvedConfig = mergeConfigs($ark.resolvedConfig, config); - this.name = this.resolvedConfig.name ?? `anonymousScope${Object.keys(scopesByName).length}`; - if (this.name in scopesByName) throwParseError(`A Scope already named ${this.name} already exists`); - scopesByName[this.name] = this; - const aliasEntries = Object.entries(def).map((entry) => this.preparseOwnAliasEntry(...entry)); - for (const [k, v] of aliasEntries) { - let name = k; - if (k[0] === "#") { - name = k.slice(1); - if (name in this.aliases) throwParseError(writeDuplicateAliasError(name)); - this.aliases[name] = v; - } else { - if (name in this.aliases) throwParseError(writeDuplicateAliasError(k)); - this.aliases[name] = v; - this.exportedNames.push(name); - } - if (!hasArkKind(v, "module") && !hasArkKind(v, "generic") && !isThunk(v)) { - const preparsed = this.preparseOwnDefinitionFormat(v, { alias: name }); - this.resolutions[name] = hasArkKind(preparsed, "root") ? 
this.bindReference(preparsed) : this.createParseContext(preparsed).id; - } - } - rawUnknownUnion ??= this.node("union", { branches: [ - "string", - "number", - "object", - "bigint", - "symbol", - { unit: true }, - { unit: false }, - { unit: void 0 }, - { unit: null } - ] }, { prereduced: true }); - this.nodesByHash[rawUnknownUnion.hash] = this.node("intersection", {}, { prereduced: true }); - this.intrinsic = $ark.intrinsic ? flatMorph($ark.intrinsic, (k, v) => k.startsWith("json") ? [] : [k, this.bindReference(v)]) : {}; - } - cacheGetter(name, value$1) { - Object.defineProperty(this, name, { value: value$1 }); - return value$1; - } - get internal() { - return this; - } - _json; - get json() { - if (!this._json) this.export(); - return this._json; - } - defineSchema(def) { - return def; - } - generic = (...params) => { - const $ = this; - return (def, possibleHkt) => new GenericRoot(params, possibleHkt ? new LazyGenericBody(def) : def, $, $, possibleHkt ?? null); - }; - units = (values, opts) => { - const uniqueValues = []; - for (const value$1 of values) if (!uniqueValues.includes(value$1)) uniqueValues.push(value$1); - const branches = uniqueValues.map((unit) => this.node("unit", { unit }, opts)); - return this.node("union", branches, { - ...opts, - prereduced: true - }); - }; - lazyResolutions = []; - lazilyResolve(resolve, syntheticAlias) { - const node$1 = this.node("alias", { - reference: syntheticAlias ?? "synthetic", - resolve - }, { prereduced: true }); - if (!this.resolved) this.lazyResolutions.push(node$1); - return node$1; - } - schema = (schema$1, opts) => this.finalize(this.parseSchema(schema$1, opts)); - parseSchema = (schema$1, opts) => this.node(schemaKindOf(schema$1), schema$1, opts); - preparseNode(kinds, schema$1, opts) { - let kind = typeof kinds === "string" ? kinds : schemaKindOf(schema$1, kinds); - if (isNode(schema$1) && schema$1.kind === kind) return schema$1; - if (kind === "alias" && !opts?.prereduced) { - const { reference: reference$1 } = Alias.implementation.normalize(schema$1, this); - if (reference$1.startsWith("$")) { - const resolution = this.resolveRoot(reference$1.slice(1)); - schema$1 = resolution; - kind = resolution.kind; - } - } else if (kind === "union" && hasDomain(schema$1, "object")) { - const branches = schemaBranchesOf(schema$1); - if (branches?.length === 1) { - schema$1 = branches[0]; - kind = schemaKindOf(schema$1); - } - } - if (isNode(schema$1) && schema$1.kind === kind) return schema$1; - const impl = nodeImplementationsByKind[kind]; - const normalizedSchema = impl.normalize?.(schema$1, this) ?? schema$1; - if (isNode(normalizedSchema)) return normalizedSchema.kind === kind ? normalizedSchema : throwMismatchedNodeRootError(kind, normalizedSchema.kind); - return { - ...opts, - $: this, - kind, - def: normalizedSchema, - prefix: opts.alias ?? kind - }; - } - bindReference(reference$1) { - let bound; - if (isNode(reference$1)) bound = reference$1.$ === this ? reference$1 : new reference$1.constructor(reference$1.attachments, this); - else bound = reference$1.$ === this ? reference$1 : new GenericRoot(reference$1.params, reference$1.bodyDef, reference$1.$, this, reference$1.hkt); - if (!this.resolved) Object.assign(this.referencesById, bound.referencesById); - return bound; - } - resolveRoot(name) { - return this.maybeResolveRoot(name) ?? 
throwParseError(writeUnresolvableMessage(name)); - } - maybeResolveRoot(name) { - const result = this.maybeResolve(name); - if (hasArkKind(result, "generic")) return; - return result; - } - /** If name is a valid reference to a submodule alias, return its resolution */ - maybeResolveSubalias(name) { - return maybeResolveSubalias(this.aliases, name) ?? maybeResolveSubalias(this.ambient, name); - } - get ambient() { - return $ark.ambient; - } - maybeResolve(name) { - const cached$1 = this.resolutions[name]; - if (cached$1) { - if (typeof cached$1 !== "string") return this.bindReference(cached$1); - const v = nodesByRegisteredId[cached$1]; - if (hasArkKind(v, "root")) return this.resolutions[name] = v; - if (hasArkKind(v, "context")) { - if (v.phase === "resolving") return this.node("alias", { reference: `$${name}` }, { prereduced: true }); - if (v.phase === "resolved") return throwInternalError(`Unexpected resolved context for was uncached by its scope: ${printable(v)}`); - v.phase = "resolving"; - const node$1 = this.bindReference(this.parseOwnDefinitionFormat(v.def, v)); - v.phase = "resolved"; - nodesByRegisteredId[node$1.id] = node$1; - nodesByRegisteredId[v.id] = node$1; - return this.resolutions[name] = node$1; - } - return throwInternalError(`Unexpected nodesById entry for ${cached$1}: ${printable(v)}`); - } - let def = this.aliases[name] ?? this.ambient?.[name]; - if (!def) return this.maybeResolveSubalias(name); - def = this.normalizeRootScopeValue(def); - if (hasArkKind(def, "generic")) return this.resolutions[name] = this.bindReference(def); - if (hasArkKind(def, "module")) { - if (!def.root) throwParseError(writeMissingSubmoduleAccessMessage(name)); - return this.resolutions[name] = this.bindReference(def.root); - } - return this.resolutions[name] = this.parse(def, { alias: name }); - } - createParseContext(input) { - const id = input.id ?? registerNodeId(input.prefix); - return nodesByRegisteredId[id] = Object.assign(input, { - [arkKind]: "context", - $: this, - id, - phase: "unresolved" - }); - } - traversal(root) { - return new Traversal(root, this.resolvedConfig); - } - import(...names) { - return new RootModule(flatMorph(this.export(...names), (alias, value$1) => [`#${alias}`, value$1])); - } - precompilation; - _exportedResolutions; - _exports; - export(...names) { - if (!this._exports) { - this._exports = {}; - for (const name of this.exportedNames) { - const def = this.aliases[name]; - this._exports[name] = hasArkKind(def, "module") ? bindModule(def, this) : bootstrapAliasReferences(this.maybeResolve(name)); - } - for (const node$1 of this.lazyResolutions) node$1.resolution; - this._exportedResolutions = resolutionsOfModule(this, this._exports); - this._json = resolutionsToJson(this._exportedResolutions); - Object.assign(this.resolutions, this._exportedResolutions); - this.references = Object.values(this.referencesById); - if (!this.resolvedConfig.jitless) { - const precompiler = precompileReferences(this.references); - this.precompilation = precompiler.write(rootScopeFnName, 4); - bindPrecompilation(this.references, precompiler); - } - this.resolved = true; - } - const namesToExport = names.length ? 
names : this.exportedNames; - return new RootModule(flatMorph(namesToExport, (_, name) => [name, this._exports[name]])); - } - resolve(name) { - return this.export()[name]; - } - node = (kinds, nodeSchema, opts = {}) => { - const ctxOrNode = this.preparseNode(kinds, nodeSchema, opts); - if (isNode(ctxOrNode)) return this.bindReference(ctxOrNode); - const ctx = this.createParseContext(ctxOrNode); - const node$1 = parseNode(ctx); - const bound = this.bindReference(node$1); - return nodesByRegisteredId[ctx.id] = bound; - }; - parse = (def, opts = {}) => this.finalize(this.parseDefinition(def, opts)); - parseDefinition(def, opts = {}) { - if (hasArkKind(def, "root")) return this.bindReference(def); - const ctxInputOrNode = this.preparseOwnDefinitionFormat(def, opts); - if (hasArkKind(ctxInputOrNode, "root")) return this.bindReference(ctxInputOrNode); - const ctx = this.createParseContext(ctxInputOrNode); - nodesByRegisteredId[ctx.id] = ctx; - let node$1 = this.bindReference(this.parseOwnDefinitionFormat(def, ctx)); - if (node$1.isCyclic) node$1 = withId(node$1, ctx.id); - nodesByRegisteredId[ctx.id] = node$1; - return node$1; - } - finalize(node$1) { - bootstrapAliasReferences(node$1); - if (!node$1.precompilation && !this.resolvedConfig.jitless) precompile(node$1.references); - return node$1; - } -}; -var SchemaScope = class extends BaseScope { - parseOwnDefinitionFormat(def, ctx) { - return parseNode(ctx); - } - preparseOwnDefinitionFormat(schema$1, opts) { - return this.preparseNode(schemaKindOf(schema$1), schema$1, opts); - } - preparseOwnAliasEntry(k, v) { - return [k, v]; - } - normalizeRootScopeValue(v) { - return v; - } -}; -const bootstrapAliasReferences = (resolution) => { - const aliases = resolution.references.filter((node$1) => node$1.hasKind("alias")); - for (const aliasNode of aliases) { - Object.assign(aliasNode.referencesById, aliasNode.resolution.referencesById); - for (const ref of resolution.references) if (aliasNode.id in ref.referencesById) Object.assign(ref.referencesById, aliasNode.referencesById); - } - return resolution; -}; -const resolutionsToJson = (resolutions) => flatMorph(resolutions, (k, v) => [k, hasArkKind(v, "root") || hasArkKind(v, "generic") ? v.json : hasArkKind(v, "module") ? resolutionsToJson(v) : throwInternalError(`Unexpected resolution ${printable(v)}`)]); -const maybeResolveSubalias = (base, name) => { - const dotIndex = name.indexOf("."); - if (dotIndex === -1) return; - const dotPrefix = name.slice(0, dotIndex); - const prefixSchema = base[dotPrefix]; - if (prefixSchema === void 0) return; - if (!hasArkKind(prefixSchema, "module")) return throwParseError(writeNonSubmoduleDotMessage(dotPrefix)); - const subalias = name.slice(dotIndex + 1); - const resolution = prefixSchema[subalias]; - if (resolution === void 0) return maybeResolveSubalias(prefixSchema, subalias); - if (hasArkKind(resolution, "root") || hasArkKind(resolution, "generic")) return resolution; - if (hasArkKind(resolution, "module")) return resolution.root ?? 
throwParseError(writeMissingSubmoduleAccessMessage(name)); - throwInternalError(`Unexpected resolution for alias '${name}': ${printable(resolution)}`); -}; -const schemaScope = (aliases, config) => new SchemaScope(aliases, config); -const rootSchemaScope = new SchemaScope({}); -const resolutionsOfModule = ($, typeSet) => { - const result = {}; - for (const k in typeSet) { - const v = typeSet[k]; - if (hasArkKind(v, "module")) { - const innerResolutions = resolutionsOfModule($, v); - const prefixedResolutions = flatMorph(innerResolutions, (innerK, innerV) => [`${k}.${innerK}`, innerV]); - Object.assign(result, prefixedResolutions); - } else if (hasArkKind(v, "root") || hasArkKind(v, "generic")) result[k] = v; - else throwInternalError(`Unexpected scope resolution ${printable(v)}`); - } - return result; -}; -const writeUnresolvableMessage = (token) => `'${token}' is unresolvable`; -const writeNonSubmoduleDotMessage = (name) => `'${name}' must reference a module to be accessed using dot syntax`; -const writeMissingSubmoduleAccessMessage = (name) => `Reference to submodule '${name}' must specify an alias`; -rootSchemaScope.export(); -const rootSchema = rootSchemaScope.schema; -const node = rootSchemaScope.node; -const defineSchema = rootSchemaScope.defineSchema; -const genericNode = rootSchemaScope.generic; - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/structure/shared.js -const arrayIndexSource = `^(?:0|[1-9]\\d*)$`; -const arrayIndexMatcher = new RegExp(arrayIndexSource); -const arrayIndexMatcherReference = registeredReference(arrayIndexMatcher); - -//#endregion -//#region node_modules/.pnpm/@ark+schema@0.46.0/node_modules/@ark/schema/out/intrinsic.js -const intrinsicBases = schemaScope({ - bigint: "bigint", - boolean: [{ unit: false }, { unit: true }], - false: { unit: false }, - never: [], - null: { unit: null }, - number: "number", - object: "object", - string: "string", - symbol: "symbol", - true: { unit: true }, - unknown: {}, - undefined: { unit: void 0 }, - Array, - Date -}, { prereducedAliases: true }).export(); -$ark.intrinsic = { ...intrinsicBases }; -const intrinsicRoots = schemaScope({ - integer: { - domain: "number", - divisor: 1 - }, - lengthBoundable: ["string", Array], - key: ["string", "symbol"], - nonNegativeIntegerString: { - domain: "string", - pattern: arrayIndexSource - } -}, { prereducedAliases: true }).export(); -Object.assign($ark.intrinsic, intrinsicRoots); -const intrinsicJson = schemaScope({ - jsonPrimitive: [ - "string", - "number", - { unit: true }, - { unit: false }, - { unit: null } - ], - jsonObject: { - domain: "object", - index: { - signature: "string", - value: "$jsonData" - } - }, - jsonData: ["$jsonPrimitive", "$jsonObject"] -}, { prereducedAliases: true }).export(); -const intrinsic = { - ...intrinsicBases, - ...intrinsicRoots, - ...intrinsicJson, - emptyStructure: node("structure", {}, { prereduced: true }) -}; -$ark.intrinsic = { ...intrinsic }; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/date.js -const isDateLiteral = (value$1) => typeof value$1 === "string" && value$1[0] === "d" && (value$1[1] === "'" || value$1[1] === "\"") && value$1.at(-1) === value$1[1]; -const isValidDate = (d) => d.toString() !== "Invalid Date"; -const extractDateLiteralSource = (literal) => literal.slice(2, -1); -const writeInvalidDateMessage = (source) => `'${source}' could not be parsed by the Date constructor`; -const tryParseDate = (source, errorOnFail) => 
maybeParseDate(source, errorOnFail); -const maybeParseDate = (source, errorOnFail) => { - const stringParsedDate = new Date(source); - if (isValidDate(stringParsedDate)) return stringParsedDate; - const epochMillis = tryParseNumber(source); - if (epochMillis !== void 0) { - const numberParsedDate = new Date(epochMillis); - if (isValidDate(numberParsedDate)) return numberParsedDate; - } - return errorOnFail ? throwParseError(errorOnFail === true ? writeInvalidDateMessage(source) : errorOnFail) : void 0; -}; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/enclosed.js -const parseEnclosed = (s, enclosing) => { - const enclosed = s.scanner.shiftUntil(untilLookaheadIsClosing[enclosingTokens[enclosing]]); - if (s.scanner.lookahead === "") return s.error(writeUnterminatedEnclosedMessage(enclosed, enclosing)); - s.scanner.shift(); - if (enclosing === "/") { - try { - new RegExp(enclosed); - } catch (e) { - throwParseError(String(e)); - } - s.root = s.ctx.$.node("intersection", { - domain: "string", - pattern: enclosed - }, { prereduced: true }); - } else if (isKeyOf(enclosing, enclosingQuote)) s.root = s.ctx.$.node("unit", { unit: enclosed }); - else { - const date = tryParseDate(enclosed, writeInvalidDateMessage(enclosed)); - s.root = s.ctx.$.node("unit", { - meta: enclosed, - unit: date - }); - } -}; -const enclosingQuote = { - "'": 1, - "\"": 1 -}; -const enclosingChar = { - "/": 1, - "'": 1, - "\"": 1 -}; -const enclosingTokens = { - "d'": "'", - "d\"": "\"", - "'": "'", - "\"": "\"", - "/": "/" -}; -const untilLookaheadIsClosing = { - "'": (scanner) => scanner.lookahead === `'`, - "\"": (scanner) => scanner.lookahead === `"`, - "/": (scanner) => scanner.lookahead === `/` -}; -const enclosingCharDescriptions = { - "\"": "double-quote", - "'": "single-quote", - "/": "forward slash" -}; -const writeUnterminatedEnclosedMessage = (fragment, enclosingStart) => `${enclosingStart}${fragment} requires a closing ${enclosingCharDescriptions[enclosingTokens[enclosingStart]]}`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/ast/validate.js -const writePrefixedPrivateReferenceMessage = (name) => `Private type references should not include '#'. Use '${name}' instead.`; -const shallowOptionalMessage = "Optional definitions like 'string?' are only valid as properties in an object or tuple"; -const shallowDefaultableMessage = "Defaultable definitions like 'number = 0' are only valid as properties in an object or tuple"; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/reduce/shared.js -const minComparators = { - ">": true, - ">=": true -}; -const maxComparators = { - "<": true, - "<=": true -}; -const invertedComparators = { - "<": ">", - ">": "<", - "<=": ">=", - ">=": "<=", - "==": "==" -}; -const writeUnmatchedGroupCloseMessage = (unscanned) => `Unmatched )${unscanned === "" ? 
"" : ` before ${unscanned}`}`; -const writeUnclosedGroupMessage = (missingChar) => `Missing ${missingChar}`; -const writeOpenRangeMessage = (min, comparator) => `Left bounds are only valid when paired with right bounds (try ...${comparator}${min})`; -const writeUnpairableComparatorMessage = (comparator) => `Left-bounded expressions must specify their limits using < or <= (was ${comparator})`; -const writeMultipleLeftBoundsMessage = (openLimit, openComparator, limit, comparator) => `An expression may have at most one left bound (parsed ${openLimit}${invertedComparators[openComparator]}, ${limit}${invertedComparators[comparator]})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/genericArgs.js -const parseGenericArgs = (name, g, s) => _parseGenericArgs(name, g, s, []); -const _parseGenericArgs = (name, g, s, argNodes) => { - const argState = s.parseUntilFinalizer(); - argNodes.push(argState.root); - if (argState.finalizer === ">") { - if (argNodes.length !== g.params.length) return s.error(writeInvalidGenericArgCountMessage(name, g.names, argNodes.map((arg) => arg.expression))); - return argNodes; - } - if (argState.finalizer === ",") return _parseGenericArgs(name, g, s, argNodes); - return argState.error(writeUnclosedGroupMessage(">")); -}; -const writeInvalidGenericArgCountMessage = (name, params, argDefs) => `${name}<${params.join(", ")}> requires exactly ${params.length} args (got ${argDefs.length}${argDefs.length === 0 ? "" : `: ${argDefs.join(", ")}`})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/unenclosed.js -const parseUnenclosed = (s) => { - const token = s.scanner.shiftUntilNextTerminator(); - if (token === "keyof") s.addPrefix("keyof"); - else s.root = unenclosedToNode(s, token); -}; -const parseGenericInstantiation = (name, g, s) => { - s.scanner.shiftUntilNonWhitespace(); - const lookahead = s.scanner.shift(); - if (lookahead !== "<") return s.error(writeInvalidGenericArgCountMessage(name, g.names, [])); - const parsedArgs = parseGenericArgs(name, g, s); - return g(...parsedArgs); -}; -const unenclosedToNode = (s, token) => maybeParseReference(s, token) ?? maybeParseUnenclosedLiteral(s, token) ?? s.error(token === "" ? s.scanner.lookahead === "#" ? writePrefixedPrivateReferenceMessage(s.shiftedByOne().scanner.shiftUntilNextTerminator()) : writeMissingOperandMessage(s) : writeUnresolvableMessage(token)); -const maybeParseReference = (s, token) => { - if (s.ctx.args?.[token]) { - const arg = s.ctx.args[token]; - if (typeof arg !== "string") return arg; - return s.ctx.$.node("alias", { reference: arg }, { prereduced: true }); - } - const resolution = s.ctx.$.maybeResolve(token); - if (hasArkKind(resolution, "root")) return resolution; - if (resolution === void 0) return; - if (hasArkKind(resolution, "generic")) return parseGenericInstantiation(token, resolution, s); - return throwParseError(`Unexpected resolution ${printable(resolution)}`); -}; -const maybeParseUnenclosedLiteral = (s, token) => { - const maybeNumber = tryParseWellFormedNumber(token); - if (maybeNumber !== void 0) return s.ctx.$.node("unit", { unit: maybeNumber }); - const maybeBigint = tryParseWellFormedBigint(token); - if (maybeBigint !== void 0) return s.ctx.$.node("unit", { unit: maybeBigint }); -}; -const writeMissingOperandMessage = (s) => { - const operator = s.previousOperator(); - return operator ? 
writeMissingRightOperandMessage(operator, s.scanner.unscanned) : writeExpressionExpectedMessage(s.scanner.unscanned); -}; -const writeMissingRightOperandMessage = (token, unscanned = "") => `Token '${token}' requires a right operand${unscanned ? ` before '${unscanned}'` : ""}`; -const writeExpressionExpectedMessage = (unscanned) => `Expected an expression${unscanned ? ` before '${unscanned}'` : ""}`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operand/operand.js -const parseOperand = (s) => s.scanner.lookahead === "" ? s.error(writeMissingOperandMessage(s)) : s.scanner.lookahead === "(" ? s.shiftedByOne().reduceGroupOpen() : s.scanner.lookaheadIsIn(enclosingChar) ? parseEnclosed(s, s.scanner.shift()) : s.scanner.lookaheadIsIn(whitespaceChars) ? parseOperand(s.shiftedByOne()) : s.scanner.lookahead === "d" ? s.scanner.nextLookahead in enclosingQuote ? parseEnclosed(s, `${s.scanner.shift()}${s.scanner.shift()}`) : parseUnenclosed(s) : parseUnenclosed(s); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/scanner.js -var ArkTypeScanner = class ArkTypeScanner extends Scanner { - shiftUntilNextTerminator() { - this.shiftUntilNonWhitespace(); - return this.shiftUntil(() => this.lookahead in ArkTypeScanner.terminatingChars); - } - static terminatingChars = { - "<": 1, - ">": 1, - "=": 1, - "|": 1, - "&": 1, - ")": 1, - "[": 1, - "%": 1, - ",": 1, - ":": 1, - "?": 1, - "#": 1, - ...whitespaceChars - }; - static finalizingLookaheads = { - ">": 1, - ",": 1, - "": 1, - "=": 1, - "?": 1 - }; - static lookaheadIsFinalizing = (lookahead, unscanned) => lookahead === ">" ? unscanned[0] === "=" ? unscanned[1] === "=" : unscanned.trimStart() === "" || isKeyOf(unscanned.trimStart()[0], ArkTypeScanner.terminatingChars) : lookahead === "=" ? unscanned[0] !== "=" : lookahead === "," || lookahead === "?"; -}; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/bounds.js -const parseBound = (s, start) => { - const comparator = shiftComparator(s, start); - if (s.root.hasKind("unit")) { - if (typeof s.root.unit === "number") { - s.reduceLeftBound(s.root.unit, comparator); - s.unsetRoot(); - return; - } - if (s.root.unit instanceof Date) { - const literal = `d'${s.root.description ?? s.root.unit.toISOString()}'`; - s.unsetRoot(); - s.reduceLeftBound(literal, comparator); - return; - } - } - return parseRightBound(s, comparator); -}; -const comparatorStartChars = { - "<": 1, - ">": 1, - "=": 1 -}; -const shiftComparator = (s, start) => s.scanner.lookaheadIs("=") ? `${start}${s.scanner.shift()}` : start; -const getBoundKinds = (comparator, limit, root, boundKind) => { - if (root.extends($ark.intrinsic.number)) { - if (typeof limit !== "number") return throwParseError(writeInvalidLimitMessage(comparator, limit, boundKind)); - return comparator === "==" ? ["min", "max"] : comparator[0] === ">" ? ["min"] : ["max"]; - } - if (root.extends($ark.intrinsic.lengthBoundable)) { - if (typeof limit !== "number") return throwParseError(writeInvalidLimitMessage(comparator, limit, boundKind)); - return comparator === "==" ? ["exactLength"] : comparator[0] === ">" ? ["minLength"] : ["maxLength"]; - } - if (root.extends($ark.intrinsic.Date)) return comparator === "==" ? ["after", "before"] : comparator[0] === ">" ? 
["after"] : ["before"]; - return throwParseError(writeUnboundableMessage(root.expression)); -}; -const openLeftBoundToRoot = (leftBound) => ({ - rule: isDateLiteral(leftBound.limit) ? extractDateLiteralSource(leftBound.limit) : leftBound.limit, - exclusive: leftBound.comparator.length === 1 -}); -const parseRightBound = (s, comparator) => { - const previousRoot = s.unsetRoot(); - const previousScannerIndex = s.scanner.location; - s.parseOperand(); - const limitNode = s.unsetRoot(); - const limitToken = s.scanner.sliceChars(previousScannerIndex, s.scanner.location); - s.root = previousRoot; - if (!limitNode.hasKind("unit") || typeof limitNode.unit !== "number" && !(limitNode.unit instanceof Date)) return s.error(writeInvalidLimitMessage(comparator, limitToken, "right")); - const limit = limitNode.unit; - const exclusive = comparator.length === 1; - const boundKinds = getBoundKinds(comparator, typeof limit === "number" ? limit : limitToken, previousRoot, "right"); - for (const kind of boundKinds) s.constrainRoot(kind, comparator === "==" ? { rule: limit } : { - rule: limit, - exclusive - }); - if (!s.branches.leftBound) return; - if (!isKeyOf(comparator, maxComparators)) return s.error(writeUnpairableComparatorMessage(comparator)); - const lowerBoundKind = getBoundKinds(s.branches.leftBound.comparator, s.branches.leftBound.limit, previousRoot, "left"); - s.constrainRoot(lowerBoundKind[0], openLeftBoundToRoot(s.branches.leftBound)); - s.branches.leftBound = null; -}; -const writeInvalidLimitMessage = (comparator, limit, boundKind) => `Comparator ${boundKind === "left" ? invertedComparators[comparator] : comparator} must be ${boundKind === "left" ? "preceded" : "followed"} by a corresponding literal (was ${limit})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/brand.js -const parseBrand = (s) => { - s.scanner.shiftUntilNonWhitespace(); - const brandName = s.scanner.shiftUntilNextTerminator(); - s.root = s.root.brand(brandName); -}; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/divisor.js -const parseDivisor = (s) => { - const divisorToken = s.scanner.shiftUntilNextTerminator(); - const divisor = tryParseInteger(divisorToken, { errorOnFail: writeInvalidDivisorMessage(divisorToken) }); - if (divisor === 0) s.error(writeInvalidDivisorMessage(0)); - s.root = s.root.constrain("divisor", divisor); -}; -const writeInvalidDivisorMessage = (divisor) => `% operator must be followed by a non-zero integer literal (was ${divisor})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/operator.js -const parseOperator = (s) => { - const lookahead = s.scanner.shift(); - return lookahead === "" ? s.finalize("") : lookahead === "[" ? s.scanner.shift() === "]" ? s.setRoot(s.root.array()) : s.error(incompleteArrayTokenMessage) : lookahead === "|" ? s.scanner.lookahead === ">" ? s.shiftedByOne().pushRootToBranch("|>") : s.pushRootToBranch(lookahead) : lookahead === "&" ? s.pushRootToBranch(lookahead) : lookahead === ")" ? s.finalizeGroup() : ArkTypeScanner.lookaheadIsFinalizing(lookahead, s.scanner.unscanned) ? s.finalize(lookahead) : isKeyOf(lookahead, comparatorStartChars) ? parseBound(s, lookahead) : lookahead === "%" ? parseDivisor(s) : lookahead === "#" ? parseBrand(s) : lookahead in whitespaceChars ? 
parseOperator(s) : s.error(writeUnexpectedCharacterMessage(lookahead)); -}; -const writeUnexpectedCharacterMessage = (char, shouldBe = "") => `'${char}' is not allowed here${shouldBe && ` (should be ${shouldBe})`}`; -const incompleteArrayTokenMessage = `Missing expected ']'`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/shift/operator/default.js -const parseDefault = (s) => { - const baseNode = s.unsetRoot(); - s.parseOperand(); - const defaultNode = s.unsetRoot(); - if (!defaultNode.hasKind("unit")) return s.error(writeNonLiteralDefaultMessage(defaultNode.expression)); - const defaultValue = defaultNode.unit instanceof Date ? () => new Date(defaultNode.unit) : defaultNode.unit; - return [ - baseNode, - "=", - defaultValue - ]; -}; -const writeNonLiteralDefaultMessage = (defaultDef) => `Default value '${defaultDef}' must a literal value`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/string.js -const parseString = (def, ctx) => { - const aliasResolution = ctx.$.maybeResolveRoot(def); - if (aliasResolution) return aliasResolution; - if (def.endsWith("[]")) { - const possibleElementResolution = ctx.$.maybeResolveRoot(def.slice(0, -2)); - if (possibleElementResolution) return possibleElementResolution.array(); - } - const s = new DynamicState(new ArkTypeScanner(def), ctx); - const node$1 = fullStringParse(s); - if (s.finalizer === ">") throwParseError(writeUnexpectedCharacterMessage(">")); - return node$1; -}; -const fullStringParse = (s) => { - s.parseOperand(); - let result = parseUntilFinalizer(s).root; - if (!result) return throwInternalError(`Root was unexpectedly unset after parsing string '${s.scanner.scanned}'`); - if (s.finalizer === "=") result = parseDefault(s); - else if (s.finalizer === "?") result = [result, "?"]; - s.scanner.shiftUntilNonWhitespace(); - if (s.scanner.lookahead) throwParseError(writeUnexpectedCharacterMessage(s.scanner.lookahead)); - return result; -}; -const parseUntilFinalizer = (s) => { - while (s.finalizer === void 0) next(s); - return s; -}; -const next = (s) => s.hasRoot() ? 
s.parseOperator() : s.parseOperand(); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/reduce/dynamic.js -var DynamicState = class DynamicState { - root; - branches = { - prefixes: [], - leftBound: null, - intersection: null, - union: null, - pipe: null - }; - finalizer; - groups = []; - scanner; - ctx; - constructor(scanner, ctx) { - this.scanner = scanner; - this.ctx = ctx; - } - error(message) { - return throwParseError(message); - } - hasRoot() { - return this.root !== void 0; - } - setRoot(root) { - this.root = root; - } - unsetRoot() { - const value$1 = this.root; - this.root = void 0; - return value$1; - } - constrainRoot(...args$1) { - this.root = this.root.constrain(args$1[0], args$1[1]); - } - finalize(finalizer) { - if (this.groups.length) return this.error(writeUnclosedGroupMessage(")")); - this.finalizeBranches(); - this.finalizer = finalizer; - } - reduceLeftBound(limit, comparator) { - const invertedComparator = invertedComparators[comparator]; - if (!isKeyOf(invertedComparator, minComparators)) return this.error(writeUnpairableComparatorMessage(comparator)); - if (this.branches.leftBound) return this.error(writeMultipleLeftBoundsMessage(this.branches.leftBound.limit, this.branches.leftBound.comparator, limit, invertedComparator)); - this.branches.leftBound = { - comparator: invertedComparator, - limit - }; - } - finalizeBranches() { - this.assertRangeUnset(); - if (this.branches.pipe) { - this.pushRootToBranch("|>"); - this.root = this.branches.pipe; - return; - } - if (this.branches.union) { - this.pushRootToBranch("|"); - this.root = this.branches.union; - return; - } - if (this.branches.intersection) { - this.pushRootToBranch("&"); - this.root = this.branches.intersection; - return; - } - this.applyPrefixes(); - } - finalizeGroup() { - this.finalizeBranches(); - const topBranchState = this.groups.pop(); - if (!topBranchState) return this.error(writeUnmatchedGroupCloseMessage(this.scanner.unscanned)); - this.branches = topBranchState; - } - addPrefix(prefix) { - this.branches.prefixes.push(prefix); - } - applyPrefixes() { - while (this.branches.prefixes.length) { - const lastPrefix = this.branches.prefixes.pop(); - this.root = lastPrefix === "keyof" ? this.root.keyof() : throwInternalError(`Unexpected prefix '${lastPrefix}'`); - } - } - pushRootToBranch(token) { - this.assertRangeUnset(); - this.applyPrefixes(); - const root = this.root; - this.root = void 0; - this.branches.intersection = this.branches.intersection?.rawAnd(root) ?? root; - if (token === "&") return; - this.branches.union = this.branches.union?.rawOr(this.branches.intersection) ?? this.branches.intersection; - this.branches.intersection = null; - if (token === "|") return; - this.branches.pipe = this.branches.pipe?.rawPipeOnce(this.branches.union) ?? this.branches.union; - this.branches.union = null; - } - parseUntilFinalizer() { - return parseUntilFinalizer(new DynamicState(this.scanner, this.ctx)); - } - parseOperator() { - return parseOperator(this); - } - parseOperand() { - return parseOperand(this); - } - assertRangeUnset() { - if (this.branches.leftBound) return this.error(writeOpenRangeMessage(this.branches.leftBound.limit, this.branches.leftBound.comparator)); - } - reduceGroupOpen() { - this.groups.push(this.branches); - this.branches = { - prefixes: [], - leftBound: null, - union: null, - intersection: null, - pipe: null - }; - } - previousOperator() { - return this.branches.leftBound?.comparator ?? this.branches.prefixes.at(-1) ?? 
(this.branches.intersection ? "&" : this.branches.union ? "|" : this.branches.pipe ? "|>" : void 0); - } - shiftedByOne() { - this.scanner.shift(); - return this; - } -}; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/generic.js -const emptyGenericParameterMessage = "An empty string is not a valid generic parameter name"; -const parseGenericParamName = (scanner, result, ctx) => { - scanner.shiftUntilNonWhitespace(); - const name = scanner.shiftUntilNextTerminator(); - if (name === "") { - if (scanner.lookahead === "" && result.length) return result; - return throwParseError(emptyGenericParameterMessage); - } - scanner.shiftUntilNonWhitespace(); - return _parseOptionalConstraint(scanner, name, result, ctx); -}; -const extendsToken = "extends "; -const _parseOptionalConstraint = (scanner, name, result, ctx) => { - scanner.shiftUntilNonWhitespace(); - if (scanner.unscanned.startsWith(extendsToken)) scanner.jumpForward(8); - else { - if (scanner.lookahead === ",") scanner.shift(); - result.push(name); - return parseGenericParamName(scanner, result, ctx); - } - const s = parseUntilFinalizer(new DynamicState(scanner, ctx)); - result.push([name, s.root]); - return parseGenericParamName(scanner, result, ctx); -}; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/match.js -var InternalMatchParser = class extends Callable { - $; - constructor($) { - super((...args$1) => new InternalChainedMatchParser($)(...args$1), { bind: $ }); - this.$ = $; - } - in(def) { - return new InternalChainedMatchParser(this.$, def === void 0 ? void 0 : this.$.parse(def)); - } - at(key, cases) { - return new InternalChainedMatchParser(this.$).at(key, cases); - } - case(when, then) { - return new InternalChainedMatchParser(this.$).case(when, then); - } -}; -var InternalChainedMatchParser = class extends Callable { - $; - in; - key; - branches = []; - constructor($, In) { - super((cases) => this.caseEntries(Object.entries(cases).map(([k, v]) => k === "default" ? [k, v] : [this.$.parse(k), v]))); - this.$ = $; - this.in = In; - } - at(key, cases) { - if (this.key) throwParseError(doubleAtMessage); - if (this.branches.length) throwParseError(chainedAtMessage); - this.key = key; - return cases ? this.match(cases) : this; - } - case(def, resolver) { - return this.caseEntry(this.$.parse(def), resolver); - } - caseEntry(node$1, resolver) { - const wrappableNode = this.key ? this.$.parse({ [this.key]: node$1 }) : node$1; - const branch = wrappableNode.pipe(resolver); - this.branches.push(branch); - return this; - } - match(cases) { - return this(cases); - } - strings(cases) { - return this.caseEntries(Object.entries(cases).map(([k, v]) => k === "default" ? 
[k, v] : [this.$.node("unit", { unit: k }), v])); - } - caseEntries(entries) { - for (let i = 0; i < entries.length; i++) { - const [k, v] = entries[i]; - if (k === "default") { - if (i !== entries.length - 1) throwParseError(`default may only be specified as the last key of a switch definition`); - return this.default(v); - } - if (typeof v !== "function") return throwParseError(`Value for case "${k}" must be a function (was ${domainOf(v)})`); - this.caseEntry(k, v); - } - return this; - } - default(defaultCase) { - if (typeof defaultCase === "function") this.case(intrinsic.unknown, defaultCase); - const schema$1 = { - branches: this.branches, - ordered: true - }; - if (defaultCase === "never" || defaultCase === "assert") schema$1.meta = { onFail: throwOnDefault }; - const cases = this.$.node("union", schema$1); - if (!this.in) return this.$.finalize(cases); - let inputValidatedCases = this.in.pipe(cases); - if (defaultCase === "never" || defaultCase === "assert") inputValidatedCases = inputValidatedCases.configureReferences({ onFail: throwOnDefault }, "self"); - return this.$.finalize(inputValidatedCases); - } -}; -const throwOnDefault = (errors) => errors.throw(); -const chainedAtMessage = `A key matcher must be specified before the first case i.e. match.at('foo') or match.in().at('bar')`; -const doubleAtMessage = `At most one key matcher may be specified per expression`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/property.js -const parseProperty = (def, ctx) => { - if (isArray(def)) { - if (def[1] === "=") return [ - ctx.$.parseOwnDefinitionFormat(def[0], ctx), - "=", - def[2] - ]; - if (def[1] === "?") return [ctx.$.parseOwnDefinitionFormat(def[0], ctx), "?"]; - } - return parseInnerDefinition(def, ctx); -}; -const invalidOptionalKeyKindMessage = `Only required keys may make their values optional, e.g. { [mySymbol]: ['number', '?'] }`; -const invalidDefaultableKeyKindMessage = `Only required keys may specify default values, e.g. { value: 'number = 0' }`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/objectLiteral.js -const parseObjectLiteral = (def, ctx) => { - let spread; - const structure = {}; - const defEntries = stringAndSymbolicEntriesOf(def); - for (const [k, v] of defEntries) { - const parsedKey = preparseKey(k); - if (parsedKey.kind === "spread") { - if (!isEmptyObject(structure)) return throwParseError(nonLeadingSpreadError); - const operand = ctx.$.parseOwnDefinitionFormat(v, ctx); - if (operand.equals(intrinsic.object)) continue; - if (!operand.hasKind("intersection") || !operand.basis?.equals(intrinsic.object)) return throwParseError(writeInvalidSpreadTypeMessage(operand.expression)); - spread = operand.structure; - continue; - } - if (parsedKey.kind === "undeclared") { - if (v !== "reject" && v !== "delete" && v !== "ignore") throwParseError(writeInvalidUndeclaredBehaviorMessage(v)); - structure.undeclared = v; - continue; - } - const parsedValue = parseProperty(v, ctx); - const parsedEntryKey = parsedKey; - if (parsedKey.kind === "required") { - if (!isArray(parsedValue)) appendNamedProp(structure, "required", { - key: parsedKey.normalized, - value: parsedValue - }, ctx); - else appendNamedProp(structure, "optional", parsedValue[1] === "=" ? 
{ - key: parsedKey.normalized, - value: parsedValue[0], - default: parsedValue[2] - } : { - key: parsedKey.normalized, - value: parsedValue[0] - }, ctx); - continue; - } - if (isArray(parsedValue)) { - if (parsedValue[1] === "?") throwParseError(invalidOptionalKeyKindMessage); - if (parsedValue[1] === "=") throwParseError(invalidDefaultableKeyKindMessage); - } - if (parsedKey.kind === "optional") { - appendNamedProp(structure, "optional", { - key: parsedKey.normalized, - value: parsedValue - }, ctx); - continue; - } - const signature = ctx.$.parseOwnDefinitionFormat(parsedEntryKey.normalized, ctx); - const normalized = normalizeIndex(signature, parsedValue, ctx.$); - if (normalized.index) structure.index = append(structure.index, normalized.index); - if (normalized.required) structure.required = append(structure.required, normalized.required); - } - const structureNode = ctx.$.node("structure", structure); - return ctx.$.parseSchema({ - domain: "object", - structure: spread?.merge(structureNode) ?? structureNode - }); -}; -const appendNamedProp = (structure, kind, inner, ctx) => { - structure[kind] = append(structure[kind], ctx.$.node(kind, inner)); -}; -const writeInvalidUndeclaredBehaviorMessage = (actual) => `Value of '+' key must be 'reject', 'delete', or 'ignore' (was ${printable(actual)})`; -const nonLeadingSpreadError = "Spread operator may only be used as the first key in an object"; -const preparseKey = (key) => typeof key === "symbol" ? { - kind: "required", - normalized: key -} : key.at(-1) === "?" ? key.at(-2) === escapeChar ? { - kind: "required", - normalized: `${key.slice(0, -2)}?` -} : { - kind: "optional", - normalized: key.slice(0, -1) -} : key[0] === "[" && key.at(-1) === "]" ? { - kind: "index", - normalized: key.slice(1, -1) -} : key[0] === escapeChar && key[1] === "[" && key.at(-1) === "]" ? { - kind: "required", - normalized: key.slice(1) -} : key === "..." ? { kind: "spread" } : key === "+" ? { kind: "undeclared" } : { - kind: "required", - normalized: key === "\\..." ? "..." : key === "\\+" ? "+" : key -}; -const writeInvalidSpreadTypeMessage = (def) => `Spread operand must resolve to an object literal type (was ${def})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/tupleExpressions.js -const maybeParseTupleExpression = (def, ctx) => isIndexZeroExpression(def) ? indexZeroParsers[def[0]](def, ctx) : isIndexOneExpression(def) ? indexOneParsers[def[1]](def, ctx) : null; -const parseKeyOfTuple = (def, ctx) => ctx.$.parseOwnDefinitionFormat(def[1], ctx).keyof(); -const parseBranchTuple = (def, ctx) => { - if (def[2] === void 0) return throwParseError(writeMissingRightOperandMessage(def[1], "")); - const l = ctx.$.parseOwnDefinitionFormat(def[0], ctx); - const r = ctx.$.parseOwnDefinitionFormat(def[2], ctx); - if (def[1] === "|") return ctx.$.node("union", { branches: [l, r] }); - const result = def[1] === "&" ? intersectNodesRoot(l, r, ctx.$) : pipeNodesRoot(l, r, ctx.$); - if (result instanceof Disjoint) return result.throw(); - return result; -}; -const parseArrayTuple = (def, ctx) => ctx.$.parseOwnDefinitionFormat(def[0], ctx).array(); -const parseMorphTuple = (def, ctx) => { - if (typeof def[2] !== "function") return throwParseError(writeMalformedFunctionalExpressionMessage("=>", def[2])); - return ctx.$.parseOwnDefinitionFormat(def[0], ctx).pipe(def[2]); -}; -const writeMalformedFunctionalExpressionMessage = (operator, value$1) => `${operator === ":" ? 
"Narrow" : "Morph"} expression requires a function following '${operator}' (was ${typeof value$1})`; -const parseNarrowTuple = (def, ctx) => { - if (typeof def[2] !== "function") return throwParseError(writeMalformedFunctionalExpressionMessage(":", def[2])); - return ctx.$.parseOwnDefinitionFormat(def[0], ctx).constrain("predicate", def[2]); -}; -const parseAttributeTuple = (def, ctx) => ctx.$.parseOwnDefinitionFormat(def[0], ctx).configureReferences(def[2], "shallow"); -const defineIndexOneParsers = (parsers) => parsers; -const postfixParsers = defineIndexOneParsers({ - "[]": parseArrayTuple, - "?": () => throwParseError(shallowOptionalMessage) -}); -const infixParsers = defineIndexOneParsers({ - "|": parseBranchTuple, - "&": parseBranchTuple, - ":": parseNarrowTuple, - "=>": parseMorphTuple, - "|>": parseBranchTuple, - "@": parseAttributeTuple, - "=": () => throwParseError(shallowDefaultableMessage) -}); -const indexOneParsers = { - ...postfixParsers, - ...infixParsers -}; -const isIndexOneExpression = (def) => indexOneParsers[def[1]] !== void 0; -const defineIndexZeroParsers = (parsers) => parsers; -const indexZeroParsers = defineIndexZeroParsers({ - keyof: parseKeyOfTuple, - instanceof: (def, ctx) => { - if (typeof def[1] !== "function") return throwParseError(writeInvalidConstructorMessage(objectKindOrDomainOf(def[1]))); - const branches = def.slice(1).map((ctor) => typeof ctor === "function" ? ctx.$.node("proto", { proto: ctor }) : throwParseError(writeInvalidConstructorMessage(objectKindOrDomainOf(ctor)))); - return branches.length === 1 ? branches[0] : ctx.$.node("union", { branches }); - }, - "===": (def, ctx) => ctx.$.units(def.slice(1)) -}); -const isIndexZeroExpression = (def) => indexZeroParsers[def[0]] !== void 0; -const writeInvalidConstructorMessage = (actual) => `Expected a constructor following 'instanceof' operator (was ${actual})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/tupleLiteral.js -const parseTupleLiteral = (def, ctx) => { - let sequences = [{}]; - let i = 0; - while (i < def.length) { - let spread = false; - if (def[i] === "..." && i < def.length - 1) { - spread = true; - i++; - } - const parsedProperty = parseProperty(def[i], ctx); - const [valueNode, operator, possibleDefaultValue] = !isArray(parsedProperty) ? [parsedProperty] : parsedProperty; - i++; - if (spread) { - if (!valueNode.extends($ark.intrinsic.Array)) return throwParseError(writeNonArraySpreadMessage(valueNode.expression)); - sequences = sequences.flatMap((base) => valueNode.distribute((branch) => appendSpreadBranch(makeRootAndArrayPropertiesMutable(base), branch))); - } else sequences = sequences.map((base) => { - if (operator === "?") return appendOptionalElement(base, valueNode); - if (operator === "=") return appendDefaultableElement(base, valueNode, possibleDefaultValue); - return appendRequiredElement(base, valueNode); - }); - } - return ctx.$.parseSchema(sequences.map((sequence) => isEmptyObject(sequence) ? { - proto: Array, - exactLength: 0 - } : { - proto: Array, - sequence - })); -}; -const appendRequiredElement = (base, element) => { - if (base.defaultables || base.optionals) return throwParseError(base.variadic ? 
postfixAfterOptionalOrDefaultableMessage : requiredPostOptionalMessage); - if (base.variadic) base.postfix = append(base.postfix, element); - else base.prefix = append(base.prefix, element); - return base; -}; -const appendOptionalElement = (base, element) => { - if (base.variadic) return throwParseError(optionalOrDefaultableAfterVariadicMessage); - base.optionals = append(base.optionals, element); - return base; -}; -const appendDefaultableElement = (base, element, value$1) => { - if (base.variadic) return throwParseError(optionalOrDefaultableAfterVariadicMessage); - if (base.optionals) return throwParseError(defaultablePostOptionalMessage); - base.defaultables = append(base.defaultables, [[element, value$1]]); - return base; -}; -const appendVariadicElement = (base, element) => { - if (base.postfix) throwParseError(multipleVariadicMesage); - if (base.variadic) { - if (!base.variadic.equals(element)) throwParseError(multipleVariadicMesage); - } else base.variadic = element.internal; - return base; -}; -const appendSpreadBranch = (base, branch) => { - const spread = branch.select({ - method: "find", - kind: "sequence" - }); - if (!spread) return appendVariadicElement(base, $ark.intrinsic.unknown); - if (spread.prefix) for (const node$1 of spread.prefix) appendRequiredElement(base, node$1); - if (spread.optionals) for (const node$1 of spread.optionals) appendOptionalElement(base, node$1); - if (spread.variadic) appendVariadicElement(base, spread.variadic); - if (spread.postfix) for (const node$1 of spread.postfix) appendRequiredElement(base, node$1); - return base; -}; -const writeNonArraySpreadMessage = (operand) => `Spread element must be an array (was ${operand})`; -const multipleVariadicMesage = "A tuple may have at most one variadic element"; -const requiredPostOptionalMessage = "A required element may not follow an optional element"; -const optionalOrDefaultableAfterVariadicMessage = "An optional element may not follow a variadic element"; -const defaultablePostOptionalMessage = "A defaultable element may not follow an optional element without a default"; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/parser/definition.js -const parseCache = {}; -const parseInnerDefinition = (def, ctx) => { - if (typeof def === "string") { - if (ctx.args && Object.keys(ctx.args).some((k) => def.includes(k))) return parseString(def, ctx); - const scopeCache = parseCache[ctx.$.name] ??= {}; - return scopeCache[def] ??= parseString(def, ctx); - } - return hasDomain(def, "object") ? parseObject(def, ctx) : throwParseError(writeBadDefinitionTypeMessage(domainOf(def))); -}; -const parseObject = (def, ctx) => { - const objectKind = objectKindOf(def); - switch (objectKind) { - case void 0: - if (hasArkKind(def, "root")) return def; - return parseObjectLiteral(def, ctx); - case "Array": return parseTuple(def, ctx); - case "RegExp": return ctx.$.node("intersection", { - domain: "string", - pattern: def - }, { prereduced: true }); - case "Function": { - const resolvedDef = isThunk(def) ? def() : def; - if (hasArkKind(resolvedDef, "root")) return resolvedDef; - return throwParseError(writeBadDefinitionTypeMessage("Function")); - } - default: return throwParseError(writeBadDefinitionTypeMessage(objectKind ?? printable(def))); - } -}; -const parseTuple = (def, ctx) => maybeParseTupleExpression(def, ctx) ?? 
parseTupleLiteral(def, ctx); -const writeBadDefinitionTypeMessage = (actual) => `Type definitions must be strings or objects (was ${actual})`; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/type.js -var InternalTypeParser = class extends Callable { - constructor($) { - const attach = Object.assign({ - errors: ArkErrors, - hkt: Hkt, - $, - raw: $.parse, - module: $.constructor.module, - scope: $.constructor.scope, - define: $.define, - match: $.match, - generic: $.generic, - schema: $.schema, - keywords: $.ambient, - unit: $.unit, - enumerated: $.enumerated, - instanceOf: $.instanceOf, - valueOf: $.valueOf, - or: $.or, - and: $.and, - merge: $.merge, - pipe: $.pipe - }, $.ambientAttachments); - super((...args$1) => { - if (args$1.length === 1) return $.parse(args$1[0]); - if (args$1.length === 2 && typeof args$1[0] === "string" && args$1[0][0] === "<" && args$1[0].at(-1) === ">") { - const paramString = args$1[0].slice(1, -1); - const params = $.parseGenericParams(paramString, {}); - return new GenericRoot(params, args$1[1], $, $, null); - } - return $.parse(args$1); - }, { - bind: $, - attach - }); - } -}; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/scope.js -const $arkTypeRegistry = $ark; -var InternalScope = class InternalScope extends BaseScope { - get ambientAttachments() { - if (!$arkTypeRegistry.typeAttachments) return; - return this.cacheGetter("ambientAttachments", flatMorph($arkTypeRegistry.typeAttachments, (k, v) => [k, this.bindReference(v)])); - } - preparseOwnAliasEntry(alias, def) { - const firstParamIndex = alias.indexOf("<"); - if (firstParamIndex === -1) { - if (hasArkKind(def, "module") || hasArkKind(def, "generic")) return [alias, def]; - const qualifiedName = this.name === "ark" ? alias : alias === "root" ? this.name : `${this.name}.${alias}`; - const config = this.resolvedConfig.keywords?.[qualifiedName]; - if (config) def = [ - def, - "@", - config - ]; - return [alias, def]; - } - if (alias.at(-1) !== ">") throwParseError(`'>' must be the last character of a generic declaration in a scope`); - const name = alias.slice(0, firstParamIndex); - const paramString = alias.slice(firstParamIndex + 1, -1); - return [name, () => { - const params = this.parseGenericParams(paramString, { alias: name }); - const generic$1 = parseGeneric(params, def, this); - return generic$1; - }]; - } - parseGenericParams(def, opts) { - return parseGenericParamName(new ArkTypeScanner(def), [], this.createParseContext({ - ...opts, - def, - prefix: "generic" - })); - } - normalizeRootScopeValue(resolution) { - if (isThunk(resolution) && !hasArkKind(resolution, "generic")) return resolution(); - return resolution; - } - preparseOwnDefinitionFormat(def, opts) { - return { - ...opts, - def, - prefix: opts.alias ?? 
"type" - }; - } - parseOwnDefinitionFormat(def, ctx) { - const isScopeAlias = ctx.alias && ctx.alias in this.aliases; - if (!isScopeAlias && !ctx.args) ctx.args = { this: ctx.id }; - const result = parseInnerDefinition(def, ctx); - if (isArray(result)) { - if (result[1] === "=") return throwParseError(shallowDefaultableMessage); - if (result[1] === "?") return throwParseError(shallowOptionalMessage); - } - return result; - } - unit = (value$1) => this.units([value$1]); - valueOf = (tsEnum) => this.units(enumValues(tsEnum)); - enumerated = (...values) => this.units(values); - instanceOf = (ctor) => this.node("proto", { proto: ctor }, { prereduced: true }); - or = (...defs) => this.schema(defs.map((def) => this.parse(def))); - and = (...defs) => defs.reduce((node$1, def) => node$1.and(this.parse(def)), this.intrinsic.unknown); - merge = (...defs) => defs.reduce((node$1, def) => node$1.merge(this.parse(def)), this.intrinsic.object); - pipe = (...morphs) => this.intrinsic.unknown.pipe(...morphs); - match = new InternalMatchParser(this); - declare = () => ({ type: this.type }); - define(def) { - return def; - } - type = new InternalTypeParser(this); - static scope = (def, config = {}) => new InternalScope(def, config); - static module = (def, config = {}) => this.scope(def, config).export(); -}; -const scope = Object.assign(InternalScope.scope, { define: (def) => def }); -const Scope = InternalScope; - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/builtins.js -var MergeHkt = class extends Hkt { - description = "merge an object's properties onto another like `Merge(User, { isAdmin: \"true\" })`"; -}; -const Merge = genericNode(["base", intrinsic.object], ["props", intrinsic.object])((args$1) => args$1.base.merge(args$1.props), MergeHkt); -const arkBuiltins = Scope.module({ - Key: intrinsic.key, - Merge -}); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/Array.js -var liftFromHkt = class extends Hkt {}; -const liftFrom = genericNode("element")((args$1) => { - const nonArrayElement = args$1.element.exclude(intrinsic.Array); - const lifted = nonArrayElement.array(); - return nonArrayElement.rawOr(lifted).pipe(liftArray).distribute((branch) => branch.assertHasKind("morph").declareOut(lifted), rootSchema); -}, liftFromHkt); -const arkArray = Scope.module({ - root: intrinsic.Array, - readonly: "root", - index: intrinsic.nonNegativeIntegerString, - liftFrom -}, { name: "Array" }); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/FormData.js -const value = rootSchema(["string", registry.FileConstructor]); -const parsedFormDataValue = value.rawOr(value.array()); -const parsed = rootSchema({ - meta: "an object representing parsed form data", - domain: "object", - index: { - signature: "string", - value: parsedFormDataValue - } -}); -const arkFormData = Scope.module({ - root: ["instanceof", FormData], - value, - parsed, - parse: rootSchema({ - in: FormData, - morphs: (data) => { - const result = {}; - for (const [k, v] of data) if (k in result) { - const existing = result[k]; - if (typeof existing === "string" || existing instanceof registry.FileConstructor) result[k] = [existing, v]; - else existing.push(v); - } else result[k] = v; - return result; - }, - declaredOut: parsed - }) -}, { name: "FormData" }); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/TypedArray.js -const TypedArray = Scope.module({ - Int8: 
["instanceof", Int8Array], - Uint8: ["instanceof", Uint8Array], - Uint8Clamped: ["instanceof", Uint8ClampedArray], - Int16: ["instanceof", Int16Array], - Uint16: ["instanceof", Uint16Array], - Int32: ["instanceof", Int32Array], - Uint32: ["instanceof", Uint32Array], - Float32: ["instanceof", Float32Array], - Float64: ["instanceof", Float64Array], - BigInt64: ["instanceof", BigInt64Array], - BigUint64: ["instanceof", BigUint64Array] -}, { name: "TypedArray" }); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/constructors.js -const omittedPrototypes = { - Boolean: 1, - Number: 1, - String: 1 -}; -const arkPrototypes = Scope.module({ - ...flatMorph({ - ...ecmascriptConstructors, - ...platformConstructors - }, (k, v) => k in omittedPrototypes ? [] : [k, ["instanceof", v]]), - Array: arkArray, - TypedArray, - FormData: arkFormData -}); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/number.js -/** -* As per the ECMA-262 specification: -* A time value supports a slightly smaller range of -8,640,000,000,000,000 to 8,640,000,000,000,000 milliseconds. -* -* @see https://262.ecma-international.org/15.0/index.html#sec-time-values-and-time-range -*/ -const epoch$1 = rootSchema({ - domain: { - domain: "number", - meta: "a number representing a Unix timestamp" - }, - divisor: { - rule: 1, - meta: `an integer representing a Unix timestamp` - }, - min: { - rule: -864e13, - meta: `a Unix timestamp after -8640000000000000` - }, - max: { - rule: 864e13, - meta: "a Unix timestamp before 8640000000000000" - }, - meta: "an integer representing a safe Unix timestamp" -}); -const integer = rootSchema({ - domain: "number", - divisor: 1 -}); -const number = Scope.module({ - root: intrinsic.number, - integer, - epoch: epoch$1, - safe: rootSchema({ - domain: { - domain: "number", - numberAllowsNaN: false - }, - min: Number.MIN_SAFE_INTEGER, - max: Number.MAX_SAFE_INTEGER - }), - NaN: ["===", NaN], - Infinity: ["===", Number.POSITIVE_INFINITY], - NegativeInfinity: ["===", Number.NEGATIVE_INFINITY] -}, { name: "number" }); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/string.js -const regexStringNode = (regex$1, description, jsonSchemaFormat) => { - const schema$1 = { - domain: "string", - pattern: { - rule: regex$1.source, - flags: regex$1.flags, - meta: description - } - }; - if (jsonSchemaFormat) schema$1.meta = { format: jsonSchemaFormat }; - return node("intersection", schema$1); -}; -const stringIntegerRoot = regexStringNode(wellFormedIntegerMatcher, "a well-formed integer string"); -const stringInteger = Scope.module({ - root: stringIntegerRoot, - parse: rootSchema({ - in: stringIntegerRoot, - morphs: (s, ctx) => { - const parsed$1 = Number.parseInt(s); - return Number.isSafeInteger(parsed$1) ? 
parsed$1 : ctx.error("an integer in the range Number.MIN_SAFE_INTEGER to Number.MAX_SAFE_INTEGER"); - }, - declaredOut: intrinsic.integer - }) -}, { name: "string.integer" }); -const hex = regexStringNode(/^[\dA-Fa-f]+$/, "hex characters only"); -const base64 = Scope.module({ - root: regexStringNode(/^(?:[\d+/A-Za-z]{4})*(?:[\d+/A-Za-z]{2}==|[\d+/A-Za-z]{3}=)?$/, "base64-encoded"), - url: regexStringNode(/^(?:[\w-]{4})*(?:[\w-]{2}(?:==|%3D%3D)?|[\w-]{3}(?:=|%3D)?)?$/, "base64url-encoded") -}, { name: "string.base64" }); -const preformattedCapitalize = regexStringNode(/^[A-Z].*$/, "capitalized"); -const capitalize = Scope.module({ - root: rootSchema({ - in: "string", - morphs: (s) => s.charAt(0).toUpperCase() + s.slice(1), - declaredOut: preformattedCapitalize - }), - preformatted: preformattedCapitalize -}, { name: "string.capitalize" }); -const isLuhnValid = (creditCardInput) => { - const sanitized = creditCardInput.replaceAll(/[ -]+/g, ""); - let sum = 0; - let digit; - let tmpNum; - let shouldDouble = false; - for (let i = sanitized.length - 1; i >= 0; i--) { - digit = sanitized.substring(i, i + 1); - tmpNum = Number.parseInt(digit, 10); - if (shouldDouble) { - tmpNum *= 2; - sum += tmpNum >= 10 ? tmpNum % 10 + 1 : tmpNum; - } else sum += tmpNum; - shouldDouble = !shouldDouble; - } - return !!(sum % 10 === 0 ? sanitized : false); -}; -const creditCardMatcher = /^(?:4\d{12}(?:\d{3,6})?|5[1-5]\d{14}|(222[1-9]|22[3-9]\d|2[3-6]\d{2}|27[01]\d|2720)\d{12}|6(?:011|5\d\d)\d{12,15}|3[47]\d{13}|3(?:0[0-5]|[68]\d)\d{11}|(?:2131|1800|35\d{3})\d{11}|6[27]\d{14}|^(81\d{14,17}))$/; -const creditCard = rootSchema({ - domain: "string", - pattern: { - meta: "a credit card number", - rule: creditCardMatcher.source - }, - predicate: { - meta: "a credit card number", - predicate: isLuhnValid - } -}); -const iso8601Matcher = /^([+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))(T((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([,.]\d+(?!:))?)?(\17[0-5]\d([,.]\d+)?)?([Zz]|([+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; -const isParsableDate = (s) => !Number.isNaN(new Date(s).valueOf()); -const parsableDate = rootSchema({ - domain: "string", - predicate: { - meta: "a parsable date", - predicate: isParsableDate - } -}).assertHasKind("intersection"); -const epochRoot = stringInteger.root.internal.narrow((s, ctx) => { - const n = Number.parseInt(s); - const out = number.epoch(n); - if (out instanceof ArkErrors) { - ctx.errors.merge(out); - return false; - } - return true; -}).configure({ description: "an integer string representing a safe Unix timestamp" }, "self").assertHasKind("intersection"); -const epoch = Scope.module({ - root: epochRoot, - parse: rootSchema({ - in: epochRoot, - morphs: (s) => new Date(s), - declaredOut: intrinsic.Date - }) -}, { name: "string.date.epoch" }); -const isoRoot = regexStringNode(iso8601Matcher, "an ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) date").internal.assertHasKind("intersection"); -const iso = Scope.module({ - root: isoRoot, - parse: rootSchema({ - in: isoRoot, - morphs: (s) => new Date(s), - declaredOut: intrinsic.Date - }) -}, { name: "string.date.iso" }); -const stringDate = Scope.module({ - root: parsableDate, - parse: rootSchema({ - declaredIn: parsableDate, - in: "string", - morphs: (s, ctx) => { - const date = new Date(s); - if (Number.isNaN(date.valueOf())) return ctx.error("a parsable date"); - return date; - }, - declaredOut: intrinsic.Date - }), - iso, - epoch -}, { name: "string.date" }); 
-const email = regexStringNode(/^[\w%+.-]+@[\d.A-Za-z-]+\.[A-Za-z]{2,}$/, "an email address", "email"); -const ipv4Segment = "(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])"; -const ipv4Address = `(${ipv4Segment}[.]){3}${ipv4Segment}`; -const ipv4Matcher = /* @__PURE__ */ new RegExp(`^${ipv4Address}$`); -const ipv6Segment = "(?:[0-9a-fA-F]{1,4})"; -const ipv6Matcher = /* @__PURE__ */ new RegExp(`^((?:${ipv6Segment}:){7}(?:${ipv6Segment}|:)|(?:${ipv6Segment}:){6}(?:${ipv4Address}|:${ipv6Segment}|:)|(?:${ipv6Segment}:){5}(?::${ipv4Address}|(:${ipv6Segment}){1,2}|:)|(?:${ipv6Segment}:){4}(?:(:${ipv6Segment}){0,1}:${ipv4Address}|(:${ipv6Segment}){1,3}|:)|(?:${ipv6Segment}:){3}(?:(:${ipv6Segment}){0,2}:${ipv4Address}|(:${ipv6Segment}){1,4}|:)|(?:${ipv6Segment}:){2}(?:(:${ipv6Segment}){0,3}:${ipv4Address}|(:${ipv6Segment}){1,5}|:)|(?:${ipv6Segment}:){1}(?:(:${ipv6Segment}){0,4}:${ipv4Address}|(:${ipv6Segment}){1,6}|:)|(?::((?::${ipv6Segment}){0,5}:${ipv4Address}|(?::${ipv6Segment}){1,7}|:)))(%[0-9a-zA-Z.]{1,})?\$`); -const ip = Scope.module({ - root: [ - "v4 | v6", - "@", - "an IP address" - ], - v4: regexStringNode(ipv4Matcher, "an IPv4 address", "ipv4"), - v6: regexStringNode(ipv6Matcher, "an IPv6 address", "ipv6") -}, { name: "string.ip" }); -const jsonStringDescription = "a JSON string"; -const writeJsonSyntaxErrorProblem = (error) => { - if (!(error instanceof SyntaxError)) throw error; - return `must be ${jsonStringDescription} (${error})`; -}; -const jsonRoot = rootSchema({ - meta: jsonStringDescription, - domain: "string", - predicate: { - meta: jsonStringDescription, - predicate: (s, ctx) => { - try { - JSON.parse(s); - return true; - } catch (e) { - return ctx.reject({ - code: "predicate", - expected: jsonStringDescription, - problem: writeJsonSyntaxErrorProblem(e) - }); - } - } - } -}); -const parseJson = (s, ctx) => { - if (s.length === 0) return ctx.error({ - code: "predicate", - expected: jsonStringDescription, - actual: "empty" - }); - try { - return JSON.parse(s); - } catch (e) { - return ctx.error({ - code: "predicate", - expected: jsonStringDescription, - problem: writeJsonSyntaxErrorProblem(e) - }); - } -}; -const json$1 = Scope.module({ - root: jsonRoot, - parse: rootSchema({ - meta: "safe JSON string parser", - in: "string", - morphs: parseJson, - declaredOut: intrinsic.jsonObject - }) -}, { name: "string.json" }); -const preformattedLower = regexStringNode(/^[a-z]*$/, "only lowercase letters"); -const lower = Scope.module({ - root: rootSchema({ - in: "string", - morphs: (s) => s.toLowerCase(), - declaredOut: preformattedLower - }), - preformatted: preformattedLower -}, { name: "string.lower" }); -const normalizedForms = [ - "NFC", - "NFD", - "NFKC", - "NFKD" -]; -const preformattedNodes = flatMorph(normalizedForms, (i, form) => [form, rootSchema({ - domain: "string", - predicate: (s) => s.normalize(form) === s, - meta: `${form}-normalized unicode` -})]); -const normalizeNodes = flatMorph(normalizedForms, (i, form) => [form, rootSchema({ - in: "string", - morphs: (s) => s.normalize(form), - declaredOut: preformattedNodes[form] -})]); -const NFC = Scope.module({ - root: normalizeNodes.NFC, - preformatted: preformattedNodes.NFC -}, { name: "string.normalize.NFC" }); -const NFD = Scope.module({ - root: normalizeNodes.NFD, - preformatted: preformattedNodes.NFD -}, { name: "string.normalize.NFD" }); -const NFKC = Scope.module({ - root: normalizeNodes.NFKC, - preformatted: preformattedNodes.NFKC -}, { name: "string.normalize.NFKC" }); -const NFKD = Scope.module({ - root: 
normalizeNodes.NFKD, - preformatted: preformattedNodes.NFKD -}, { name: "string.normalize.NFKD" }); -const normalize = Scope.module({ - root: "NFC", - NFC, - NFD, - NFKC, - NFKD -}, { name: "string.normalize" }); -const numericRoot = regexStringNode(numericStringMatcher, "a well-formed numeric string"); -const stringNumeric = Scope.module({ - root: numericRoot, - parse: rootSchema({ - in: numericRoot, - morphs: (s) => Number.parseFloat(s), - declaredOut: intrinsic.number - }) -}, { name: "string.numeric" }); -const regexPatternDescription = "a regex pattern"; -const regex = rootSchema({ - domain: "string", - predicate: { - meta: regexPatternDescription, - predicate: (s, ctx) => { - try { - new RegExp(s); - return true; - } catch (e) { - return ctx.reject({ - code: "predicate", - expected: regexPatternDescription, - problem: String(e) - }); - } - } - }, - meta: { format: "regex" } -}); -const semverMatcher = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[A-Za-z-][\dA-Za-z-]*)(?:\.(?:0|[1-9]\d*|\d*[A-Za-z-][\dA-Za-z-]*))*))?(?:\+([\dA-Za-z-]+(?:\.[\dA-Za-z-]+)*))?$/; -const semver = regexStringNode(semverMatcher, "a semantic version (see https://semver.org/)"); -const preformattedTrim = regexStringNode(/^\S.*\S$|^\S?$/, "trimmed"); -const trim = Scope.module({ - root: rootSchema({ - in: "string", - morphs: (s) => s.trim(), - declaredOut: preformattedTrim - }), - preformatted: preformattedTrim -}, { name: "string.trim" }); -const preformattedUpper = regexStringNode(/^[A-Z]*$/, "only uppercase letters"); -const upper = Scope.module({ - root: rootSchema({ - in: "string", - morphs: (s) => s.toUpperCase(), - declaredOut: preformattedUpper - }), - preformatted: preformattedUpper -}, { name: "string.upper" }); -const isParsableUrl = (s) => { - if (URL.canParse) return URL.canParse(s); - try { - new URL(s); - return true; - } catch { - return false; - } -}; -const urlRoot = rootSchema({ - domain: "string", - predicate: { - meta: "a URL string", - predicate: isParsableUrl - }, - meta: { format: "uri" } -}); -const url = Scope.module({ - root: urlRoot, - parse: rootSchema({ - declaredIn: urlRoot, - in: "string", - morphs: (s, ctx) => { - try { - return new URL(s); - } catch { - return ctx.error("a URL string"); - } - }, - declaredOut: rootSchema(URL) - }) -}, { name: "string.url" }); -const uuid = Scope.module({ - root: [ - "versioned | nil | max", - "@", - { - description: "a UUID", - format: "uuid" - } - ], - "#nil": "'00000000-0000-0000-0000-000000000000'", - "#max": "'ffffffff-ffff-ffff-ffff-ffffffffffff'", - "#versioned": /[\da-f]{8}-[\da-f]{4}-[1-8][\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}/i, - v1: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-1[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv1"), - v2: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-2[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv2"), - v3: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-3[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv3"), - v4: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-4[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv4"), - v5: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-5[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv5"), - v6: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-6[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv6"), - v7: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-7[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv7"), - v8: regexStringNode(/^[\da-f]{8}-[\da-f]{4}-8[\da-f]{3}-[89ab][\da-f]{3}-[\da-f]{12}$/i, "a UUIDv8") -}, { name: "string.uuid" }); -const string = Scope.module({ - root: 
intrinsic.string, - alpha: regexStringNode(/^[A-Za-z]*$/, "only letters"), - alphanumeric: regexStringNode(/^[\dA-Za-z]*$/, "only letters and digits 0-9"), - hex, - base64, - capitalize, - creditCard, - date: stringDate, - digits: regexStringNode(/^\d*$/, "only digits 0-9"), - email, - integer: stringInteger, - ip, - json: json$1, - lower, - normalize, - numeric: stringNumeric, - regex, - semver, - trim, - upper, - url, - uuid -}, { name: "string" }); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/ts.js -const arkTsKeywords = Scope.module({ - bigint: intrinsic.bigint, - boolean: intrinsic.boolean, - false: intrinsic.false, - never: intrinsic.never, - null: intrinsic.null, - number: intrinsic.number, - object: intrinsic.object, - string: intrinsic.string, - symbol: intrinsic.symbol, - true: intrinsic.true, - unknown: intrinsic.unknown, - undefined: intrinsic.undefined -}); -const unknown = Scope.module({ - root: intrinsic.unknown, - any: intrinsic.unknown -}, { name: "unknown" }); -const json = Scope.module({ - root: intrinsic.jsonObject, - stringify: node("morph", { - in: intrinsic.jsonObject, - morphs: (data) => JSON.stringify(data), - declaredOut: intrinsic.string - }) -}, { name: "object.json" }); -const object = Scope.module({ - root: intrinsic.object, - json -}, { name: "object" }); -var RecordHkt = class extends Hkt { - description = "instantiate an object from an index signature and corresponding value type like `Record(\"string\", \"number\")`"; -}; -const Record = genericNode(["K", intrinsic.key], "V")((args$1) => ({ - domain: "object", - index: { - signature: args$1.K, - value: args$1.V - } -}), RecordHkt); -var PickHkt = class extends Hkt { - description = "pick a set of properties from an object like `Pick(User, \"name | age\")`"; -}; -const Pick = genericNode(["T", intrinsic.object], ["K", intrinsic.key])((args$1) => args$1.T.pick(args$1.K), PickHkt); -var OmitHkt = class extends Hkt { - description = "omit a set of properties from an object like `Omit(User, \"age\")`"; -}; -const Omit = genericNode(["T", intrinsic.object], ["K", intrinsic.key])((args$1) => args$1.T.omit(args$1.K), OmitHkt); -var PartialHkt = class extends Hkt { - description = "make all named properties of an object optional like `Partial(User)`"; -}; -const Partial = genericNode(["T", intrinsic.object])((args$1) => args$1.T.partial(), PartialHkt); -var RequiredHkt = class extends Hkt { - description = "make all named properties of an object required like `Required(User)`"; -}; -const Required = genericNode(["T", intrinsic.object])((args$1) => args$1.T.required(), RequiredHkt); -var ExcludeHkt = class extends Hkt { - description = "exclude branches of a union like `Exclude(\"boolean\", \"true\")`"; -}; -const Exclude = genericNode("T", "U")((args$1) => args$1.T.exclude(args$1.U), ExcludeHkt); -var ExtractHkt = class extends Hkt { - description = "extract branches of a union like `Extract(\"0 | false | 1\", \"number\")`"; -}; -const Extract = genericNode("T", "U")((args$1) => args$1.T.extract(args$1.U), ExtractHkt); -const arkTsGenerics = Scope.module({ - Exclude, - Extract, - Omit, - Partial, - Pick, - Record, - Required -}); - -//#endregion -//#region node_modules/.pnpm/arktype@2.1.20/node_modules/arktype/out/keywords/keywords.js -const ark = scope({ - ...arkTsKeywords, - ...arkTsGenerics, - ...arkPrototypes, - ...arkBuiltins, - string, - number, - object, - unknown -}, { - prereducedAliases: true, - name: "ark" -}); -const keywords = ark.export(); 
-Object.assign($arkTypeRegistry.ambient, keywords); -$arkTypeRegistry.typeAttachments = { - string: keywords.string.root, - number: keywords.number.root, - bigint: keywords.bigint, - boolean: keywords.boolean, - symbol: keywords.symbol, - undefined: keywords.undefined, - null: keywords.null, - object: keywords.object.root, - unknown: keywords.unknown.root, - false: keywords.false, - true: keywords.true, - never: keywords.never, - arrayIndex: keywords.Array.index, - Key: keywords.Key, - Record: keywords.Record, - Array: keywords.Array.root, - Date: keywords.Date -}; -const type = Object.assign(ark.type, $arkTypeRegistry.typeAttachments); -const match = ark.match; -const generic = ark.generic; -const schema = ark.schema; -const define = ark.define; -const declare = ark.declare; - -//#endregion -//#region node_modules/.pnpm/find-up-simple@1.0.1/node_modules/find-up-simple/index.js -const toPath = (urlOrPath) => urlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath; -async function findUp(name, { cwd = process$1.cwd(), type: type$1 = "file", stopAt } = {}) { - let directory = path.resolve(toPath(cwd) ?? ""); - const { root } = path.parse(directory); - stopAt = path.resolve(directory, toPath(stopAt ?? root)); - const isAbsoluteName = path.isAbsolute(name); - while (directory) { - const filePath = isAbsoluteName ? name : path.join(directory, name); - try { - const stats = await fsPromises.stat(filePath); - if (type$1 === "file" && stats.isFile() || type$1 === "directory" && stats.isDirectory()) return filePath; - } catch {} - if (directory === stopAt || directory === root) break; - directory = path.dirname(directory); - } -} - -//#endregion -//#region src/lib/core/PackageJson.ts -const PackageJson = type({ - name: "string", - version: "string.semver", - widgetName: "string.upper" -}); - -//#endregion -//#region src/build.ts -async function build() { - console.log("Building the project..."); - const result = await readPackageUp(); - if (!result) throw new Error("No package.json found"); - const pkg = PackageJson(result); - if (pkg instanceof type.errors) { - console.error(pkg.summary); - throw new Error("package.json is invalid"); - } - console.dir(pkg); -} -async function readPackageUp() { - const filePath = await findUp("package.json"); - console.log("Found package.json at:", filePath); - if (!filePath) return; - const data = await readFile(filePath, "utf-8"); - try { - return JSON.parse(data); - } catch { - console.error("Failed to parse package.json"); - } -} - -//#endregion -//#region src/constants.ts -const { version } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url)).toString()); -const VERSION = version; - -//#endregion -//#region src/cli.ts -const cli = cac("mpx"); -cli.command("build", "Build the project").action(build); -cli.help(); -cli.version(VERSION); -if (process.argv.length === 2) { - cli.outputHelp(); - process.exit(1); -} -cli.on("command:*", () => { - console.error(`Unknown command: "%s"`, cli.args.join(" ")); - console.error("See 'mpw --help' for a list of available commands."); - process.exit(1); -}); -cli.parse(); - -//#endregion \ No newline at end of file +import "../dist/mpx.js"; diff --git a/packages/mpx/package-lock.json b/packages/mpx/package-lock.json deleted file mode 100644 index 86cf0e03..00000000 --- a/packages/mpx/package-lock.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "name": "@mendix/mpx", - "version": "0.1.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "@mendix/mpx", - "version": "0.1.0", - 
"hasInstallScript": true, - "license": "Apache-2.0", - "devDependencies": {} - } - } -} diff --git a/packages/mpx/package.json b/packages/mpx/package.json index d8ba8a99..3212a9ad 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -6,7 +6,7 @@ "scripts": { "preinstall": "npx only-allow pnpm", "test": "echo 'test is missing'", - "dev": "premove bin && rolldown -c rolldown.config.ts -w" + "dev": "premove dist && rolldown -c rolldown.config.ts -w" }, "keywords": [ "mendix", @@ -18,13 +18,15 @@ "license": "Apache-2.0", "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad", "dependencies": { + "arktype": "^2.1.20", "rolldown": "1.0.0-beta.26" }, "devDependencies": { "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", - "arktype": "^2.1.20", "cac": "^6.7.14", + "chalk": "^5.4.1", + "fast-glob": "^3.3.3", "find-up-simple": "^1.0.1", "premove": "^4.0.0", "prettier": "^3.6.2" diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index acaae56f..ec70d551 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + arktype: + specifier: ^2.1.20 + version: 2.1.20 rolldown: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 @@ -18,12 +21,15 @@ importers: '@types/node': specifier: ^24.0.13 version: 24.0.13 - arktype: - specifier: ^2.1.20 - version: 2.1.20 cac: specifier: ^6.7.14 version: 6.7.14 + chalk: + specifier: ^5.4.1 + version: 5.4.1 + fast-glob: + specifier: ^3.3.3 + version: 3.3.3 find-up-simple: specifier: ^1.0.1 version: 1.0.1 @@ -54,6 +60,18 @@ packages: '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + '@oxc-project/runtime@0.76.0': resolution: {integrity: sha512-17iezP/BukiovZZR7lp6fZZjNTOmodCWQKkI7sn2sOB1TiccRWzO2bpxnE94jhg8l+nBRMrwnM/cjFCr23winw==} engines: {node: '>=6.9.0'} @@ -140,14 +158,61 @@ packages: arktype@2.1.20: resolution: {integrity: sha512-IZCEEXaJ8g+Ijd59WtSYwtjnqXiwM8sWQ5EjGamcto7+HVN9eK0C4p0zDlCuAwWhpqr6fIBkxPuYDl4/Mcj/+Q==} + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} + chalk@5.4.1: + resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fastq@1.19.1: + resolution: {integrity: 
sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + find-up-simple@1.0.1: resolution: {integrity: sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==} engines: {node: '>=18'} + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + premove@4.0.0: resolution: {integrity: sha512-zim/Hr4+FVdCIM7zL9b9Z0Wfd5Ya3mnKtiuDv7L5lzYzanSq6cOcVJ7EFcgK4I0pt28l8H0jX/x3nyog380XgQ==} engines: {node: '>=6'} @@ -158,10 +223,24 @@ packages: engines: {node: '>=14'} hasBin: true + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + rolldown@1.0.0-beta.26: resolution: {integrity: sha512-2rad1JDFst/GD1J86RuqN1SIP8O8Xv4UbqNyKaVayXTjgF0D6HpvTnUZ1RQ6tANpZweGmq4v6Ay0uyRNEycFPw==} hasBin: true + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} @@ -199,6 +278,18 @@ snapshots: '@tybys/wasm-util': 0.10.0 optional: true + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.19.1 + '@oxc-project/runtime@0.76.0': {} '@oxc-project/types@0.76.0': {} @@ -261,14 +352,61 @@ snapshots: '@ark/schema': 0.46.0 '@ark/util': 0.46.0 + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + cac@6.7.14: {} + chalk@5.4.1: {} + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fastq@1.19.1: + dependencies: + reusify: 1.1.0 + + 
fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + find-up-simple@1.0.1: {} + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + is-extglob@2.1.1: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + picomatch@2.3.1: {} + premove@4.0.0: {} prettier@3.6.2: {} + queue-microtask@1.2.3: {} + + reusify@1.1.0: {} + rolldown@1.0.0-beta.26: dependencies: '@oxc-project/runtime': 0.76.0 @@ -289,6 +427,14 @@ snapshots: '@rolldown/binding-win32-ia32-msvc': 1.0.0-beta.26 '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.26 + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + tslib@2.8.1: optional: true diff --git a/packages/mpx/rolldown.config.ts b/packages/mpx/rolldown.config.ts index 4fedd8e6..8d5c67b0 100644 --- a/packages/mpx/rolldown.config.ts +++ b/packages/mpx/rolldown.config.ts @@ -2,12 +2,14 @@ import type { RolldownOptions } from "rolldown"; const config: RolldownOptions = { input: "./src/cli.ts", - external: ["rolldown"], + external: ["rolldown", "arktype"], output: { - file: "./bin/mpx.js", - inlineDynamicImports: true + file: "./dist/mpx.js", + inlineDynamicImports: true, + minify: false }, - platform: "node" + platform: "node", + treeshake: true }; export default config; diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 46996130..62a02579 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,34 +1,46 @@ -import { type } from "arktype"; -import { findUp } from "find-up-simple"; +import { ArkErrors } from "arktype"; +import chalk from "chalk"; +import fg from "fast-glob"; import { readFile } from "node:fs/promises"; +import { resolve } from "node:path"; +import { rolldown } from "rolldown"; +import { pprint } from "./error-utils.js"; import { PackageJson } from "./lib/core/PackageJson.js"; -export async function build() { - console.log("Building the project..."); - const result = await readPackageUp(); - if (!result) { - throw new Error("No package.json found"); +export async function actionBuild(root?: string) { + try { + await build(root); + } catch (error) { + console.error(chalk.red("BUILD ERROR")); + console.error(pprint(error instanceof Error ? error.message : String(error))); + process.exit(1); } - const pkg = PackageJson(result); +} - if (pkg instanceof type.errors) { - console.error(pkg.summary); - throw new Error("package.json is invalid"); - } - console.dir(pkg); +export async function build(root?: string) { + root = resolve(root ?? 
""); + process.chdir(root); + + const pkg = await readPackageJson(root); + const [entry] = await fg(["src/**/*.ts", "src/**/*.tsx"]); + const bundle = await rolldown({ + input: entry, + external: [/^react\/jsx-runtime$/] + }); + + await bundle.write({ + format: "esm", + minify: false + }); } -export async function readPackageUp(): Promise<{} | undefined> { - const filePath = await findUp("package.json"); - console.log("Found package.json at:", filePath); - if (!filePath) { - return; - } - const data = await readFile(filePath, "utf-8"); +export async function readPackageJson(root: string): Promise { + const filePath = resolve(root, "package.json"); + const pkg = PackageJson(await readFile(filePath, "utf-8")); - try { - return JSON.parse(data); - } catch { - console.error("Failed to parse package.json"); + if (pkg instanceof ArkErrors) { + throw new Error(`Invalid package.json:\n${pkg.summary}`); } + + return pkg; } diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index 611bef58..588b0f7d 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -1,21 +1,17 @@ #!/usr/bin/env node import { cac } from "cac"; -import { build } from "./build.js"; +import { actionBuild } from "./build.js"; import { VERSION } from "./constants.js"; const cli = cac("mpx"); -cli.command("build", "Build the project").action(build); +cli.command("dev [root]", "Run build in watch mode").action(actionBuild); +// cli.command("build", "Create production build").action(build); cli.help(); cli.version(VERSION); -if (process.argv.length === 2) { - cli.outputHelp(); - process.exit(1); -} - cli.on("command:*", () => { console.error(`Unknown command: "%s"`, cli.args.join(" ")); console.error("See 'mpw --help' for a list of available commands."); @@ -23,3 +19,17 @@ cli.on("command:*", () => { }); cli.parse(); + +if (process.argv.length <= 2) { + cli.outputHelp(); + process.exit(1); +} + +process.on("uncaughtException", error => { + console.error("Uncaught Exception:", error.message); + process.exit(1); +}); + +// process.on("unhandledRejection", (reason, promise) => { +// console.error("Unhandled Rejection at:", promise, "reason:", reason); +// }); diff --git a/packages/mpx/src/error-utils.ts b/packages/mpx/src/error-utils.ts new file mode 100644 index 00000000..28f9c449 --- /dev/null +++ b/packages/mpx/src/error-utils.ts @@ -0,0 +1,8 @@ +import chalk from "chalk"; + +export function pprint(msg: string) { + return msg + .split("\n") + .map(line => chalk.yellow(line)) + .join("\n"); +} diff --git a/packages/mpx/src/lib/core/PackageJson.ts b/packages/mpx/src/lib/core/PackageJson.ts index 2ec3d1b5..b2044481 100644 --- a/packages/mpx/src/lib/core/PackageJson.ts +++ b/packages/mpx/src/lib/core/PackageJson.ts @@ -1,9 +1,9 @@ import { type } from "arktype"; -export const PackageJson = type({ - name: "string", +export const PackageJson = type("string.json.parse").to({ + name: type("string > 0").to("string.trim"), version: "string.semver", - widgetName: "string.upper" + widgetName: type("string > 0").to("string.trim") }); export type PackageJson = typeof PackageJson.infer; diff --git a/packages/mpx/test/package.json b/packages/mpx/test/package.json deleted file mode 100644 index 9ae64689..00000000 --- a/packages/mpx/test/package.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "fun", - "version": "1.1.1", - "widgetName": "fun" -} diff --git a/packages/mpx/tsconfig.json b/packages/mpx/tsconfig.json index ecc5cfc6..cebe8bda 100644 --- a/packages/mpx/tsconfig.json +++ b/packages/mpx/tsconfig.json @@ -1,5 +1,6 @@ { 
"extends": "@tsconfig/node22/tsconfig.json", + "exclude": ["input"], "compilerOptions": { "noEmit": true } From 7d3be69e4b5f0fa19c56abc862f3502bdafeaea2 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Sat, 12 Jul 2025 11:23:59 +0200 Subject: [PATCH 04/45] refactor --- packages/mpx/src/build.ts | 75 ++++++++++++++++--- packages/mpx/src/constants.ts | 17 +++-- packages/mpx/src/lib/build-utils.ts | 0 .../src/lib/{core => parsers}/PackageJson.ts | 0 4 files changed, 76 insertions(+), 16 deletions(-) create mode 100644 packages/mpx/src/lib/build-utils.ts rename packages/mpx/src/lib/{core => parsers}/PackageJson.ts (100%) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 62a02579..c7f366c2 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,11 +1,10 @@ import { ArkErrors } from "arktype"; import chalk from "chalk"; -import fg from "fast-glob"; -import { readFile } from "node:fs/promises"; -import { resolve } from "node:path"; +import fs from "node:fs/promises"; +import path from "node:path"; import { rolldown } from "rolldown"; import { pprint } from "./error-utils.js"; -import { PackageJson } from "./lib/core/PackageJson.js"; +import { PackageJson } from "./lib/parsers/PackageJson.js"; export async function actionBuild(root?: string) { try { @@ -18,13 +17,16 @@ export async function actionBuild(root?: string) { } export async function build(root?: string) { - root = resolve(root ?? ""); + root = path.resolve(root ?? ""); process.chdir(root); - const pkg = await readPackageJson(root); - const [entry] = await fg(["src/**/*.ts", "src/**/*.tsx"]); + const [pkg, isTs] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); + + const inputFiles = getInputFiles(pkg.widgetName, isTs); + + // const [entry] = await fg(["src/**/*.ts", "src/**/*.tsx"]); const bundle = await rolldown({ - input: entry, + input: inputFiles.widgetFile, external: [/^react\/jsx-runtime$/] }); @@ -34,9 +36,60 @@ export async function build(root?: string) { }); } -export async function readPackageJson(root: string): Promise { - const filePath = resolve(root, "package.json"); - const pkg = PackageJson(await readFile(filePath, "utf-8")); +interface InputFiles { + editorConfig: string; + editorPreview: string; + packageXml: string; + widgetFile: string; + widgetXml: string; +} + +function getInputFiles(widgetName: string, isTs: boolean): InputFiles { + const ext = isTs ? "ts" : "js"; + const extJsx = isTs ? 
"tsx" : "jsx"; + + const editorConfig = path.format({ + dir: "src", + name: widgetName, + ext: `editorConfig.${ext}` + }); + + const editorPreview = path.format({ + dir: "src", + name: widgetName, + ext: `editorPreview.${extJsx}` + }); + + const packageXml = path.format({ + dir: "src", + base: "package.xml" + }); + + const widgetFile = path.format({ + dir: "src", + name: widgetName, + ext: extJsx + }); + + const widgetXml = path.format({ + dir: "src", + name: widgetName, + ext: "xml" + }); + + return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; +} + +async function isTypeScriptProject(root: string): Promise { + return fs.access(path.resolve(root, "tsconfig.json"), fs.constants.F_OK).then( + () => true, + () => false + ); +} + +async function readPackageJson(root: string): Promise { + const filePath = path.resolve(root, "package.json"); + const pkg = PackageJson(await fs.readFile(filePath, "utf-8")); if (pkg instanceof ArkErrors) { throw new Error(`Invalid package.json:\n${pkg.summary}`); diff --git a/packages/mpx/src/constants.ts b/packages/mpx/src/constants.ts index 530ac15f..a4594b6c 100644 --- a/packages/mpx/src/constants.ts +++ b/packages/mpx/src/constants.ts @@ -1,7 +1,14 @@ -import { readFileSync } from "node:fs" +import { readFileSync } from "node:fs"; -const { version } = JSON.parse( - readFileSync(new URL("../package.json", import.meta.url)).toString() -) +const { version } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url)).toString()); -export const VERSION = version as string +export const VERSION = version as string; + +export const STD_EXTERNALS = [ + // "mendix" and internals under "mendix/" + /^mendix($|\/)/, + /^react$/, + /^react\/jsx-runtime$/, + /^react-dom$/, + /^big.js$/ +]; diff --git a/packages/mpx/src/lib/build-utils.ts b/packages/mpx/src/lib/build-utils.ts new file mode 100644 index 00000000..e69de29b diff --git a/packages/mpx/src/lib/core/PackageJson.ts b/packages/mpx/src/lib/parsers/PackageJson.ts similarity index 100% rename from packages/mpx/src/lib/core/PackageJson.ts rename to packages/mpx/src/lib/parsers/PackageJson.ts From f1ebbcd9498d1848bf6d938269d29f7d10321673 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 11:16:50 +0200 Subject: [PATCH 05/45] chore: config and parser --- packages/mpx/src/build.ts | 95 ++++++++++++++------- packages/mpx/src/lib/parsers/PackageJson.ts | 3 +- 2 files changed, 65 insertions(+), 33 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index c7f366c2..4597852b 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -20,13 +20,17 @@ export async function build(root?: string) { root = path.resolve(root ?? 
""); process.chdir(root); - const [pkg, isTs] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); + const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); - const inputFiles = getInputFiles(pkg.widgetName, isTs); + const config = new ProjectConfig({ + pkg, + isTsProject + }); + console.log(config.files, config.outputDirs); // const [entry] = await fg(["src/**/*.ts", "src/**/*.tsx"]); const bundle = await rolldown({ - input: inputFiles.widgetFile, + input: config.files.widgetFile, external: [/^react\/jsx-runtime$/] }); @@ -36,7 +40,7 @@ export async function build(root?: string) { }); } -interface InputFiles { +interface BundleInputFiles { editorConfig: string; editorPreview: string; packageXml: string; @@ -44,40 +48,67 @@ interface InputFiles { widgetXml: string; } -function getInputFiles(widgetName: string, isTs: boolean): InputFiles { - const ext = isTs ? "ts" : "js"; - const extJsx = isTs ? "tsx" : "jsx"; +interface BundleOutputDirs { + dist: string; + widgetDir: string; +} - const editorConfig = path.format({ - dir: "src", - name: widgetName, - ext: `editorConfig.${ext}` - }); +interface ProjectConfigInputs { + pkg: PackageJson; + isTsProject: boolean; +} - const editorPreview = path.format({ - dir: "src", - name: widgetName, - ext: `editorPreview.${extJsx}` - }); +class ProjectConfig { + readonly #dist = path.join("dist/tmp/widgets"); + readonly #inputs: ProjectConfigInputs; - const packageXml = path.format({ - dir: "src", - base: "package.xml" - }); + constructor(inputs: ProjectConfigInputs) { + this.#inputs = inputs; + } - const widgetFile = path.format({ - dir: "src", - name: widgetName, - ext: extJsx - }); + get files(): BundleInputFiles { + const { pkg, isTsProject } = this.#inputs; + const ext = isTsProject ? "ts" : "js"; + const extJsx = isTsProject ? 
"tsx" : "jsx"; + + const editorConfig = path.format({ + dir: "src", + name: pkg.widgetName, + ext: `editorConfig.${ext}` + }); + + const editorPreview = path.format({ + dir: "src", + name: pkg.widgetName, + ext: `editorPreview.${extJsx}` + }); + + const packageXml = path.format({ + dir: "src", + base: "package.xml" + }); + + const widgetFile = path.format({ + dir: "src", + name: pkg.widgetName, + ext: extJsx + }); + + const widgetXml = path.format({ + dir: "src", + name: pkg.widgetName, + ext: "xml" + }); + + return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; + } - const widgetXml = path.format({ - dir: "src", - name: widgetName, - ext: "xml" - }); + get outputDirs(): BundleOutputDirs { + const { pkg } = this.#inputs; + const widgetDir = path.join(this.#dist, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); - return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; + return { dist: this.#dist, widgetDir }; + } } async function isTypeScriptProject(root: string): Promise { diff --git a/packages/mpx/src/lib/parsers/PackageJson.ts b/packages/mpx/src/lib/parsers/PackageJson.ts index b2044481..dfe25105 100644 --- a/packages/mpx/src/lib/parsers/PackageJson.ts +++ b/packages/mpx/src/lib/parsers/PackageJson.ts @@ -3,7 +3,8 @@ import { type } from "arktype"; export const PackageJson = type("string.json.parse").to({ name: type("string > 0").to("string.trim"), version: "string.semver", - widgetName: type("string > 0").to("string.trim") + widgetName: type("string > 0").to("string.trim"), + packagePath: type(/^[a-zA-Z]+(\.[a-zA-Z]+)*$/).describe("must be dot separated path like 'example.widget'") }); export type PackageJson = typeof PackageJson.infer; From 50f39646bf24cf7ca7a42196b25eeaa838eedf1b Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 13:11:18 +0200 Subject: [PATCH 06/45] chore: add options --- packages/mpx/package.json | 2 +- packages/mpx/pnpm-lock.yaml | 17 ++++++++--------- packages/mpx/src/build.ts | 8 +++----- packages/mpx/src/cli.ts | 8 +++++--- packages/mpx/src/utils/colors.ts | 8 ++++++++ packages/mpx/src/utils/logging.ts | 14 ++++++++++++++ 6 files changed, 39 insertions(+), 18 deletions(-) create mode 100644 packages/mpx/src/utils/colors.ts create mode 100644 packages/mpx/src/utils/logging.ts diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 3212a9ad..078630ac 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -19,13 +19,13 @@ "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad", "dependencies": { "arktype": "^2.1.20", + "picocolors": "^1.1.1", "rolldown": "1.0.0-beta.26" }, "devDependencies": { "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", "cac": "^6.7.14", - "chalk": "^5.4.1", "fast-glob": "^3.3.3", "find-up-simple": "^1.0.1", "premove": "^4.0.0", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index ec70d551..5e89deb6 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -11,6 +11,9 @@ importers: arktype: specifier: ^2.1.20 version: 2.1.20 + picocolors: + specifier: ^1.1.1 + version: 1.1.1 rolldown: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 @@ -24,9 +27,6 @@ importers: cac: specifier: ^6.7.14 version: 6.7.14 - chalk: - specifier: ^5.4.1 - version: 5.4.1 fast-glob: specifier: ^3.3.3 version: 3.3.3 @@ -166,10 +166,6 @@ packages: resolution: 
{integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} - chalk@5.4.1: - resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - fast-glob@3.3.3: resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} @@ -209,6 +205,9 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} @@ -358,8 +357,6 @@ snapshots: cac@6.7.14: {} - chalk@5.4.1: {} - fast-glob@3.3.3: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -397,6 +394,8 @@ snapshots: braces: 3.0.3 picomatch: 2.3.1 + picocolors@1.1.1: {} + picomatch@2.3.1: {} premove@4.0.0: {} diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 4597852b..20c4c4f3 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,17 +1,15 @@ import { ArkErrors } from "arktype"; -import chalk from "chalk"; import fs from "node:fs/promises"; import path from "node:path"; import { rolldown } from "rolldown"; -import { pprint } from "./error-utils.js"; import { PackageJson } from "./lib/parsers/PackageJson.js"; +import { printError } from "./utils/logging.js"; -export async function actionBuild(root?: string) { +export async function buildCommand(root?: string): Promise { try { await build(root); } catch (error) { - console.error(chalk.red("BUILD ERROR")); - console.error(pprint(error instanceof Error ? 
error.message : String(error))); + printError(error); process.exit(1); } } diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index 588b0f7d..dac89323 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -1,13 +1,15 @@ #!/usr/bin/env node import { cac } from "cac"; -import { actionBuild } from "./build.js"; +import { buildCommand } from "./build.js"; import { VERSION } from "./constants.js"; const cli = cac("mpx"); -cli.command("dev [root]", "Run build in watch mode").action(actionBuild); -// cli.command("build", "Create production build").action(build); +cli.command("build [root]", "Build widget") + .option("-w, --watch", "watch for changes and rebuild") + .option("-m, --minify", "minify the output (this option is on in CI environment)") + .action(buildCommand); cli.help(); cli.version(VERSION); diff --git a/packages/mpx/src/utils/colors.ts b/packages/mpx/src/utils/colors.ts new file mode 100644 index 00000000..b232185f --- /dev/null +++ b/packages/mpx/src/utils/colors.ts @@ -0,0 +1,8 @@ +import { env } from "node:process"; +import pc from "picocolors"; + +// @see https://no-color.org +// @see https://www.npmjs.com/package/chalk +export const { bold, cyan, dim, gray, green, red, underline, yellow } = pc.createColors( + env.FORCE_COLOR !== "0" && !env.NO_COLOR +); diff --git a/packages/mpx/src/utils/logging.ts b/packages/mpx/src/utils/logging.ts new file mode 100644 index 00000000..2b8b76a8 --- /dev/null +++ b/packages/mpx/src/utils/logging.ts @@ -0,0 +1,14 @@ +import { bold, red } from "./colors.js"; + +export function printError(error: unknown): void { + if (error instanceof Error) { + const name = error.name; + console.error(formatMessage(error.message)); + } else { + console.error(red("Unknown error:"), bold(String(error))); + } +} + +const formatMessage = (message: string): string => { + return bold(red(`[Error] ${bold(message)}`)); +}; From 13bf0f36b29faf7efd3c7d186ac1144f0bea78b7 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:05:41 +0200 Subject: [PATCH 07/45] feat: connect build command with rolldown --- packages/mpx/package.json | 21 +- packages/mpx/pnpm-lock.yaml | 294 +++++++++++++++++--- packages/mpx/rolldown.config.ts | 3 +- packages/mpx/src/build.ts | 136 +++++++-- packages/mpx/src/cli.ts | 4 +- packages/mpx/src/error-utils.ts | 8 - packages/mpx/src/lib/parsers/PackageJson.ts | 18 +- packages/mpx/src/utils/error.ts | 24 ++ packages/mpx/src/utils/logging.ts | 14 - packages/mpx/tsconfig.json | 3 +- 10 files changed, 426 insertions(+), 99 deletions(-) delete mode 100644 packages/mpx/src/error-utils.ts create mode 100644 packages/mpx/src/utils/error.ts delete mode 100644 packages/mpx/src/utils/logging.ts diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 078630ac..3a232a66 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -6,7 +6,8 @@ "scripts": { "preinstall": "npx only-allow pnpm", "test": "echo 'test is missing'", - "dev": "premove dist && rolldown -c rolldown.config.ts -w" + "dev": "premove dist && rolldown -c rolldown.config.ts -w", + "build": "premove dist && rolldown -c rolldown.config.ts" }, "keywords": [ "mendix", @@ -17,18 +18,22 @@ "author": "", "license": "Apache-2.0", "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad", - "dependencies": { - "arktype": "^2.1.20", - "picocolors": "^1.1.1", - "rolldown": 
"1.0.0-beta.26" - }, "devDependencies": { "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", "cac": "^6.7.14", + "consola": "^3.4.2", "fast-glob": "^3.3.3", - "find-up-simple": "^1.0.1", + "picocolors": "^1.1.1", "premove": "^4.0.0", - "prettier": "^3.6.2" + "prettier": "^3.6.2", + "pretty-ms": "^9.2.0", + "rollup": "^4.45.0", + "signal-exit": "^4.1.0", + "typescript": "^5.8.3", + "zod": "^4.0.5" + }, + "dependencies": { + "rolldown": "1.0.0-beta.26" } } diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 5e89deb6..e6991ec5 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -8,12 +8,6 @@ importers: .: dependencies: - arktype: - specifier: ^2.1.20 - version: 2.1.20 - picocolors: - specifier: ^1.1.1 - version: 1.1.1 rolldown: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 @@ -27,27 +21,39 @@ importers: cac: specifier: ^6.7.14 version: 6.7.14 + consola: + specifier: ^3.4.2 + version: 3.4.2 fast-glob: specifier: ^3.3.3 version: 3.3.3 - find-up-simple: - specifier: ^1.0.1 - version: 1.0.1 + picocolors: + specifier: ^1.1.1 + version: 1.1.1 premove: specifier: ^4.0.0 version: 4.0.0 prettier: specifier: ^3.6.2 version: 3.6.2 + pretty-ms: + specifier: ^9.2.0 + version: 9.2.0 + rollup: + specifier: ^4.45.0 + version: 4.45.0 + signal-exit: + specifier: ^4.1.0 + version: 4.1.0 + typescript: + specifier: ^5.8.3 + version: 5.8.3 + zod: + specifier: ^4.0.5 + version: 4.0.5 packages: - '@ark/schema@0.46.0': - resolution: {integrity: sha512-c2UQdKgP2eqqDArfBqQIJppxJHvNNXuQPeuSPlDML4rjw+f1cu0qAlzOG4b8ujgm9ctIDWwhpyw6gjG5ledIVQ==} - - '@ark/util@0.46.0': - resolution: {integrity: sha512-JPy/NGWn/lvf1WmGCPw2VGpBg5utZraE84I7wli18EDF3p3zc/e9WolT35tINeZO3l7C77SjqRJeAUoT0CvMRg==} - '@emnapi/core@1.4.4': resolution: {integrity: sha512-A9CnAbC6ARNMKcIcrQwq6HeHCjpcBZ5wSx4U01WXCqEKlrzB9F9315WDNHkrs2xbx7YjjSxbUYxuN6EQzpcY2g==} @@ -142,12 +148,115 @@ packages: '@rolldown/pluginutils@1.0.0-beta.26': resolution: {integrity: sha512-r/5po89voz/QRPDmoErL10+hVuTAuz1SHvokx+yWBlOIPB5C41jC7QhLqq9kaebx/+EHyoV3z22/qBfX81Ns8A==} + '@rollup/rollup-android-arm-eabi@4.45.0': + resolution: {integrity: sha512-2o/FgACbji4tW1dzXOqAV15Eu7DdgbKsF2QKcxfG4xbh5iwU7yr5RRP5/U+0asQliSYv5M4o7BevlGIoSL0LXg==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.45.0': + resolution: {integrity: sha512-PSZ0SvMOjEAxwZeTx32eI/j5xSYtDCRxGu5k9zvzoY77xUNssZM+WV6HYBLROpY5CkXsbQjvz40fBb7WPwDqtQ==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.45.0': + resolution: {integrity: sha512-BA4yPIPssPB2aRAWzmqzQ3y2/KotkLyZukVB7j3psK/U3nVJdceo6qr9pLM2xN6iRP/wKfxEbOb1yrlZH6sYZg==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.45.0': + resolution: {integrity: sha512-Pr2o0lvTwsiG4HCr43Zy9xXrHspyMvsvEw4FwKYqhli4FuLE5FjcZzuQ4cfPe0iUFCvSQG6lACI0xj74FDZKRA==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.45.0': + resolution: {integrity: sha512-lYE8LkE5h4a/+6VnnLiL14zWMPnx6wNbDG23GcYFpRW1V9hYWHAw9lBZ6ZUIrOaoK7NliF1sdwYGiVmziUF4vA==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.45.0': + resolution: {integrity: sha512-PVQWZK9sbzpvqC9Q0GlehNNSVHR+4m7+wET+7FgSnKG3ci5nAMgGmr9mGBXzAuE5SvguCKJ6mHL6vq1JaJ/gvw==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.45.0': + resolution: {integrity: sha512-hLrmRl53prCcD+YXTfNvXd776HTxNh8wPAMllusQ+amcQmtgo3V5i/nkhPN6FakW+QVLoUUr2AsbtIRPFU3xIA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.45.0': + resolution: {integrity: 
sha512-XBKGSYcrkdiRRjl+8XvrUR3AosXU0NvF7VuqMsm7s5nRy+nt58ZMB19Jdp1RdqewLcaYnpk8zeVs/4MlLZEJxw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.45.0': + resolution: {integrity: sha512-fRvZZPUiBz7NztBE/2QnCS5AtqLVhXmUOPj9IHlfGEXkapgImf4W9+FSkL8cWqoAjozyUzqFmSc4zh2ooaeF6g==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.45.0': + resolution: {integrity: sha512-Btv2WRZOcUGi8XU80XwIvzTg4U6+l6D0V6sZTrZx214nrwxw5nAi8hysaXj/mctyClWgesyuxbeLylCBNauimg==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loongarch64-gnu@4.45.0': + resolution: {integrity: sha512-Li0emNnwtUZdLwHjQPBxn4VWztcrw/h7mgLyHiEI5Z0MhpeFGlzaiBHpSNVOMB/xucjXTTcO+dhv469Djr16KA==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-powerpc64le-gnu@4.45.0': + resolution: {integrity: sha512-sB8+pfkYx2kvpDCfd63d5ScYT0Fz1LO6jIb2zLZvmK9ob2D8DeVqrmBDE0iDK8KlBVmsTNzrjr3G1xV4eUZhSw==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.45.0': + resolution: {integrity: sha512-5GQ6PFhh7E6jQm70p1aW05G2cap5zMOvO0se5JMecHeAdj5ZhWEHbJ4hiKpfi1nnnEdTauDXxPgXae/mqjow9w==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.45.0': + resolution: {integrity: sha512-N/euLsBd1rekWcuduakTo/dJw6U6sBP3eUq+RXM9RNfPuWTvG2w/WObDkIvJ2KChy6oxZmOSC08Ak2OJA0UiAA==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.45.0': + resolution: {integrity: sha512-2l9sA7d7QdikL0xQwNMO3xURBUNEWyHVHfAsHsUdq+E/pgLTUcCE+gih5PCdmyHmfTDeXUWVhqL0WZzg0nua3g==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.45.0': + resolution: {integrity: sha512-XZdD3fEEQcwG2KrJDdEQu7NrHonPxxaV0/w2HpvINBdcqebz1aL+0vM2WFJq4DeiAVT6F5SUQas65HY5JDqoPw==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.45.0': + resolution: {integrity: sha512-7ayfgvtmmWgKWBkCGg5+xTQ0r5V1owVm67zTrsEY1008L5ro7mCyGYORomARt/OquB9KY7LpxVBZes+oSniAAQ==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-win32-arm64-msvc@4.45.0': + resolution: {integrity: sha512-B+IJgcBnE2bm93jEW5kHisqvPITs4ddLOROAcOc/diBgrEiQJJ6Qcjby75rFSmH5eMGrqJryUgJDhrfj942apQ==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.45.0': + resolution: {integrity: sha512-+CXwwG66g0/FpWOnP/v1HnrGVSOygK/osUbu3wPRy8ECXjoYKjRAyfxYpDQOfghC5qPJYLPH0oN4MCOjwgdMug==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.45.0': + resolution: {integrity: sha512-SRf1cytG7wqcHVLrBc9VtPK4pU5wxiB/lNIkNmW2ApKXIg+RpqwHfsaEK+e7eH4A1BpI6BX/aBWXxZCIrJg3uA==} + cpu: [x64] + os: [win32] + '@tsconfig/node22@22.0.2': resolution: {integrity: sha512-Kmwj4u8sDRDrMYRoN9FDEcXD8UpBSaPQQ24Gz+Gamqfm7xxn+GBR7ge/Z7pK8OXNGyUzbSwJj+TH6B+DS/epyA==} '@tybys/wasm-util@0.10.0': resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==} + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/node@24.0.13': resolution: {integrity: sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ==} @@ -155,9 +264,6 @@ packages: resolution: {integrity: sha512-BGcItUBWSMRgOCe+SVZJ+S7yTRG0eGt9cXAHev72yuGcY23hnLA7Bky5L/xLyPINoSN95geovfBkqoTlNZYa7w==} engines: {node: '>=14'} - arktype@2.1.20: - resolution: {integrity: sha512-IZCEEXaJ8g+Ijd59WtSYwtjnqXiwM8sWQ5EjGamcto7+HVN9eK0C4p0zDlCuAwWhpqr6fIBkxPuYDl4/Mcj/+Q==} - braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} 
engines: {node: '>=8'} @@ -166,6 +272,10 @@ packages: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + fast-glob@3.3.3: resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} @@ -177,9 +287,10 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} - find-up-simple@1.0.1: - resolution: {integrity: sha512-afd4O7zpqHeRyg4PfDQsXmlDe2PfdHtJt6Akt8jOWaApLOZk5JXs6VMR29lz03pRe9mpykrRCYIYxaJYcfpncQ==} - engines: {node: '>=18'} + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} @@ -205,6 +316,10 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} + parse-ms@4.0.0: + resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==} + engines: {node: '>=18'} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -222,6 +337,10 @@ packages: engines: {node: '>=14'} hasBin: true + pretty-ms@9.2.0: + resolution: {integrity: sha512-4yf0QO/sllf/1zbZWYnvWw3NxCQwLXKzIj0G849LSufP15BXKM0rbD2Z3wVnkMfjdn/CB0Dpp444gYAACdsplg==} + engines: {node: '>=18'} + queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -233,9 +352,18 @@ packages: resolution: {integrity: sha512-2rad1JDFst/GD1J86RuqN1SIP8O8Xv4UbqNyKaVayXTjgF0D6HpvTnUZ1RQ6tANpZweGmq4v6Ay0uyRNEycFPw==} hasBin: true + rollup@4.45.0: + resolution: {integrity: sha512-WLjEcJRIo7i3WDDgOIJqVI2d+lAC3EwvOGy+Xfq6hs+GQuAA4Di/H72xmXkOhrIWFg2PFYSKZYfH0f4vfKXN4A==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -243,16 +371,18 @@ packages: tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + typescript@5.8.3: + resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} + engines: {node: '>=14.17'} + hasBin: true + undici-types@7.8.0: resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} -snapshots: - - '@ark/schema@0.46.0': - dependencies: - '@ark/util': 0.46.0 + zod@4.0.5: + resolution: 
{integrity: sha512-/5UuuRPStvHXu7RS+gmvRf4NXrNxpSllGwDnCBcJZtQsKrviYXm54yDGV2KYNLT5kq0lHGcl7lqWJLgSaG+tgA==} - '@ark/util@0.46.0': {} +snapshots: '@emnapi/core@1.4.4': dependencies: @@ -333,6 +463,66 @@ snapshots: '@rolldown/pluginutils@1.0.0-beta.26': {} + '@rollup/rollup-android-arm-eabi@4.45.0': + optional: true + + '@rollup/rollup-android-arm64@4.45.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.45.0': + optional: true + + '@rollup/rollup-darwin-x64@4.45.0': + optional: true + + '@rollup/rollup-freebsd-arm64@4.45.0': + optional: true + + '@rollup/rollup-freebsd-x64@4.45.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.45.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.45.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.45.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.45.0': + optional: true + + '@rollup/rollup-linux-loongarch64-gnu@4.45.0': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.45.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.45.0': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.45.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.45.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.45.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.45.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.45.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.45.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.45.0': + optional: true + '@tsconfig/node22@22.0.2': {} '@tybys/wasm-util@0.10.0': @@ -340,23 +530,22 @@ snapshots: tslib: 2.8.1 optional: true + '@types/estree@1.0.8': {} + '@types/node@24.0.13': dependencies: undici-types: 7.8.0 ansis@4.1.0: {} - arktype@2.1.20: - dependencies: - '@ark/schema': 0.46.0 - '@ark/util': 0.46.0 - braces@3.0.3: dependencies: fill-range: 7.1.1 cac@6.7.14: {} + consola@3.4.2: {} + fast-glob@3.3.3: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -373,7 +562,8 @@ snapshots: dependencies: to-regex-range: 5.0.1 - find-up-simple@1.0.1: {} + fsevents@2.3.3: + optional: true glob-parent@5.1.2: dependencies: @@ -394,6 +584,8 @@ snapshots: braces: 3.0.3 picomatch: 2.3.1 + parse-ms@4.0.0: {} + picocolors@1.1.1: {} picomatch@2.3.1: {} @@ -402,6 +594,10 @@ snapshots: prettier@3.6.2: {} + pretty-ms@9.2.0: + dependencies: + parse-ms: 4.0.0 + queue-microtask@1.2.3: {} reusify@1.1.0: {} @@ -426,10 +622,38 @@ snapshots: '@rolldown/binding-win32-ia32-msvc': 1.0.0-beta.26 '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.26 + rollup@4.45.0: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.45.0 + '@rollup/rollup-android-arm64': 4.45.0 + '@rollup/rollup-darwin-arm64': 4.45.0 + '@rollup/rollup-darwin-x64': 4.45.0 + '@rollup/rollup-freebsd-arm64': 4.45.0 + '@rollup/rollup-freebsd-x64': 4.45.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.45.0 + '@rollup/rollup-linux-arm-musleabihf': 4.45.0 + '@rollup/rollup-linux-arm64-gnu': 4.45.0 + '@rollup/rollup-linux-arm64-musl': 4.45.0 + '@rollup/rollup-linux-loongarch64-gnu': 4.45.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.45.0 + '@rollup/rollup-linux-riscv64-gnu': 4.45.0 + '@rollup/rollup-linux-riscv64-musl': 4.45.0 + '@rollup/rollup-linux-s390x-gnu': 4.45.0 + '@rollup/rollup-linux-x64-gnu': 4.45.0 + '@rollup/rollup-linux-x64-musl': 4.45.0 + '@rollup/rollup-win32-arm64-msvc': 4.45.0 + '@rollup/rollup-win32-ia32-msvc': 4.45.0 + '@rollup/rollup-win32-x64-msvc': 4.45.0 + fsevents: 2.3.3 + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 + 
signal-exit@4.1.0: {} + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 @@ -437,4 +661,8 @@ snapshots: tslib@2.8.1: optional: true + typescript@5.8.3: {} + undici-types@7.8.0: {} + + zod@4.0.5: {} diff --git a/packages/mpx/rolldown.config.ts b/packages/mpx/rolldown.config.ts index 8d5c67b0..1861eacc 100644 --- a/packages/mpx/rolldown.config.ts +++ b/packages/mpx/rolldown.config.ts @@ -1,8 +1,9 @@ import type { RolldownOptions } from "rolldown"; +import pkg from "./package.json" with { type: "json" }; const config: RolldownOptions = { input: "./src/cli.ts", - external: ["rolldown", "arktype"], + external: [Object.keys(pkg.dependencies ?? {})].flat(), output: { file: "./dist/mpx.js", inlineDynamicImports: true, diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 20c4c4f3..0c0ca653 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,41 +1,76 @@ -import { ArkErrors } from "arktype"; +import consola from "consola"; import fs from "node:fs/promises"; import path from "node:path"; -import { rolldown } from "rolldown"; +import { env } from "node:process"; +import ms from "pretty-ms"; +import { BuildOptions, build as buildBundle, watch } from "rolldown"; +import { onExit } from "signal-exit"; import { PackageJson } from "./lib/parsers/PackageJson.js"; -import { printError } from "./utils/logging.js"; +import { bold, green } from "./utils/colors.js"; +import { parsePackageError } from "./utils/error.js"; -export async function buildCommand(root?: string): Promise { +interface BuildCommandOptions { + watch?: boolean; + minify?: boolean; +} + +export async function build(root: string | undefined, options: BuildCommandOptions): Promise { try { - await build(root); + // consola.log(root, options); + await runBuild(root, options); } catch (error) { - printError(error); + consola.error(error); process.exit(1); } } -export async function build(root?: string) { +export async function runBuild(root: string | undefined, options: BuildCommandOptions = {}): Promise { + options.watch ??= false; + options.minify ??= !!env.CI; root = path.resolve(root ?? 
""); + process.chdir(root); const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); - const config = new ProjectConfig({ + const project = new ProjectConfig({ pkg, isTsProject }); - console.log(config.files, config.outputDirs); - // const [entry] = await fg(["src/**/*.ts", "src/**/*.tsx"]); - const bundle = await rolldown({ - input: config.files.widgetFile, - external: [/^react\/jsx-runtime$/] - }); + const bundles = await loadConfig(project); + + await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); + + if (!options.watch) { + buildMeasure.start(); + for (const bundle of bundles) { + await buildBundle(bundle); + consola.success(pprintSuccessOutput(bundle.output?.file!)); + } + buildMeasure.end(); + } else { + consola.start("Start build in watch mode"); + const watcher = watch(bundles); + watcher.on("event", event => { + if (event.code === "BUNDLE_END") { + let [outFile] = event.output; + outFile = bold(path.relative(root, outFile)); + consola.success(pprintSuccessOutput(outFile, event.duration)); + event.result?.close(); + } + + if (event.code === "END") { + consola.log(""); + } + }); - await bundle.write({ - format: "esm", - minify: false - }); + onExit(() => { + watcher.close(); + consola.log(""); + consola.log("Build watcher stopped"); + }); + } } interface BundleInputFiles { @@ -57,11 +92,15 @@ interface ProjectConfigInputs { } class ProjectConfig { - readonly #dist = path.join("dist/tmp/widgets"); + readonly #dist = path.join("dist"); readonly #inputs: ProjectConfigInputs; + readonly pkg: PackageJson; + readonly isTsProject: boolean; constructor(inputs: ProjectConfigInputs) { this.#inputs = inputs; + this.pkg = inputs.pkg; + this.isTsProject = inputs.isTsProject; } get files(): BundleInputFiles { @@ -103,12 +142,43 @@ class ProjectConfig { get outputDirs(): BundleOutputDirs { const { pkg } = this.#inputs; - const widgetDir = path.join(this.#dist, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); + // const widgetDir = path.join(this.#dist, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); + const widgetDir = this.#dist; return { dist: this.#dist, widgetDir }; } } +function defaultConfig(project: ProjectConfig): BuildOptions[] { + const esmBundle = { + input: project.files.widgetFile, + external: ["react/jsx-runtime"], + output: { + file: path.join(project.outputDirs.widgetDir, "widget.mjs"), + format: "esm" + } + } satisfies BuildOptions; + + const umdBundle = { + input: project.files.widgetFile, + external: ["react/jsx-runtime"], + output: { + file: path.join(project.outputDirs.widgetDir, "widget.js"), + format: "umd", + name: `${project.pkg.packagePath}.${project.pkg.widgetName}`, + globals: { + "react/jsx-runtime": "react_jsx_runtime" + } + } + } satisfies BuildOptions; + + return [esmBundle, umdBundle]; +} + +async function loadConfig(project: ProjectConfig): Promise { + return defaultConfig(project); +} + async function isTypeScriptProject(root: string): Promise { return fs.access(path.resolve(root, "tsconfig.json"), fs.constants.F_OK).then( () => true, @@ -118,11 +188,27 @@ async function isTypeScriptProject(root: string): Promise { async function readPackageJson(root: string): Promise { const filePath = path.resolve(root, "package.json"); - const pkg = PackageJson(await fs.readFile(filePath, "utf-8")); - - if (pkg instanceof ArkErrors) { - throw new Error(`Invalid package.json:\n${pkg.summary}`); + try { + return PackageJson.parse(JSON.parse(await fs.readFile(filePath, "utf-8"))); + } catch 
(error) {
+        throw parsePackageError(error);
     }
-
-    return pkg;
 }
+
+function pprintSuccessOutput(file: string, duration?: number): string {
+    if (!duration) {
+        return `Built ${bold(file)}`;
+    }
+    return `Built ${bold(file)} in ${green(ms(duration))}`;
+}
+
+const buildMeasure = {
+    start() {
+        performance.mark("build-start");
+    },
+    end() {
+        performance.mark("build-end");
+        const buildInfo = performance.measure("build", "build-start", "build-end");
+        consola.success("Done in", green(ms(buildInfo.duration)));
+    }
+};
diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts
index dac89323..f042ae59 100644
--- a/packages/mpx/src/cli.ts
+++ b/packages/mpx/src/cli.ts
@@ -1,7 +1,7 @@
 #!/usr/bin/env node
 
 import { cac } from "cac";
-import { buildCommand } from "./build.js";
+import { build } from "./build.js";
 import { VERSION } from "./constants.js";
 
 const cli = cac("mpx");
@@ -9,7 +9,7 @@ const cli = cac("mpx");
 cli.command("build [root]", "Build widget")
     .option("-w, --watch", "watch for changes and rebuild")
    .option("-m, --minify", "minify the output (this option is on in CI environment)")
-    .action(buildCommand);
+    .action(build);
 
 cli.help();
 cli.version(VERSION);
diff --git a/packages/mpx/src/error-utils.ts b/packages/mpx/src/error-utils.ts
deleted file mode 100644
index 28f9c449..00000000
--- a/packages/mpx/src/error-utils.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import chalk from "chalk";
-
-export function pprint(msg: string) {
-    return msg
-        .split("\n")
-        .map(line => chalk.yellow(line))
-        .join("\n");
-}
diff --git a/packages/mpx/src/lib/parsers/PackageJson.ts b/packages/mpx/src/lib/parsers/PackageJson.ts
index dfe25105..1bdcee13 100644
--- a/packages/mpx/src/lib/parsers/PackageJson.ts
+++ b/packages/mpx/src/lib/parsers/PackageJson.ts
@@ -1,10 +1,14 @@
-import { type } from "arktype";
+import * as z from "zod";
 
-export const PackageJson = type("string.json.parse").to({
-    name: type("string > 0").to("string.trim"),
-    version: "string.semver",
-    widgetName: type("string > 0").to("string.trim"),
-    packagePath: type(/^[a-zA-Z]+(\.[a-zA-Z]+)*$/).describe("must be dot separated path like 'example.widget'")
+export const PackageJson = z.object({
+    name: z.string().min(1).trim(),
+    version: z.string().refine(val => /^\d+\.\d+\.\d+$/.test(val), {
+        message: "Invalid semver"
+    }),
+    widgetName: z.string().min(1).trim(),
+    packagePath: z.string().regex(/^[a-zA-Z]+(\.[a-zA-Z]+)*$/, {
+        message: "must be dot separated path like 'example.widget'"
+    })
 });
 
-export type PackageJson = typeof PackageJson.infer;
+export type PackageJson = z.infer<typeof PackageJson>;
diff --git a/packages/mpx/src/utils/error.ts b/packages/mpx/src/utils/error.ts
new file mode 100644
index 00000000..0ca3c838
--- /dev/null
+++ b/packages/mpx/src/utils/error.ts
@@ -0,0 +1,24 @@
+import { prettifyError, ZodError } from "zod";
+
+class BuildError extends Error {
+    constructor(message: string) {
+        super(`Build failed with error:\n\n${message}`);
+        this.name = "BuildError";
+    }
+}
+
+export function parsePackageError(error: unknown) {
+    return new BuildError(`Failed to parse package.json:\n${formatMessage(error)}`);
+}
+
+export function formatMessage(error: unknown): string {
+    if (error instanceof ZodError) {
+        return prettifyError(error);
+    } else if (error instanceof Error) {
+        return error.message;
+    } else if (typeof error === "string") {
+        return error;
+    } else {
+        return String(error);
+    }
+}
diff --git a/packages/mpx/src/utils/logging.ts b/packages/mpx/src/utils/logging.ts
deleted file mode 100644
index 2b8b76a8..00000000
--- 
a/packages/mpx/src/utils/logging.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { bold, red } from "./colors.js"; - -export function printError(error: unknown): void { - if (error instanceof Error) { - const name = error.name; - console.error(formatMessage(error.message)); - } else { - console.error(red("Unknown error:"), bold(String(error))); - } -} - -const formatMessage = (message: string): string => { - return bold(red(`[Error] ${bold(message)}`)); -}; diff --git a/packages/mpx/tsconfig.json b/packages/mpx/tsconfig.json index cebe8bda..7caad30f 100644 --- a/packages/mpx/tsconfig.json +++ b/packages/mpx/tsconfig.json @@ -2,6 +2,7 @@ "extends": "@tsconfig/node22/tsconfig.json", "exclude": ["input"], "compilerOptions": { - "noEmit": true + "noEmit": true, + "resolveJsonModule": true } } From 98e48a4a1b6ab2ea325c801bf74cadad5d8a81b1 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:26:37 +0200 Subject: [PATCH 08/45] chore: update config --- packages/mpx/src/build.ts | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 0c0ca653..4b5dcacc 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -16,7 +16,6 @@ interface BuildCommandOptions { export async function build(root: string | undefined, options: BuildCommandOptions): Promise { try { - // consola.log(root, options); await runBuild(root, options); } catch (error) { consola.error(error); @@ -142,8 +141,7 @@ class ProjectConfig { get outputDirs(): BundleOutputDirs { const { pkg } = this.#inputs; - // const widgetDir = path.join(this.#dist, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); - const widgetDir = this.#dist; + const widgetDir = path.join(this.#dist, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); return { dist: this.#dist, widgetDir }; } @@ -154,7 +152,11 @@ function defaultConfig(project: ProjectConfig): BuildOptions[] { input: project.files.widgetFile, external: ["react/jsx-runtime"], output: { - file: path.join(project.outputDirs.widgetDir, "widget.mjs"), + file: path.format({ + dir: project.outputDirs.widgetDir, + name: project.pkg.widgetName, + ext: "mjs" + }), format: "esm" } } satisfies BuildOptions; @@ -163,7 +165,11 @@ function defaultConfig(project: ProjectConfig): BuildOptions[] { input: project.files.widgetFile, external: ["react/jsx-runtime"], output: { - file: path.join(project.outputDirs.widgetDir, "widget.js"), + file: path.format({ + dir: project.outputDirs.widgetDir, + name: project.pkg.widgetName, + ext: "js" + }), format: "umd", name: `${project.pkg.packagePath}.${project.pkg.widgetName}`, globals: { From 0d00aa51e7cb6ce570e80b15c8e63b1a50693cf8 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:34:17 +0200 Subject: [PATCH 09/45] chore: add logger --- packages/mpx/src/build.ts | 18 +++++++++--------- packages/mpx/src/utils/logger.ts | 9 +++++++++ 2 files changed, 18 insertions(+), 9 deletions(-) create mode 100644 packages/mpx/src/utils/logger.ts diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 4b5dcacc..2057bf28 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,4 +1,3 @@ -import consola from "consola"; import fs from "node:fs/promises"; import path from "node:path"; import { env } from "node:process"; @@ -8,6 +7,7 @@ import { onExit } from "signal-exit"; import { PackageJson } from 
"./lib/parsers/PackageJson.js"; import { bold, green } from "./utils/colors.js"; import { parsePackageError } from "./utils/error.js"; +import { logger } from "./utils/logger.js"; interface BuildCommandOptions { watch?: boolean; @@ -18,7 +18,7 @@ export async function build(root: string | undefined, options: BuildCommandOptio try { await runBuild(root, options); } catch (error) { - consola.error(error); + logger.error(error); process.exit(1); } } @@ -45,29 +45,29 @@ export async function runBuild(root: string | undefined, options: BuildCommandOp buildMeasure.start(); for (const bundle of bundles) { await buildBundle(bundle); - consola.success(pprintSuccessOutput(bundle.output?.file!)); + logger.success(pprintSuccessOutput(bundle.output?.file!)); } buildMeasure.end(); } else { - consola.start("Start build in watch mode"); + logger.start("Start build in watch mode"); const watcher = watch(bundles); watcher.on("event", event => { if (event.code === "BUNDLE_END") { let [outFile] = event.output; outFile = bold(path.relative(root, outFile)); - consola.success(pprintSuccessOutput(outFile, event.duration)); + logger.success(pprintSuccessOutput(outFile, event.duration)); event.result?.close(); } if (event.code === "END") { - consola.log(""); + logger.log(""); } }); onExit(() => { watcher.close(); - consola.log(""); - consola.log("Build watcher stopped"); + logger.log(""); + logger.log("Build watcher stopped"); }); } } @@ -215,6 +215,6 @@ const buildMeasure = { end() { performance.mark("build-end"); const buildInfo = performance.measure("build", "build-start", "build-end"); - consola.success("Done in", green(ms(buildInfo.duration))); + logger.success("Done in", green(ms(buildInfo.duration))); } }; diff --git a/packages/mpx/src/utils/logger.ts b/packages/mpx/src/utils/logger.ts new file mode 100644 index 00000000..4998fb2f --- /dev/null +++ b/packages/mpx/src/utils/logger.ts @@ -0,0 +1,9 @@ +import { createConsola } from "consola"; +import { env } from "node:process"; + +const CI = !!env.CI; + +export const logger = createConsola({ + level: CI ? 
2 : 3, + fancy: true +}); From 00798dda56ab5405a0c24e5a44efcc92d5aea58c Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:53:22 +0200 Subject: [PATCH 10/45] chore: add editor entries --- packages/mpx/src/build.ts | 104 ++++++++++++++++++++++++++++++++------ 1 file changed, 88 insertions(+), 16 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 2057bf28..36749c82 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -4,6 +4,7 @@ import { env } from "node:process"; import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; +import { STD_EXTERNALS } from "./constants.js"; import { PackageJson } from "./lib/parsers/PackageJson.js"; import { bold, green } from "./utils/colors.js"; import { parsePackageError } from "./utils/error.js"; @@ -80,6 +81,13 @@ interface BundleInputFiles { widgetXml: string; } +interface BundleOutputFiles { + editorConfig: string; + editorPreview: string; + esm: string; + umd: string; +} + interface BundleOutputDirs { dist: string; widgetDir: string; @@ -95,6 +103,8 @@ class ProjectConfig { readonly #inputs: ProjectConfigInputs; readonly pkg: PackageJson; readonly isTsProject: boolean; + readonly configExt = "editorConfig"; + readonly previewExt = "editorPreview"; constructor(inputs: ProjectConfigInputs) { this.#inputs = inputs; @@ -110,13 +120,13 @@ class ProjectConfig { const editorConfig = path.format({ dir: "src", name: pkg.widgetName, - ext: `editorConfig.${ext}` + ext: `${this.configExt}.${ext}` }); const editorPreview = path.format({ dir: "src", name: pkg.widgetName, - ext: `editorPreview.${extJsx}` + ext: `${this.previewExt}.${extJsx}` }); const packageXml = path.format({ @@ -145,31 +155,48 @@ class ProjectConfig { return { dist: this.#dist, widgetDir }; } + + get outputFiles(): BundleOutputFiles { + return { + esm: path.format({ + dir: this.outputDirs.widgetDir, + name: this.pkg.widgetName, + ext: "mjs" + }), + umd: path.format({ + dir: this.outputDirs.widgetDir, + name: this.pkg.widgetName, + ext: "js" + }), + editorConfig: path.format({ + dir: this.outputDirs.dist, + name: this.pkg.widgetName, + ext: `${this.configExt}.js` + }), + editorPreview: path.format({ + dir: this.outputDirs.dist, + name: this.pkg.widgetName, + ext: `${this.previewExt}.js` + }) + }; + } } -function defaultConfig(project: ProjectConfig): BuildOptions[] { +async function defaultConfig(project: ProjectConfig): Promise { const esmBundle = { input: project.files.widgetFile, - external: ["react/jsx-runtime"], + external: [...STD_EXTERNALS], output: { - file: path.format({ - dir: project.outputDirs.widgetDir, - name: project.pkg.widgetName, - ext: "mjs" - }), + file: project.outputFiles.esm, format: "esm" } } satisfies BuildOptions; const umdBundle = { input: project.files.widgetFile, - external: ["react/jsx-runtime"], + external: [...STD_EXTERNALS], output: { - file: path.format({ - dir: project.outputDirs.widgetDir, - name: project.pkg.widgetName, - ext: "js" - }), + file: project.outputFiles.umd, format: "umd", name: `${project.pkg.packagePath}.${project.pkg.widgetName}`, globals: { @@ -178,7 +205,38 @@ function defaultConfig(project: ProjectConfig): BuildOptions[] { } } satisfies BuildOptions; - return [esmBundle, umdBundle]; + const editorConfigBundle = { + input: project.files.editorConfig, + output: { + file: project.outputFiles.editorConfig, + format: "commonjs" + } + } satisfies 
BuildOptions; + + const editorPreviewBundle = { + input: project.files.editorPreview, + output: { + file: project.outputFiles.editorPreview, + format: "commonjs" + } + } satisfies BuildOptions; + + const bundles: BuildOptions[] = [esmBundle, umdBundle]; + + const [addEditorConfig, addEditorPreview] = await Promise.all([ + hasEditorConfig(project), + hasEditorPreview(project) + ]); + + if (addEditorConfig) { + bundles.push(editorConfigBundle); + } + + if (addEditorPreview) { + bundles.push(editorPreviewBundle); + } + + return bundles; } async function loadConfig(project: ProjectConfig): Promise { @@ -192,6 +250,20 @@ async function isTypeScriptProject(root: string): Promise { ); } +async function hasEditorConfig(project: ProjectConfig): Promise { + return fs.access(path.resolve(project.files.editorConfig), fs.constants.F_OK).then( + () => true, + () => false + ); +} + +async function hasEditorPreview(project: ProjectConfig): Promise { + return fs.access(path.resolve(project.files.editorPreview), fs.constants.F_OK).then( + () => true, + () => false + ); +} + async function readPackageJson(root: string): Promise { const filePath = path.resolve(root, "package.json"); try { From 828efcaa5dca9cb0085512b33c0eef08de6113e1 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:56:44 +0200 Subject: [PATCH 11/45] chore: report build errors in watch mode --- packages/mpx/src/build.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 36749c82..992996a1 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -60,6 +60,10 @@ export async function runBuild(root: string | undefined, options: BuildCommandOp event.result?.close(); } + if (event.code === "ERROR") { + logger.error(event.error); + } + if (event.code === "END") { logger.log(""); } From 8a23f3ddda1b61eaa152ce65a1d0b9863e4c28f7 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Mon, 14 Jul 2025 18:05:20 +0200 Subject: [PATCH 12/45] chore: finish with mpk --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 609 ++++++++++++++++++ packages/mpx/src/build.ts | 133 +--- packages/mpx/src/lib/build-utils.ts | 0 packages/mpx/src/utils/mpk.ts | 9 + .../src/{lib => utils}/parsers/PackageJson.ts | 0 packages/mpx/src/utils/project-config.ts | 129 ++++ 7 files changed, 764 insertions(+), 117 deletions(-) delete mode 100644 packages/mpx/src/lib/build-utils.ts create mode 100644 packages/mpx/src/utils/mpk.ts rename packages/mpx/src/{lib => utils}/parsers/PackageJson.ts (100%) create mode 100644 packages/mpx/src/utils/project-config.ts diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 3a232a66..2c3c4dfa 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -31,6 +31,7 @@ "rollup": "^4.45.0", "signal-exit": "^4.1.0", "typescript": "^5.8.3", + "zip-a-folder": "^3.1.9", "zod": "^4.0.5" }, "dependencies": { diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index e6991ec5..5bc7171a 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -48,6 +48,9 @@ importers: typescript: specifier: ^5.8.3 version: 5.8.3 + zip-a-folder: + specifier: ^3.1.9 + version: 3.1.9 zod: specifier: ^4.0.5 version: 4.0.5 @@ -63,6 +66,18 @@ packages: '@emnapi/wasi-threads@1.0.3': resolution: {integrity: sha512-8K5IFFsQqF9wQNJptGbS6FNKgUTsSRYnTqNCG1vPP8jFdjSv18n2mQfJpkt2Oibo9iBEzcDnDxNwKTzC7svlJw==} + 
'@isaacs/balanced-match@4.0.1': + resolution: {integrity: sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==} + engines: {node: 20 || >=22} + + '@isaacs/brace-expansion@5.0.0': + resolution: {integrity: sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==} + engines: {node: 20 || >=22} + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} @@ -85,6 +100,10 @@ packages: '@oxc-project/types@0.76.0': resolution: {integrity: sha512-CH3THIrSViKal8yV/Wh3FK0pFhp40nzW1MUDCik9fNuid2D/7JJXKJnfFOAvMxInGXDlvmgT6ACAzrl47TqzkQ==} + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + '@rolldown/binding-darwin-arm64@1.0.0-beta.26': resolution: {integrity: sha512-I73Ej+PVoCJiYQHpy45CHKkLgFqrYv9O1CUJs6TIav6f8f9WAVeN/k0YXrs0tgMO20AfsyEN8zenz2wprVWOYQ==} cpu: [arm64] @@ -260,22 +279,122 @@ packages: '@types/node@24.0.13': resolution: {integrity: sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ==} + abort-controller@3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.1.0: + resolution: {integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} + ansis@4.1.0: resolution: {integrity: sha512-BGcItUBWSMRgOCe+SVZJ+S7yTRG0eGt9cXAHev72yuGcY23hnLA7Bky5L/xLyPINoSN95geovfBkqoTlNZYa7w==} engines: {node: '>=14'} + archiver-utils@5.0.2: + resolution: {integrity: sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==} + engines: {node: '>= 14'} + + archiver@7.0.1: + resolution: {integrity: sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==} + engines: {node: '>= 14'} + + async@3.2.6: + resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} + + b4a@1.6.7: + resolution: {integrity: sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + bare-events@2.6.0: + resolution: {integrity: sha512-EKZ5BTXYExaNqi3I3f9RtEsaI/xBSGjE0XZCZilPzFAV/goswFHuPd9jEZlPIZ/iNZJwDSao9qRiScySz7MbQg==} + + base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + + brace-expansion@2.0.2: + resolution: 
{integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} + buffer-crc32@1.0.0: + resolution: {integrity: sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==} + engines: {node: '>=8.0.0'} + + buffer@6.0.3: + resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + compress-commons@6.0.2: + resolution: {integrity: sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==} + engines: {node: '>= 14'} + consola@3.4.2: resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} engines: {node: ^14.18.0 || >=16.10.0} + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + + crc-32@1.2.2: + resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} + engines: {node: '>=0.8'} + hasBin: true + + crc32-stream@6.0.0: + resolution: {integrity: sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==} + engines: {node: '>= 14'} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + event-target-shim@5.0.1: + resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + + fast-fifo@1.3.2: + resolution: {integrity: sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==} + fast-glob@3.3.3: resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} @@ -287,6 +406,10 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + 
fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} @@ -296,10 +419,32 @@ packages: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} + glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + hasBin: true + + glob@11.0.3: + resolution: {integrity: sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==} + engines: {node: 20 || >=22} + hasBin: true + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} @@ -308,6 +453,37 @@ packages: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + + jackspeak@4.1.1: + resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} + engines: {node: 20 || >=22} + + lazystream@1.0.1: + resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} + engines: {node: '>= 0.6.3'} + + lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + + lru-cache@11.1.0: + resolution: {integrity: sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==} + engines: {node: 20 || >=22} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -316,10 +492,45 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} 
engines: {node: '>=8.6'} + minimatch@10.0.3: + resolution: {integrity: sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==} + engines: {node: 20 || >=22} + + minimatch@5.1.6: + resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} + engines: {node: '>=10'} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + parse-ms@4.0.0: resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==} engines: {node: '>=18'} + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-scurry@2.0.0: + resolution: {integrity: sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==} + engines: {node: 20 || >=22} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -341,9 +552,26 @@ packages: resolution: {integrity: sha512-4yf0QO/sllf/1zbZWYnvWw3NxCQwLXKzIj0G849LSufP15BXKM0rbD2Z3wVnkMfjdn/CB0Dpp444gYAACdsplg==} engines: {node: '>=18'} + process-nextick-args@2.0.1: + resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + process@0.11.10: + resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} + engines: {node: '>= 0.6.0'} + queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readable-stream@4.7.0: + resolution: {integrity: sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + readdir-glob@1.1.3: + resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -360,10 +588,55 @@ packages: run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + 
safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} + streamx@2.22.1: + resolution: {integrity: sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + tar-stream@3.1.7: + resolution: {integrity: sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==} + + text-decoder@1.2.3: + resolution: {integrity: sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==} + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -379,6 +652,29 @@ packages: undici-types@7.8.0: resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + zip-a-folder@3.1.9: + resolution: {integrity: sha512-0TPP3eK5mbZxHnOE8w/Jg6gwxsxZOrA3hXHMfC3I4mcTvyJwNt7GZP8i6uiAMVNu43QTmVz0ngEMKcjgpLZLmQ==} + + zip-stream@6.0.1: + resolution: {integrity: sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==} + engines: {node: '>= 14'} + zod@4.0.5: resolution: {integrity: sha512-/5UuuRPStvHXu7RS+gmvRf4NXrNxpSllGwDnCBcJZtQsKrviYXm54yDGV2KYNLT5kq0lHGcl7lqWJLgSaG+tgA==} @@ -400,6 +696,21 
@@ snapshots: tslib: 2.8.1 optional: true + '@isaacs/balanced-match@4.0.1': {} + + '@isaacs/brace-expansion@5.0.0': + dependencies: + '@isaacs/balanced-match': 4.0.1 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + '@napi-rs/wasm-runtime@0.2.12': dependencies: '@emnapi/core': 1.4.4 @@ -423,6 +734,9 @@ snapshots: '@oxc-project/types@0.76.0': {} + '@pkgjs/parseargs@0.11.0': + optional: true + '@rolldown/binding-darwin-arm64@1.0.0-beta.26': optional: true @@ -536,16 +850,113 @@ snapshots: dependencies: undici-types: 7.8.0 + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + + ansi-regex@5.0.1: {} + + ansi-regex@6.1.0: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.1: {} + ansis@4.1.0: {} + archiver-utils@5.0.2: + dependencies: + glob: 10.4.5 + graceful-fs: 4.2.11 + is-stream: 2.0.1 + lazystream: 1.0.1 + lodash: 4.17.21 + normalize-path: 3.0.0 + readable-stream: 4.7.0 + + archiver@7.0.1: + dependencies: + archiver-utils: 5.0.2 + async: 3.2.6 + buffer-crc32: 1.0.0 + readable-stream: 4.7.0 + readdir-glob: 1.1.3 + tar-stream: 3.1.7 + zip-stream: 6.0.1 + + async@3.2.6: {} + + b4a@1.6.7: {} + + balanced-match@1.0.2: {} + + bare-events@2.6.0: + optional: true + + base64-js@1.5.1: {} + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + braces@3.0.3: dependencies: fill-range: 7.1.1 + buffer-crc32@1.0.0: {} + + buffer@6.0.3: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + cac@6.7.14: {} + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + compress-commons@6.0.2: + dependencies: + crc-32: 1.2.2 + crc32-stream: 6.0.0 + is-stream: 2.0.1 + normalize-path: 3.0.0 + readable-stream: 4.7.0 + consola@3.4.2: {} + core-util-is@1.0.3: {} + + crc-32@1.2.2: {} + + crc32-stream@6.0.0: + dependencies: + crc-32: 1.2.2 + readable-stream: 4.7.0 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + eastasianwidth@0.2.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + event-target-shim@5.0.1: {} + + events@3.3.0: {} + + fast-fifo@1.3.2: {} + fast-glob@3.3.3: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -562,6 +973,11 @@ snapshots: dependencies: to-regex-range: 5.0.1 + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + fsevents@2.3.3: optional: true @@ -569,14 +985,66 @@ snapshots: dependencies: is-glob: 4.0.3 + glob@10.4.5: + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + + glob@11.0.3: + dependencies: + foreground-child: 3.3.1 + jackspeak: 4.1.1 + minimatch: 10.0.3 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 2.0.0 + + graceful-fs@4.2.11: {} + + ieee754@1.2.1: {} + + inherits@2.0.4: {} + is-extglob@2.1.1: {} + is-fullwidth-code-point@3.0.0: {} + is-glob@4.0.3: dependencies: is-extglob: 2.1.1 is-number@7.0.0: {} + is-stream@2.0.1: {} + + isarray@1.0.0: {} + + isexe@2.0.0: {} + + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + jackspeak@4.1.1: + dependencies: + '@isaacs/cliui': 8.0.2 + + lazystream@1.0.1: + dependencies: + readable-stream: 2.3.8 + + lodash@4.17.21: {} + + lru-cache@10.4.3: {} + + lru-cache@11.1.0: {} + merge2@1.4.1: {} micromatch@4.0.8: @@ -584,8 +1052,38 @@ 
snapshots: braces: 3.0.3 picomatch: 2.3.1 + minimatch@10.0.3: + dependencies: + '@isaacs/brace-expansion': 5.0.0 + + minimatch@5.1.6: + dependencies: + brace-expansion: 2.0.2 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.2 + + minipass@7.1.2: {} + + normalize-path@3.0.0: {} + + package-json-from-dist@1.0.1: {} + parse-ms@4.0.0: {} + path-key@3.1.1: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + path-scurry@2.0.0: + dependencies: + lru-cache: 11.1.0 + minipass: 7.1.2 + picocolors@1.1.1: {} picomatch@2.3.1: {} @@ -598,8 +1096,34 @@ snapshots: dependencies: parse-ms: 4.0.0 + process-nextick-args@2.0.1: {} + + process@0.11.10: {} + queue-microtask@1.2.3: {} + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readable-stream@4.7.0: + dependencies: + abort-controller: 3.0.0 + buffer: 6.0.3 + events: 3.3.0 + process: 0.11.10 + string_decoder: 1.3.0 + + readdir-glob@1.1.3: + dependencies: + minimatch: 5.1.6 + reusify@1.1.0: {} rolldown@1.0.0-beta.26: @@ -652,8 +1176,63 @@ snapshots: dependencies: queue-microtask: 1.2.3 + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + signal-exit@4.1.0: {} + streamx@2.22.1: + dependencies: + fast-fifo: 1.3.2 + text-decoder: 1.2.3 + optionalDependencies: + bare-events: 2.6.0 + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.1.0 + + tar-stream@3.1.7: + dependencies: + b4a: 1.6.7 + fast-fifo: 1.3.2 + streamx: 2.22.1 + + text-decoder@1.2.3: + dependencies: + b4a: 1.6.7 + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 @@ -665,4 +1244,34 @@ snapshots: undici-types@7.8.0: {} + util-deprecate@1.0.2: {} + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + zip-a-folder@3.1.9: + dependencies: + archiver: 7.0.1 + glob: 11.0.3 + is-glob: 4.0.3 + + zip-stream@6.0.1: + dependencies: + archiver-utils: 5.0.2 + compress-commons: 6.0.2 + readable-stream: 4.7.0 + zod@4.0.5: {} diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 992996a1..33ab99f0 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -5,10 +5,12 @@ import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; import { STD_EXTERNALS } from "./constants.js"; -import { PackageJson } from "./lib/parsers/PackageJson.js"; import { bold, green } from "./utils/colors.js"; import { parsePackageError } from "./utils/error.js"; import { logger } from "./utils/logger.js"; +import { createMPK } from "./utils/mpk.js"; +import { PackageJson } from "./utils/parsers/PackageJson.js"; +import { ProjectConfig } from "./utils/project-config.js"; interface BuildCommandOptions { watch?: boolean; @@ -40,7 +42,11 @@ export async function 
runBuild(root: string | undefined, options: BuildCommandOp const bundles = await loadConfig(project); - await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); + // await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); + + console.dir(project.inputFiles); + console.dir(project.outputDirs); + console.dir(project.outputFiles); if (!options.watch) { buildMeasure.start(); @@ -48,6 +54,8 @@ export async function runBuild(root: string | undefined, options: BuildCommandOp await buildBundle(bundle); logger.success(pprintSuccessOutput(bundle.output?.file!)); } + await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); + logger.success(pprintSuccessOutput(project.outputFiles.mpk)); buildMeasure.end(); } else { logger.start("Start build in watch mode"); @@ -77,118 +85,9 @@ export async function runBuild(root: string | undefined, options: BuildCommandOp } } -interface BundleInputFiles { - editorConfig: string; - editorPreview: string; - packageXml: string; - widgetFile: string; - widgetXml: string; -} - -interface BundleOutputFiles { - editorConfig: string; - editorPreview: string; - esm: string; - umd: string; -} - -interface BundleOutputDirs { - dist: string; - widgetDir: string; -} - -interface ProjectConfigInputs { - pkg: PackageJson; - isTsProject: boolean; -} - -class ProjectConfig { - readonly #dist = path.join("dist"); - readonly #inputs: ProjectConfigInputs; - readonly pkg: PackageJson; - readonly isTsProject: boolean; - readonly configExt = "editorConfig"; - readonly previewExt = "editorPreview"; - - constructor(inputs: ProjectConfigInputs) { - this.#inputs = inputs; - this.pkg = inputs.pkg; - this.isTsProject = inputs.isTsProject; - } - - get files(): BundleInputFiles { - const { pkg, isTsProject } = this.#inputs; - const ext = isTsProject ? "ts" : "js"; - const extJsx = isTsProject ? 
"tsx" : "jsx"; - - const editorConfig = path.format({ - dir: "src", - name: pkg.widgetName, - ext: `${this.configExt}.${ext}` - }); - - const editorPreview = path.format({ - dir: "src", - name: pkg.widgetName, - ext: `${this.previewExt}.${extJsx}` - }); - - const packageXml = path.format({ - dir: "src", - base: "package.xml" - }); - - const widgetFile = path.format({ - dir: "src", - name: pkg.widgetName, - ext: extJsx - }); - - const widgetXml = path.format({ - dir: "src", - name: pkg.widgetName, - ext: "xml" - }); - - return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; - } - - get outputDirs(): BundleOutputDirs { - const { pkg } = this.#inputs; - const widgetDir = path.join(this.#dist, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); - - return { dist: this.#dist, widgetDir }; - } - - get outputFiles(): BundleOutputFiles { - return { - esm: path.format({ - dir: this.outputDirs.widgetDir, - name: this.pkg.widgetName, - ext: "mjs" - }), - umd: path.format({ - dir: this.outputDirs.widgetDir, - name: this.pkg.widgetName, - ext: "js" - }), - editorConfig: path.format({ - dir: this.outputDirs.dist, - name: this.pkg.widgetName, - ext: `${this.configExt}.js` - }), - editorPreview: path.format({ - dir: this.outputDirs.dist, - name: this.pkg.widgetName, - ext: `${this.previewExt}.js` - }) - }; - } -} - async function defaultConfig(project: ProjectConfig): Promise { const esmBundle = { - input: project.files.widgetFile, + input: project.inputFiles.widgetFile, external: [...STD_EXTERNALS], output: { file: project.outputFiles.esm, @@ -197,7 +96,7 @@ async function defaultConfig(project: ProjectConfig): Promise { } satisfies BuildOptions; const umdBundle = { - input: project.files.widgetFile, + input: project.inputFiles.widgetFile, external: [...STD_EXTERNALS], output: { file: project.outputFiles.umd, @@ -210,7 +109,7 @@ async function defaultConfig(project: ProjectConfig): Promise { } satisfies BuildOptions; const editorConfigBundle = { - input: project.files.editorConfig, + input: project.inputFiles.editorConfig, output: { file: project.outputFiles.editorConfig, format: "commonjs" @@ -218,7 +117,7 @@ async function defaultConfig(project: ProjectConfig): Promise { } satisfies BuildOptions; const editorPreviewBundle = { - input: project.files.editorPreview, + input: project.inputFiles.editorPreview, output: { file: project.outputFiles.editorPreview, format: "commonjs" @@ -255,14 +154,14 @@ async function isTypeScriptProject(root: string): Promise { } async function hasEditorConfig(project: ProjectConfig): Promise { - return fs.access(path.resolve(project.files.editorConfig), fs.constants.F_OK).then( + return fs.access(path.resolve(project.inputFiles.editorConfig), fs.constants.F_OK).then( () => true, () => false ); } async function hasEditorPreview(project: ProjectConfig): Promise { - return fs.access(path.resolve(project.files.editorPreview), fs.constants.F_OK).then( + return fs.access(path.resolve(project.inputFiles.editorPreview), fs.constants.F_OK).then( () => true, () => false ); diff --git a/packages/mpx/src/lib/build-utils.ts b/packages/mpx/src/lib/build-utils.ts deleted file mode 100644 index e69de29b..00000000 diff --git a/packages/mpx/src/utils/mpk.ts b/packages/mpx/src/utils/mpk.ts new file mode 100644 index 00000000..61059e1e --- /dev/null +++ b/packages/mpx/src/utils/mpk.ts @@ -0,0 +1,9 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { zip } from "zip-a-folder"; + +export async function createMPK(contentRoot: string, 
filename: string): Promise { + const dst = path.dirname(filename); + await fs.mkdir(dst, { recursive: true }); + await zip(contentRoot, filename); +} diff --git a/packages/mpx/src/lib/parsers/PackageJson.ts b/packages/mpx/src/utils/parsers/PackageJson.ts similarity index 100% rename from packages/mpx/src/lib/parsers/PackageJson.ts rename to packages/mpx/src/utils/parsers/PackageJson.ts diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts new file mode 100644 index 00000000..6d35e01b --- /dev/null +++ b/packages/mpx/src/utils/project-config.ts @@ -0,0 +1,129 @@ +import path from "node:path"; +import { PackageJson } from "./parsers/PackageJson.js"; + +/** Files located in src directory */ +interface BundleInputFiles { + editorConfig: string; + editorPreview: string; + packageXml: string; + widgetFile: string; + widgetXml: string; +} + +/** Files for that will be in final build output */ +interface BundleOutputFiles { + editorConfig: string; + editorPreview: string; + esm: string; + umd: string; + mpk: string; +} + +interface BundleOutputDirs { + dist: string; + mpkDir: string; + contentRoot: string; + widgetDir: string; +} + +interface ProjectConfigInputs { + pkg: PackageJson; + isTsProject: boolean; +} + +export class ProjectConfig { + /** Output directory for built files */ + readonly dist = "dist"; + /** Package root directory that contains all widget files shipped with mpk */ + readonly contentRoot = path.join(this.dist, "tmp", "widgets"); + /** Widget package.json */ + readonly pkg: PackageJson; + readonly isTsProject: boolean; + + constructor(inputs: ProjectConfigInputs) { + this.pkg = inputs.pkg; + this.isTsProject = inputs.isTsProject; + } + + get inputFiles(): BundleInputFiles { + const { pkg, isTsProject } = this; + const ext = isTsProject ? "ts" : "js"; + const extJsx = isTsProject ? 
"tsx" : "jsx"; + + const editorConfig = path.format({ + dir: "src", + name: pkg.widgetName, + ext: `editorConfig.${ext}` + }); + + const editorPreview = path.format({ + dir: "src", + name: pkg.widgetName, + ext: `editorPreview.${extJsx}` + }); + + const packageXml = path.format({ + dir: "src", + base: "package.xml" + }); + + const widgetFile = path.format({ + dir: "src", + name: pkg.widgetName, + ext: extJsx + }); + + const widgetXml = path.format({ + dir: "src", + name: pkg.widgetName, + ext: "xml" + }); + + return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; + } + + /** Directory where widget bundles will be output */ + get widgetDir(): string { + const { pkg, contentRoot } = this; + return path.join(contentRoot, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); + } + + get outputDirs(): BundleOutputDirs { + return { + dist: this.dist, + mpkDir: path.join(this.dist, this.pkg.version), + contentRoot: this.contentRoot, + widgetDir: this.widgetDir + }; + } + + get outputFiles(): BundleOutputFiles { + const { pkg, outputDirs } = this; + return { + esm: path.format({ + dir: outputDirs.widgetDir, + name: pkg.widgetName, + ext: "mjs" + }), + umd: path.format({ + dir: outputDirs.widgetDir, + name: pkg.widgetName, + ext: "js" + }), + editorConfig: path.format({ + dir: outputDirs.contentRoot, + name: pkg.widgetName, + ext: `editorConfig.js` + }), + editorPreview: path.format({ + dir: outputDirs.contentRoot, + name: pkg.widgetName, + ext: `editorPreview.js` + }), + mpk: path.format({ + dir: outputDirs.mpkDir, + base: `${pkg.packagePath}.${pkg.widgetName}.mpk` + }) + }; + } +} From 57d17cdb6d23f98671b386691e0f93260cfde91d Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 11:06:29 +0200 Subject: [PATCH 13/45] chore: refactor --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 166 +++++++++++++++++++++++++++++++ packages/mpx/src/build.ts | 90 +++++++++++------ packages/mpx/src/utils/logger.ts | 9 +- 4 files changed, 231 insertions(+), 35 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 2c3c4dfa..087052d4 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -23,6 +23,7 @@ "@types/node": "^24.0.13", "cac": "^6.7.14", "consola": "^3.4.2", + "cpx2": "^8.0.0", "fast-glob": "^3.3.3", "picocolors": "^1.1.1", "premove": "^4.0.0", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 5bc7171a..988358a9 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -24,6 +24,9 @@ importers: consola: specifier: ^3.4.2 version: 3.4.2 + cpx2: + specifier: ^8.0.0 + version: 8.0.0 fast-glob: specifier: ^3.3.3 version: 3.3.3 @@ -362,6 +365,11 @@ packages: core-util-is@1.0.3: resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + cpx2@8.0.0: + resolution: {integrity: sha512-RxD9jrSVNSOmfcbiPlr3XnKbUKH9K1w2HCv0skczUKhsZTueiDBecxuaSAKQkYSLQaGVA4ZQJZlTj5hVNNEvKg==} + engines: {node: ^20.0.0 || >=22.0.0, npm: '>=10'} + hasBin: true + crc-32@1.2.2: resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} engines: {node: '>=0.8'} @@ -375,6 +383,22 @@ packages: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} + debounce@2.2.0: + resolution: {integrity: 
sha512-Xks6RUDLZFdz8LIdR6q0MTH44k7FikOmnh5xkSjMig6ch45afc8sjTjRQf3P6ax8dMgcQrYO/AR2RGWURrruqw==} + engines: {node: '>=18'} + + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + duplexer@0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} + eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} @@ -406,19 +430,33 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} + find-index@0.1.1: + resolution: {integrity: sha512-uJ5vWrfBKMcE6y2Z8834dwEZj9mNGxYa3t3I53OwFeuZ8D9oc2E5zcsrkuhX6h4iYrjhiv0T3szQmxlAV9uxDg==} + foreground-child@3.3.1: resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} engines: {node: '>=14'} + fs-extra@11.3.0: + resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==} + engines: {node: '>=14.14'} + fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} + glob2base@0.0.12: + resolution: {integrity: sha512-ZyqlgowMbfj2NPjxaZZ/EtsXlOch28FRXgMd64vqZWk1bT9+wvSRLYD1om9M7QfQru51zJPAT17qXm4/zd+9QA==} + engines: {node: '>= 0.10'} + glob@10.4.5: resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} hasBin: true @@ -431,12 +469,24 @@ packages: graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + ignore@6.0.2: + resolution: {integrity: sha512-InwqeHHN2XpumIkMvpl/DCJVrAHgCsG5+cn1XlnLWGwtZBm8QJfSusItfrwx81CTp5agNZqpKU2J/ccC5nGT4A==} + engines: {node: '>= 4'} + inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + is-core-module@2.16.1: + resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} + engines: {node: '>= 0.4'} + is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -470,6 +520,9 @@ packages: resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} engines: {node: 20 || >=22} + jsonfile@6.1.0: + resolution: {integrity: 
sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + lazystream@1.0.1: resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} engines: {node: '>= 0.6.3'} @@ -504,14 +557,24 @@ packages: resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} engines: {node: '>=16 || 14 >=14.17'} + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + minipass@7.1.2: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} + p-map@7.0.3: + resolution: {integrity: sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==} + engines: {node: '>=18'} + package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} @@ -523,6 +586,9 @@ packages: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} + path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + path-scurry@1.11.1: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} @@ -572,6 +638,11 @@ packages: readdir-glob@1.1.3: resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + resolve@1.22.10: + resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==} + engines: {node: '>= 0.4'} + hasBin: true + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -602,6 +673,10 @@ packages: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} + shell-quote@1.8.3: + resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} + engines: {node: '>= 0.4'} + signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} @@ -631,6 +706,13 @@ packages: resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} engines: {node: '>=12'} + subarg@1.0.0: + resolution: {integrity: sha512-RIrIdRY0X1xojthNcVtgT9sjpOGagEUKpZdgBUi054OEPFo282yg+zE+t1Rj3+RqKq2xStL7uUHhY+AjbC4BXg==} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + tar-stream@3.1.7: resolution: {integrity: 
sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==} @@ -652,6 +734,10 @@ packages: undici-types@7.8.0: resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} + universalify@2.0.1: + resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} @@ -932,6 +1018,24 @@ snapshots: core-util-is@1.0.3: {} + cpx2@8.0.0: + dependencies: + debounce: 2.2.0 + debug: 4.4.1 + duplexer: 0.1.2 + fs-extra: 11.3.0 + glob: 11.0.3 + glob2base: 0.0.12 + ignore: 6.0.2 + minimatch: 10.0.3 + p-map: 7.0.3 + resolve: 1.22.10 + safe-buffer: 5.2.1 + shell-quote: 1.8.3 + subarg: 1.0.0 + transitivePeerDependencies: + - supports-color + crc-32@1.2.2: {} crc32-stream@6.0.0: @@ -945,6 +1049,14 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 + debounce@2.2.0: {} + + debug@4.4.1: + dependencies: + ms: 2.1.3 + + duplexer@0.1.2: {} + eastasianwidth@0.2.0: {} emoji-regex@8.0.0: {} @@ -973,18 +1085,32 @@ snapshots: dependencies: to-regex-range: 5.0.1 + find-index@0.1.1: {} + foreground-child@3.3.1: dependencies: cross-spawn: 7.0.6 signal-exit: 4.1.0 + fs-extra@11.3.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.1.0 + universalify: 2.0.1 + fsevents@2.3.3: optional: true + function-bind@1.1.2: {} + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 + glob2base@0.0.12: + dependencies: + find-index: 0.1.1 + glob@10.4.5: dependencies: foreground-child: 3.3.1 @@ -1005,10 +1131,20 @@ snapshots: graceful-fs@4.2.11: {} + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + ieee754@1.2.1: {} + ignore@6.0.2: {} + inherits@2.0.4: {} + is-core-module@2.16.1: + dependencies: + hasown: 2.0.2 + is-extglob@2.1.1: {} is-fullwidth-code-point@3.0.0: {} @@ -1035,6 +1171,12 @@ snapshots: dependencies: '@isaacs/cliui': 8.0.2 + jsonfile@6.1.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + lazystream@1.0.1: dependencies: readable-stream: 2.3.8 @@ -1064,16 +1206,24 @@ snapshots: dependencies: brace-expansion: 2.0.2 + minimist@1.2.8: {} + minipass@7.1.2: {} + ms@2.1.3: {} + normalize-path@3.0.0: {} + p-map@7.0.3: {} + package-json-from-dist@1.0.1: {} parse-ms@4.0.0: {} path-key@3.1.1: {} + path-parse@1.0.7: {} + path-scurry@1.11.1: dependencies: lru-cache: 10.4.3 @@ -1124,6 +1274,12 @@ snapshots: dependencies: minimatch: 5.1.6 + resolve@1.22.10: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + reusify@1.1.0: {} rolldown@1.0.0-beta.26: @@ -1186,6 +1342,8 @@ snapshots: shebang-regex@3.0.0: {} + shell-quote@1.8.3: {} + signal-exit@4.1.0: {} streamx@2.22.1: @@ -1223,6 +1381,12 @@ snapshots: dependencies: ansi-regex: 6.1.0 + subarg@1.0.0: + dependencies: + minimist: 1.2.8 + + supports-preserve-symlinks-flag@1.0.0: {} + tar-stream@3.1.7: dependencies: b4a: 1.6.7 @@ -1244,6 +1408,8 @@ snapshots: undici-types@7.8.0: {} + universalify@2.0.1: {} + util-deprecate@1.0.2: {} which@2.0.2: diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 33ab99f0..cbdc4008 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,3 +1,5 @@ +import { ConsolaInstance } from "consola"; +import fg from "fast-glob"; import fs from "node:fs/promises"; import path from "node:path"; import { env } from 
"node:process"; @@ -7,7 +9,7 @@ import { onExit } from "signal-exit"; import { STD_EXTERNALS } from "./constants.js"; import { bold, green } from "./utils/colors.js"; import { parsePackageError } from "./utils/error.js"; -import { logger } from "./utils/logger.js"; +import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; import { PackageJson } from "./utils/parsers/PackageJson.js"; import { ProjectConfig } from "./utils/project-config.js"; @@ -17,50 +19,77 @@ interface BuildCommandOptions { minify?: boolean; } -export async function build(root: string | undefined, options: BuildCommandOptions): Promise { - try { - await runBuild(root, options); - } catch (error) { - logger.error(error); - process.exit(1); - } -} +// TODO: Copy files and add watcher. -export async function runBuild(root: string | undefined, options: BuildCommandOptions = {}): Promise { +export async function build(root: string | undefined, options: BuildCommandOptions): Promise { options.watch ??= false; options.minify ??= !!env.CI; - root = path.resolve(root ?? ""); - process.chdir(root); + const logger: ConsolaInstance = createLogger(); + try { + root = path.resolve(root ?? ""); + process.chdir(root); - const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); + const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); - const project = new ProjectConfig({ - pkg, - isTsProject - }); + const project = new ProjectConfig({ + pkg, + isTsProject + }); - const bundles = await loadConfig(project); + const bundles = await loadConfig(project); - // await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); + await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); + console.dir(project.inputFiles); + console.dir(project.outputDirs); + console.dir(project.outputFiles); + if (options.watch) { + await tasks.watch({ project, bundles, logger, root }); + } else { + await tasks.build({ project, bundles, logger, root }); + } + } catch (error) { + logger.error(error); + process.exit(1); + } +} - console.dir(project.inputFiles); - console.dir(project.outputDirs); - console.dir(project.outputFiles); +interface TaskInput { + root: string; + bundles: BuildOptions[]; + project: ProjectConfig; + logger: ConsolaInstance; +} - if (!options.watch) { +const tasks = { + async build({ project, bundles, logger }: TaskInput): Promise { buildMeasure.start(); + for (const bundle of bundles) { await buildBundle(bundle); logger.success(pprintSuccessOutput(bundle.output?.file!)); } + + const stream = fg.stream(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]); + for await (const src of stream) { + const f = path.parse(src as string); + const dst = path.join(project.outputDirs.contentRoot, f.base); + await fs.cp(src as string, dst, { + recursive: true + }); + } + await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); logger.success(pprintSuccessOutput(project.outputFiles.mpk)); - buildMeasure.end(); - } else { + + const buildInfo = buildMeasure.end(); + logger.success("Done in", green(ms(buildInfo.duration))); + }, + async watch({ root, bundles, logger }: TaskInput): Promise { logger.start("Start build in watch mode"); - const watcher = watch(bundles); - watcher.on("event", event => { + + const bundlesWatcher = watch(bundles); + bundlesWatcher.on("event", event => { if (event.code === "BUNDLE_END") { let [outFile] = event.output; outFile = bold(path.relative(root, outFile)); @@ -78,12 +107,12 @@ 
export async function runBuild(root: string | undefined, options: BuildCommandOp }); onExit(() => { - watcher.close(); + bundlesWatcher.close(); logger.log(""); logger.log("Build watcher stopped"); }); } -} +}; async function defaultConfig(project: ProjectConfig): Promise { const esmBundle = { @@ -189,7 +218,6 @@ const buildMeasure = { }, end() { performance.mark("build-end"); - const buildInfo = performance.measure("build", "build-start", "build-end"); - logger.success("Done in", green(ms(buildInfo.duration))); + return performance.measure("build", "build-start", "build-end"); } }; diff --git a/packages/mpx/src/utils/logger.ts b/packages/mpx/src/utils/logger.ts index 4998fb2f..f384d349 100644 --- a/packages/mpx/src/utils/logger.ts +++ b/packages/mpx/src/utils/logger.ts @@ -3,7 +3,8 @@ import { env } from "node:process"; const CI = !!env.CI; -export const logger = createConsola({ - level: CI ? 2 : 3, - fancy: true -}); +export const createLogger = () => + createConsola({ + level: CI ? 2 : 3, + fancy: true + }); From 34c8d81bcda25f902bbb218a493f115fa3f8a9d4 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 11:39:31 +0200 Subject: [PATCH 14/45] chore: add fs utils and change logging --- packages/mpx/src/build.ts | 59 +++++++++++------------------------- packages/mpx/src/utils/fs.ts | 33 ++++++++++++++++++++ 2 files changed, 51 insertions(+), 41 deletions(-) create mode 100644 packages/mpx/src/utils/fs.ts diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index cbdc4008..8458ad99 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -8,10 +8,9 @@ import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; import { STD_EXTERNALS } from "./constants.js"; import { bold, green } from "./utils/colors.js"; -import { parsePackageError } from "./utils/error.js"; +import { hasEditorConfig, hasEditorPreview, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; -import { PackageJson } from "./utils/parsers/PackageJson.js"; import { ProjectConfig } from "./utils/project-config.js"; interface BuildCommandOptions { @@ -67,7 +66,7 @@ const tasks = { for (const bundle of bundles) { await buildBundle(bundle); - logger.success(pprintSuccessOutput(bundle.output?.file!)); + logger.success(formatMsg.built(bundle.output?.file!)); } const stream = fg.stream(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]); @@ -80,7 +79,7 @@ const tasks = { } await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); - logger.success(pprintSuccessOutput(project.outputFiles.mpk)); + logger.success(formatMsg.built(project.outputFiles.mpk)); const buildInfo = buildMeasure.end(); logger.success("Done in", green(ms(buildInfo.duration))); @@ -89,11 +88,17 @@ const tasks = { logger.start("Start build in watch mode"); const bundlesWatcher = watch(bundles); + + let waitingChanges = false; bundlesWatcher.on("event", event => { if (event.code === "BUNDLE_END") { let [outFile] = event.output; outFile = bold(path.relative(root, outFile)); - logger.success(pprintSuccessOutput(outFile, event.duration)); + if (!waitingChanges) { + logger.success(formatMsg.built(outFile)); + } else { + logger.success(formatMsg.rebuilt(outFile, event.duration)); + } event.result?.close(); } @@ -103,6 +108,10 @@ const tasks = { if (event.code === "END") { logger.log(""); + if (!waitingChanges) { + 
logger.info("Watching for changes..."); + } + waitingChanges = true; } }); @@ -175,42 +184,10 @@ async function loadConfig(project: ProjectConfig): Promise { return defaultConfig(project); } -async function isTypeScriptProject(root: string): Promise { - return fs.access(path.resolve(root, "tsconfig.json"), fs.constants.F_OK).then( - () => true, - () => false - ); -} - -async function hasEditorConfig(project: ProjectConfig): Promise { - return fs.access(path.resolve(project.inputFiles.editorConfig), fs.constants.F_OK).then( - () => true, - () => false - ); -} - -async function hasEditorPreview(project: ProjectConfig): Promise { - return fs.access(path.resolve(project.inputFiles.editorPreview), fs.constants.F_OK).then( - () => true, - () => false - ); -} - -async function readPackageJson(root: string): Promise { - const filePath = path.resolve(root, "package.json"); - try { - return PackageJson.parse(JSON.parse(await fs.readFile(filePath, "utf-8"))); - } catch (error) { - throw parsePackageError(error); - } -} - -function pprintSuccessOutput(file: string, duration?: number): string { - if (!duration) { - return `Built ${bold(file)}`; - } - return `Built ${bold(file)} in ${green(ms(duration))}`; -} +const formatMsg = { + built: (file: string) => `Built ${bold(file)}`, + rebuilt: (file: string, duration: number) => `Rebuilt ${bold(file)} in ${green(ms(duration))}` +}; const buildMeasure = { start() { diff --git a/packages/mpx/src/utils/fs.ts b/packages/mpx/src/utils/fs.ts new file mode 100644 index 00000000..4b86a318 --- /dev/null +++ b/packages/mpx/src/utils/fs.ts @@ -0,0 +1,33 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { parsePackageError } from "./error.js"; +import { PackageJson } from "./parsers/PackageJson.js"; +import { ProjectConfig } from "./project-config.js"; + +function access(filePath: string): Promise { + return fs.access(filePath, fs.constants.F_OK).then( + () => true, + () => false + ); +} + +export async function isTypeScriptProject(root: string): Promise { + return access(path.resolve(root, "tsconfig.json")); +} + +export async function hasEditorConfig(project: ProjectConfig): Promise { + return access(path.resolve(project.inputFiles.editorConfig)); +} + +export async function hasEditorPreview(project: ProjectConfig): Promise { + return access(path.resolve(project.inputFiles.editorPreview)); +} + +export async function readPackageJson(root: string): Promise { + const filePath = path.resolve(root, "package.json"); + try { + return PackageJson.parse(JSON.parse(await fs.readFile(filePath, "utf-8"))); + } catch (error) { + throw parsePackageError(error); + } +} From 44e0b57ade0d90f24b0ce076f3c0f757c225ae59 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 13:09:31 +0200 Subject: [PATCH 15/45] chore: add asset watchers --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 17 +++++ packages/mpx/src/build.ts | 120 ++++++++++++++++++++++++++---------- 3 files changed, 106 insertions(+), 32 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 087052d4..a171a31b 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -22,6 +22,7 @@ "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", "cac": "^6.7.14", + "chokidar": "^4.0.3", "consola": "^3.4.2", "cpx2": "^8.0.0", "fast-glob": "^3.3.3", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 988358a9..c8a7810d 100644 --- a/packages/mpx/pnpm-lock.yaml +++ 
b/packages/mpx/pnpm-lock.yaml @@ -21,6 +21,9 @@ importers: cac: specifier: ^6.7.14 version: 6.7.14 + chokidar: + specifier: ^4.0.3 + version: 4.0.3 consola: specifier: ^3.4.2 version: 3.4.2 @@ -347,6 +350,10 @@ packages: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} + chokidar@4.0.3: + resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} + color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} @@ -638,6 +645,10 @@ packages: readdir-glob@1.1.3: resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + resolve@1.22.10: resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==} engines: {node: '>= 0.4'} @@ -1000,6 +1011,10 @@ snapshots: cac@6.7.14: {} + chokidar@4.0.3: + dependencies: + readdirp: 4.1.2 + color-convert@2.0.1: dependencies: color-name: 1.1.4 @@ -1274,6 +1289,8 @@ snapshots: dependencies: minimatch: 5.1.6 + readdirp@4.1.2: {} + resolve@1.22.10: dependencies: is-core-module: 2.16.1 diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 8458ad99..9d5f08e8 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,3 +1,4 @@ +import chokidar from "chokidar"; import { ConsolaInstance } from "consola"; import fg from "fast-glob"; import fs from "node:fs/promises"; @@ -53,7 +54,7 @@ export async function build(root: string | undefined, options: BuildCommandOptio } } -interface TaskInput { +interface TaskParams { root: string; bundles: BuildOptions[]; project: ProjectConfig; @@ -61,7 +62,8 @@ interface TaskInput { } const tasks = { - async build({ project, bundles, logger }: TaskInput): Promise { + async build(params: TaskParams): Promise { + const { project, bundles, logger } = params; buildMeasure.start(); for (const bundle of bundles) { @@ -69,14 +71,7 @@ const tasks = { logger.success(formatMsg.built(bundle.output?.file!)); } - const stream = fg.stream(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]); - for await (const src of stream) { - const f = path.parse(src as string); - const dst = path.join(project.outputDirs.contentRoot, f.base); - await fs.cp(src as string, dst, { - recursive: true - }); - } + await tasks.copyWidgetAssets(params); await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); logger.success(formatMsg.built(project.outputFiles.mpk)); @@ -84,42 +79,102 @@ const tasks = { const buildInfo = buildMeasure.end(); logger.success("Done in", green(ms(buildInfo.duration))); }, - async watch({ root, bundles, logger }: TaskInput): Promise { + async watch(params: TaskParams): Promise { + const { root, bundles, logger } = params; logger.start("Start build in watch mode"); const bundlesWatcher = watch(bundles); - let waitingChanges = false; - bundlesWatcher.on("event", event => { - if (event.code === "BUNDLE_END") { - let [outFile] = event.output; - outFile = bold(path.relative(root, outFile)); - if (!waitingChanges) { - logger.success(formatMsg.built(outFile)); - } else { - logger.success(formatMsg.rebuilt(outFile, 
event.duration)); + const bundleWatchReady = new Promise(resolve => { + let isFirstEvent = true; + bundlesWatcher.on("event", event => { + if (event.code === "BUNDLE_END") { + let [outFile] = event.output; + outFile = bold(path.relative(root, outFile)); + if (isFirstEvent) { + logger.success(formatMsg.built(outFile)); + } else { + logger.info(formatMsg.rebuilt(outFile, event.duration)); + } + event.result?.close(); } - event.result?.close(); - } - if (event.code === "ERROR") { - logger.error(event.error); - } + if (event.code === "ERROR") { + logger.error(event.error); + } - if (event.code === "END") { - logger.log(""); - if (!waitingChanges) { - logger.info("Watching for changes..."); + if (event.code === "END") { + if (isFirstEvent) { + resolve(); + } + isFirstEvent = false; } - waitingChanges = true; - } + }); }); + await bundleWatchReady; + await tasks.watchWidgetAssets(params); + await tasks.watchContent(params); + logger.info("Waiting for changes..."); + onExit(() => { bundlesWatcher.close(); logger.log(""); logger.log("Build watcher stopped"); }); + }, + async copyWidgetAssets({ project }: TaskParams): Promise { + const stream = fg.stream(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]); + for await (const src of stream) { + const f = path.parse(src as string); + const dst = path.join(project.outputDirs.contentRoot, f.base); + + await fs.cp(src as string, dst, { + recursive: true + }); + } + }, + async watchWidgetAssets(params: TaskParams): Promise { + const { project, logger } = params; + + await tasks.copyWidgetAssets(params); + + const watcher = chokidar.watch(await fg(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"])); + watcher.on("change", async file => { + logger.info(formatMsg.copy(file)); + const f = path.parse(file); + const dst = path.join(project.outputDirs.contentRoot, f.base); + await fs.cp(file, dst); + }); + + onExit(() => { + watcher.close(); + }); + }, + + async watchContent({ logger, project }: TaskParams): Promise { + await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); + const watcher = chokidar.watch(project.outputDirs.contentRoot); + + let debounceTimer: NodeJS.Timeout | null = null; + + watcher.on("change", async () => { + if (debounceTimer) { + clearTimeout(debounceTimer); + } + + debounceTimer = setTimeout(async () => { + await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); + logger.success(formatMsg.built(project.outputFiles.mpk)); + }, 30); + }); + + onExit(() => { + if (debounceTimer) { + clearTimeout(debounceTimer); + } + watcher.close(); + }); } }; @@ -186,7 +241,8 @@ async function loadConfig(project: ProjectConfig): Promise { const formatMsg = { built: (file: string) => `Built ${bold(file)}`, - rebuilt: (file: string, duration: number) => `Rebuilt ${bold(file)} in ${green(ms(duration))}` + rebuilt: (file: string, duration: number) => `Rebuilt ${bold(file)} in ${green(ms(duration))}`, + copy: (file: string) => `Copy ${bold(file)}` }; const buildMeasure = { From 2bd95ec5b27221d9bbc21b7da6fe3ccf01a5e117 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 13:34:47 +0200 Subject: [PATCH 16/45] chore: extract glob --- packages/mpx/src/build.ts | 6 +++--- packages/mpx/src/constants.ts | 2 ++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 9d5f08e8..6d9b3634 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -7,7 +7,7 @@ import { env } from "node:process"; import 
ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; -import { STD_EXTERNALS } from "./constants.js"; +import { STD_EXTERNALS, WIDGET_ASSETS } from "./constants.js"; import { bold, green } from "./utils/colors.js"; import { hasEditorConfig, hasEditorPreview, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; @@ -124,7 +124,7 @@ const tasks = { }); }, async copyWidgetAssets({ project }: TaskParams): Promise { - const stream = fg.stream(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]); + const stream = fg.stream(WIDGET_ASSETS); for await (const src of stream) { const f = path.parse(src as string); const dst = path.join(project.outputDirs.contentRoot, f.base); @@ -139,7 +139,7 @@ const tasks = { await tasks.copyWidgetAssets(params); - const watcher = chokidar.watch(await fg(["src/*.xml", "src/*.@(tile|icon)?(.dark).png"])); + const watcher = chokidar.watch(await fg(WIDGET_ASSETS)); watcher.on("change", async file => { logger.info(formatMsg.copy(file)); const f = path.parse(file); diff --git a/packages/mpx/src/constants.ts b/packages/mpx/src/constants.ts index a4594b6c..b4e893e7 100644 --- a/packages/mpx/src/constants.ts +++ b/packages/mpx/src/constants.ts @@ -12,3 +12,5 @@ export const STD_EXTERNALS = [ /^react-dom$/, /^big.js$/ ]; + +export const WIDGET_ASSETS = ["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]; From 9a3b9133f68e384dcd6583e64631a7269cbeb620 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 14:10:08 +0200 Subject: [PATCH 17/45] chore: add project path getter --- packages/mpx/src/build.ts | 2 -- packages/mpx/src/cli.ts | 9 +++------ packages/mpx/src/utils/fs.ts | 2 +- packages/mpx/src/utils/parsers/PackageJson.ts | 7 ++++++- packages/mpx/src/utils/project-config.ts | 20 +++++++++++++++++++ 5 files changed, 30 insertions(+), 10 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 6d9b3634..3b730db0 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -19,8 +19,6 @@ interface BuildCommandOptions { minify?: boolean; } -// TODO: Copy files and add watcher. 
- export async function build(root: string | undefined, options: BuildCommandOptions): Promise { options.watch ??= false; options.minify ??= !!env.CI; diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index f042ae59..86d518c8 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -4,7 +4,8 @@ import { cac } from "cac"; import { build } from "./build.js"; import { VERSION } from "./constants.js"; -const cli = cac("mpx"); +const name = "mpx"; +const cli = cac(name); cli.command("build [root]", "Build widget") .option("-w, --watch", "watch for changes and rebuild") @@ -16,7 +17,7 @@ cli.version(VERSION); cli.on("command:*", () => { console.error(`Unknown command: "%s"`, cli.args.join(" ")); - console.error("See 'mpw --help' for a list of available commands."); + console.error(`See '${name} --help' for a list of available commands.`); process.exit(1); }); @@ -31,7 +32,3 @@ process.on("uncaughtException", error => { console.error("Uncaught Exception:", error.message); process.exit(1); }); - -// process.on("unhandledRejection", (reason, promise) => { -// console.error("Unhandled Rejection at:", promise, "reason:", reason); -// }); diff --git a/packages/mpx/src/utils/fs.ts b/packages/mpx/src/utils/fs.ts index 4b86a318..2efae808 100644 --- a/packages/mpx/src/utils/fs.ts +++ b/packages/mpx/src/utils/fs.ts @@ -4,7 +4,7 @@ import { parsePackageError } from "./error.js"; import { PackageJson } from "./parsers/PackageJson.js"; import { ProjectConfig } from "./project-config.js"; -function access(filePath: string): Promise { +export function access(filePath: string): Promise { return fs.access(filePath, fs.constants.F_OK).then( () => true, () => false diff --git a/packages/mpx/src/utils/parsers/PackageJson.ts b/packages/mpx/src/utils/parsers/PackageJson.ts index 1bdcee13..ac9d6331 100644 --- a/packages/mpx/src/utils/parsers/PackageJson.ts +++ b/packages/mpx/src/utils/parsers/PackageJson.ts @@ -8,7 +8,12 @@ export const PackageJson = z.object({ widgetName: z.string().min(1).trim(), packagePath: z.string().regex(/^[a-zA-Z]+(\.[a-zA-Z]+)*$/, { message: "must be dot separated path like 'example.widget'" - }) + }), + config: z.optional( + z.object({ + projectPath: z.string().optional() + }) + ) }); export type PackageJson = z.infer; diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 6d35e01b..5f62de7f 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -1,4 +1,6 @@ import path from "node:path"; +import { env } from "node:process"; +import { access } from "./fs.js"; import { PackageJson } from "./parsers/PackageJson.js"; /** Files located in src directory */ @@ -126,4 +128,22 @@ export class ProjectConfig { }) }; } + + async getProjectPath(): Promise { + const { pkg } = this; + const projectPath = (() => { + if (env.MX_PROJECT_PATH) { + return env.MX_PROJECT_PATH; + } + if (pkg.config?.projectPath) { + return pkg.config.projectPath; + } + + return path.join("tests", "testProject"); + })(); + + if (await access(projectPath)) { + return projectPath; + } + } } From a23eb22e91d6e83e2c96c96c6945b4b181e9bd92 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 15:10:47 +0200 Subject: [PATCH 18/45] feat: print project path --- packages/mpx/src/build.ts | 21 ++++++++++++++------- packages/mpx/src/utils/colors.ts | 24 +++++++++++++++++++++--- packages/mpx/src/utils/project-config.ts | 8 +++++++- 3 files changed, 42 insertions(+), 
11 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 3b730db0..fb21affa 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -8,7 +8,7 @@ import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; import { STD_EXTERNALS, WIDGET_ASSETS } from "./constants.js"; -import { bold, green } from "./utils/colors.js"; +import { bgBlue, blue, bold, dim, green, greenBright, inverse, white } from "./utils/colors.js"; import { hasEditorConfig, hasEditorPreview, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; @@ -35,12 +35,17 @@ export async function build(root: string | undefined, options: BuildCommandOptio isTsProject }); + const projectPath = await project.getProjectPath(); + if (projectPath) { + logger.info(formatMsg.mxpath(projectPath)); + } + const bundles = await loadConfig(project); await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); - console.dir(project.inputFiles); - console.dir(project.outputDirs); - console.dir(project.outputFiles); + // console.dir(project.inputFiles); + // console.dir(project.outputDirs); + // console.dir(project.outputFiles); if (options.watch) { await tasks.watch({ project, bundles, logger, root }); } else { @@ -88,7 +93,7 @@ const tasks = { bundlesWatcher.on("event", event => { if (event.code === "BUNDLE_END") { let [outFile] = event.output; - outFile = bold(path.relative(root, outFile)); + outFile = path.relative(root, outFile); if (isFirstEvent) { logger.success(formatMsg.built(outFile)); } else { @@ -239,8 +244,10 @@ async function loadConfig(project: ProjectConfig): Promise { const formatMsg = { built: (file: string) => `Built ${bold(file)}`, - rebuilt: (file: string, duration: number) => `Rebuilt ${bold(file)} in ${green(ms(duration))}`, - copy: (file: string) => `Copy ${bold(file)}` + rebuilt: (file: string, duration: number) => `Rebuilt ${dim(file)} in ${green(ms(duration))}`, + copy: (file: string) => `Copy ${bold(file)}`, + mxpath1: (dir: string) => `${inverse(greenBright(bold(" PROJECT PATH ")))}${bgBlue(white(bold(` ${dir} `)))}`, + mxpath: (dir: string) => `${inverse(greenBright(bold(" PROJECT PATH ")))}${blue(bold(` ${dir} `))}` }; const buildMeasure = { diff --git a/packages/mpx/src/utils/colors.ts b/packages/mpx/src/utils/colors.ts index b232185f..75278cb5 100644 --- a/packages/mpx/src/utils/colors.ts +++ b/packages/mpx/src/utils/colors.ts @@ -3,6 +3,24 @@ import pc from "picocolors"; // @see https://no-color.org // @see https://www.npmjs.com/package/chalk -export const { bold, cyan, dim, gray, green, red, underline, yellow } = pc.createColors( - env.FORCE_COLOR !== "0" && !env.NO_COLOR -); +export const { + bold, + cyan, + dim, + gray, + green, + greenBright, + red, + underline, + yellow, + blue, + blueBright, + bgBlue, + bgGreen, + black, + inverse, + bgGreenBright, + bgBlackBright, + bgBlack, + white +} = pc.createColors(env.FORCE_COLOR !== "0" && !env.NO_COLOR); diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 5f62de7f..7befea88 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -34,6 +34,7 @@ interface ProjectConfigInputs { } export class ProjectConfig { + #projectPath: string | undefined; /** Output directory for built files */ readonly dist = "dist"; /** Package root directory that 
contains all widget files shipped with mpk */ @@ -130,8 +131,11 @@ export class ProjectConfig { } async getProjectPath(): Promise { + if (this.#projectPath) { + return this.#projectPath; + } const { pkg } = this; - const projectPath = (() => { + let projectPath = (() => { if (env.MX_PROJECT_PATH) { return env.MX_PROJECT_PATH; } @@ -141,8 +145,10 @@ export class ProjectConfig { return path.join("tests", "testProject"); })(); + projectPath = path.resolve(projectPath); if (await access(projectPath)) { + this.#projectPath = projectPath; return projectPath; } } From b5c3e4a1f3ec1d144907e1b8f103c554a34ac687 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 16:41:18 +0200 Subject: [PATCH 19/45] chore: start adding plugins --- packages/mpx/package.json | 3 +- packages/mpx/pnpm-lock.yaml | 98 +++++++++++++++++++++++- packages/mpx/pnpm-workspace.yaml | 3 + packages/mpx/src/build.ts | 87 ++++----------------- packages/mpx/src/constants.ts | 2 +- packages/mpx/src/plugins.ts | 18 +++++ packages/mpx/src/rolldown.ts | 90 ++++++++++++++++++++++ packages/mpx/src/utils/project-config.ts | 13 +++- 8 files changed, 235 insertions(+), 79 deletions(-) create mode 100644 packages/mpx/pnpm-workspace.yaml create mode 100644 packages/mpx/src/plugins.ts create mode 100644 packages/mpx/src/rolldown.ts diff --git a/packages/mpx/package.json b/packages/mpx/package.json index a171a31b..401f9ba6 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -19,6 +19,8 @@ "license": "Apache-2.0", "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad", "devDependencies": { + "@rollup/plugin-image": "^3.0.3", + "@rollup/plugin-url": "^8.0.2", "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", "cac": "^6.7.14", @@ -30,7 +32,6 @@ "premove": "^4.0.0", "prettier": "^3.6.2", "pretty-ms": "^9.2.0", - "rollup": "^4.45.0", "signal-exit": "^4.1.0", "typescript": "^5.8.3", "zip-a-folder": "^3.1.9", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index c8a7810d..73fac2d5 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -12,6 +12,12 @@ importers: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 devDependencies: + '@rollup/plugin-image': + specifier: ^3.0.3 + version: 3.0.3(rollup@4.45.0) + '@rollup/plugin-url': + specifier: ^8.0.2 + version: 8.0.2(rollup@4.45.0) '@tsconfig/node22': specifier: ^22.0.2 version: 22.0.2 @@ -45,9 +51,6 @@ importers: pretty-ms: specifier: ^9.2.0 version: 9.2.0 - rollup: - specifier: ^4.45.0 - version: 4.45.0 signal-exit: specifier: ^4.1.0 version: 4.1.0 @@ -173,6 +176,33 @@ packages: '@rolldown/pluginutils@1.0.0-beta.26': resolution: {integrity: sha512-r/5po89voz/QRPDmoErL10+hVuTAuz1SHvokx+yWBlOIPB5C41jC7QhLqq9kaebx/+EHyoV3z22/qBfX81Ns8A==} + '@rollup/plugin-image@3.0.3': + resolution: {integrity: sha512-qXWQwsXpvD4trSb8PeFPFajp8JLpRtqqOeNYRUKnEQNHm7e5UP7fuSRcbjQAJ7wDZBbnJvSdY5ujNBQd9B1iFg==} + engines: {node: '>=14.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + + '@rollup/plugin-url@8.0.2': + resolution: {integrity: sha512-5yW2LP5NBEgkvIRSSEdJkmxe5cUNZKG3eenKtfJvSkxVm/xTTu7w+ayBtNwhozl1ZnTUCU0xFaRQR+cBl2H7TQ==} + engines: {node: '>=14.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + + '@rollup/pluginutils@5.2.0': + 
resolution: {integrity: sha512-qWJ2ZTbmumwiLFomfzTyt5Kng4hwPi9rwCYN4SHb6eaRU1KNO4ccxINHr/VhH4GgPlt1XfSTLX2LBTme8ne4Zw==} + engines: {node: '>=14.0.0'} + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + '@rollup/rollup-android-arm-eabi@4.45.0': resolution: {integrity: sha512-2o/FgACbji4tW1dzXOqAV15Eu7DdgbKsF2QKcxfG4xbh5iwU7yr5RRP5/U+0asQliSYv5M4o7BevlGIoSL0LXg==} cpu: [arm] @@ -415,6 +445,9 @@ packages: emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + estree-walker@2.0.2: + resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + event-target-shim@5.0.1: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} @@ -544,6 +577,10 @@ packages: resolution: {integrity: sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==} engines: {node: 20 || >=22} + make-dir@3.1.0: + resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} + engines: {node: '>=8'} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -552,6 +589,15 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} + mime@3.0.0: + resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} + engines: {node: '>=10.0.0'} + hasBin: true + + mini-svg-data-uri@1.4.4: + resolution: {integrity: sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==} + hasBin: true + minimatch@10.0.3: resolution: {integrity: sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==} engines: {node: 20 || >=22} @@ -611,6 +657,10 @@ packages: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} + picomatch@4.0.2: + resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + engines: {node: '>=12'} + premove@4.0.0: resolution: {integrity: sha512-zim/Hr4+FVdCIM7zL9b9Z0Wfd5Ya3mnKtiuDv7L5lzYzanSq6cOcVJ7EFcgK4I0pt28l8H0jX/x3nyog380XgQ==} engines: {node: '>=6'} @@ -676,6 +726,10 @@ packages: safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -874,6 +928,29 @@ snapshots: '@rolldown/pluginutils@1.0.0-beta.26': {} + '@rollup/plugin-image@3.0.3(rollup@4.45.0)': + dependencies: + '@rollup/pluginutils': 5.2.0(rollup@4.45.0) + mini-svg-data-uri: 1.4.4 + optionalDependencies: + rollup: 4.45.0 + + '@rollup/plugin-url@8.0.2(rollup@4.45.0)': + dependencies: + '@rollup/pluginutils': 5.2.0(rollup@4.45.0) + make-dir: 3.1.0 + mime: 3.0.0 + optionalDependencies: + rollup: 
4.45.0 + + '@rollup/pluginutils@5.2.0(rollup@4.45.0)': + dependencies: + '@types/estree': 1.0.8 + estree-walker: 2.0.2 + picomatch: 4.0.2 + optionalDependencies: + rollup: 4.45.0 + '@rollup/rollup-android-arm-eabi@4.45.0': optional: true @@ -1078,6 +1155,8 @@ snapshots: emoji-regex@9.2.2: {} + estree-walker@2.0.2: {} + event-target-shim@5.0.1: {} events@3.3.0: {} @@ -1202,6 +1281,10 @@ snapshots: lru-cache@11.1.0: {} + make-dir@3.1.0: + dependencies: + semver: 6.3.1 + merge2@1.4.1: {} micromatch@4.0.8: @@ -1209,6 +1292,10 @@ snapshots: braces: 3.0.3 picomatch: 2.3.1 + mime@3.0.0: {} + + mini-svg-data-uri@1.4.4: {} + minimatch@10.0.3: dependencies: '@isaacs/brace-expansion': 5.0.0 @@ -1253,6 +1340,8 @@ snapshots: picomatch@2.3.1: {} + picomatch@4.0.2: {} + premove@4.0.0: {} prettier@3.6.2: {} @@ -1344,6 +1433,7 @@ snapshots: '@rollup/rollup-win32-ia32-msvc': 4.45.0 '@rollup/rollup-win32-x64-msvc': 4.45.0 fsevents: 2.3.3 + optional: true run-parallel@1.2.0: dependencies: @@ -1353,6 +1443,8 @@ snapshots: safe-buffer@5.2.1: {} + semver@6.3.1: {} + shebang-command@2.0.0: dependencies: shebang-regex: 3.0.0 diff --git a/packages/mpx/pnpm-workspace.yaml b/packages/mpx/pnpm-workspace.yaml new file mode 100644 index 00000000..8956feeb --- /dev/null +++ b/packages/mpx/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +publicHoistPattern: + - "*rollup*" + - "*plugin*" diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index fb21affa..7b43e9e5 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -7,9 +7,10 @@ import { env } from "node:process"; import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; -import { STD_EXTERNALS, WIDGET_ASSETS } from "./constants.js"; +import { MODELER_FILES } from "./constants.js"; +import { loadConfig } from "./rolldown.js"; import { bgBlue, blue, bold, dim, green, greenBright, inverse, white } from "./utils/colors.js"; -import { hasEditorConfig, hasEditorPreview, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; +import { isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; import { ProjectConfig } from "./utils/project-config.js"; @@ -43,9 +44,10 @@ export async function build(root: string | undefined, options: BuildCommandOptio const bundles = await loadConfig(project); await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); - // console.dir(project.inputFiles); - // console.dir(project.outputDirs); - // console.dir(project.outputFiles); + console.dir(project.inputFiles); + console.dir(project.outputDirs); + console.dir(project.outputFiles); + console.dir(project.assetsPublicPath); if (options.watch) { await tasks.watch({ project, bundles, logger, root }); } else { @@ -74,7 +76,7 @@ const tasks = { logger.success(formatMsg.built(bundle.output?.file!)); } - await tasks.copyWidgetAssets(params); + await tasks.copyModelerFiles(params); await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); logger.success(formatMsg.built(project.outputFiles.mpk)); @@ -116,7 +118,7 @@ const tasks = { }); await bundleWatchReady; - await tasks.watchWidgetAssets(params); + await tasks.watchModelerFiles(params); await tasks.watchContent(params); logger.info("Waiting for changes..."); @@ -126,8 +128,8 @@ const tasks = { logger.log("Build watcher stopped"); }); }, - async copyWidgetAssets({ project }: TaskParams): Promise { - const stream = 
fg.stream(WIDGET_ASSETS); + async copyModelerFiles({ project }: TaskParams): Promise { + const stream = fg.stream(MODELER_FILES); for await (const src of stream) { const f = path.parse(src as string); const dst = path.join(project.outputDirs.contentRoot, f.base); @@ -137,12 +139,12 @@ const tasks = { }); } }, - async watchWidgetAssets(params: TaskParams): Promise { + async watchModelerFiles(params: TaskParams): Promise { const { project, logger } = params; - await tasks.copyWidgetAssets(params); + await tasks.copyModelerFiles(params); - const watcher = chokidar.watch(await fg(WIDGET_ASSETS)); + const watcher = chokidar.watch(await fg(MODELER_FILES)); watcher.on("change", async file => { logger.info(formatMsg.copy(file)); const f = path.parse(file); @@ -181,67 +183,6 @@ const tasks = { } }; -async function defaultConfig(project: ProjectConfig): Promise { - const esmBundle = { - input: project.inputFiles.widgetFile, - external: [...STD_EXTERNALS], - output: { - file: project.outputFiles.esm, - format: "esm" - } - } satisfies BuildOptions; - - const umdBundle = { - input: project.inputFiles.widgetFile, - external: [...STD_EXTERNALS], - output: { - file: project.outputFiles.umd, - format: "umd", - name: `${project.pkg.packagePath}.${project.pkg.widgetName}`, - globals: { - "react/jsx-runtime": "react_jsx_runtime" - } - } - } satisfies BuildOptions; - - const editorConfigBundle = { - input: project.inputFiles.editorConfig, - output: { - file: project.outputFiles.editorConfig, - format: "commonjs" - } - } satisfies BuildOptions; - - const editorPreviewBundle = { - input: project.inputFiles.editorPreview, - output: { - file: project.outputFiles.editorPreview, - format: "commonjs" - } - } satisfies BuildOptions; - - const bundles: BuildOptions[] = [esmBundle, umdBundle]; - - const [addEditorConfig, addEditorPreview] = await Promise.all([ - hasEditorConfig(project), - hasEditorPreview(project) - ]); - - if (addEditorConfig) { - bundles.push(editorConfigBundle); - } - - if (addEditorPreview) { - bundles.push(editorPreviewBundle); - } - - return bundles; -} - -async function loadConfig(project: ProjectConfig): Promise { - return defaultConfig(project); -} - const formatMsg = { built: (file: string) => `Built ${bold(file)}`, rebuilt: (file: string, duration: number) => `Rebuilt ${dim(file)} in ${green(ms(duration))}`, diff --git a/packages/mpx/src/constants.ts b/packages/mpx/src/constants.ts index b4e893e7..1e1b48a5 100644 --- a/packages/mpx/src/constants.ts +++ b/packages/mpx/src/constants.ts @@ -13,4 +13,4 @@ export const STD_EXTERNALS = [ /^big.js$/ ]; -export const WIDGET_ASSETS = ["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]; +export const MODELER_FILES = ["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]; diff --git a/packages/mpx/src/plugins.ts b/packages/mpx/src/plugins.ts new file mode 100644 index 00000000..5d93c844 --- /dev/null +++ b/packages/mpx/src/plugins.ts @@ -0,0 +1,18 @@ +import image from "@rollup/plugin-image"; +import url from "@rollup/plugin-url"; +import { RolldownPlugin } from "rolldown"; + +/** Note: Rollup has issue with exported types https://github.com/rollup/plugins/issues/1329 */ +type RollupUrlFactory = typeof url.default; +export type RollupUrlOptions = Parameters[0]; +export type RollupImageOptions = Parameters[0]; + +export const plugins = { + url(options?: RollupUrlOptions): RolldownPlugin { + const urlPlugin = (url as unknown as RollupUrlFactory)(options); + return urlPlugin as RolldownPlugin; + }, + image(options?: RollupImageOptions): RolldownPlugin { + return 
(image as any)(options) as RolldownPlugin; + } +}; diff --git a/packages/mpx/src/rolldown.ts b/packages/mpx/src/rolldown.ts new file mode 100644 index 00000000..87f64e71 --- /dev/null +++ b/packages/mpx/src/rolldown.ts @@ -0,0 +1,90 @@ +import { BuildOptions, RolldownPlugin } from "rolldown"; +import { STD_EXTERNALS } from "./constants.js"; +import { plugins, RollupUrlOptions } from "./plugins.js"; +import { hasEditorConfig, hasEditorPreview } from "./utils/fs.js"; +import { ProjectConfig } from "./utils/project-config.js"; + +export async function defaultConfig(project: ProjectConfig): Promise { + const esmBundle = { + input: project.inputFiles.widgetFile, + external: [...STD_EXTERNALS], + plugins: stdPlugins(project), + output: { + file: project.outputFiles.esm, + format: "esm" + } + } satisfies BuildOptions; + + const umdBundle = { + input: project.inputFiles.widgetFile, + external: [...STD_EXTERNALS], + plugins: stdPlugins(project), + output: { + file: project.outputFiles.umd, + format: "umd", + name: `${project.pkg.packagePath}.${project.pkg.widgetName}`, + globals: { + "react/jsx-runtime": "react_jsx_runtime" + } + } + } satisfies BuildOptions; + + const editorConfigBundle = { + input: project.inputFiles.editorConfig, + output: { + file: project.outputFiles.editorConfig, + format: "commonjs" + } + } satisfies BuildOptions; + + const editorPreviewBundle = { + input: project.inputFiles.editorPreview, + output: { + file: project.outputFiles.editorPreview, + format: "commonjs" + } + } satisfies BuildOptions; + + const bundles: BuildOptions[] = [esmBundle, umdBundle]; + + const [addEditorConfig, addEditorPreview] = await Promise.all([ + hasEditorConfig(project), + hasEditorPreview(project) + ]); + + if (addEditorConfig) { + bundles.push(editorConfigBundle); + } + + if (addEditorPreview) { + bundles.push(editorPreviewBundle); + } + + return bundles; +} + +export async function loadConfig(project: ProjectConfig): Promise { + return defaultConfig(project); +} + +function stdPlugins(project: ProjectConfig): RolldownPlugin[] { + const { url, image } = plugins; + + const urlOptions: RollupUrlOptions = { + include: [ + "**/*.svg", + "**/*.png", + "**/*.jp(e)?g", + "**/*.gif", + "**/*.webp", + "**/*.ttf", + "**/*.woff(2)?", + "**/*.eot" + ], + limit: 0, + publicPath: project.assetsPublicPath, + destDir: project.outputDirs.widgetAssetsDir + }; + + return [url(urlOptions), image()]; +} diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 7befea88..70090c7f 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -26,6 +26,7 @@ interface BundleOutputDirs { mpkDir: string; contentRoot: string; widgetDir: string; + widgetAssetsDir: string; } interface ProjectConfigInputs { @@ -48,6 +49,15 @@ export class ProjectConfig { this.isTsProject = inputs.isTsProject; } + /** Public path (aka base url) for widget assets */ + get assetsPublicPath(): string { + const { + pkg: { packagePath, widgetName } + } = this; + const publicPath = ["widgets", ...packagePath.split("."), widgetName.toLowerCase(), "assets"].join("/"); + return `${publicPath}/`; + } + get inputFiles(): BundleInputFiles { const { pkg, isTsProject } = this; const ext = isTsProject ? 
"ts" : "js"; @@ -96,7 +106,8 @@ export class ProjectConfig { dist: this.dist, mpkDir: path.join(this.dist, this.pkg.version), contentRoot: this.contentRoot, - widgetDir: this.widgetDir + widgetDir: this.widgetDir, + widgetAssetsDir: path.join(this.widgetDir, "assets") }; } From 5a038ff2119f54a52db0fae6a16b6dd56ef80c12 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 17:24:29 +0200 Subject: [PATCH 20/45] chore: add license plugin --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 118 ++++++++++++++++++++++- packages/mpx/src/build.ts | 3 +- packages/mpx/src/plugins.ts | 5 + packages/mpx/src/rolldown.ts | 43 ++++++++- packages/mpx/src/utils/project-config.ts | 10 ++ 6 files changed, 174 insertions(+), 6 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 401f9ba6..76b1dc8a 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -32,6 +32,7 @@ "premove": "^4.0.0", "prettier": "^3.6.2", "pretty-ms": "^9.2.0", + "rollup-plugin-license": "^3.6.0", "signal-exit": "^4.1.0", "typescript": "^5.8.3", "zip-a-folder": "^3.1.9", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 73fac2d5..49647618 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -51,6 +51,9 @@ importers: pretty-ms: specifier: ^9.2.0 version: 9.2.0 + rollup-plugin-license: + specifier: ^3.6.0 + version: 3.6.0(picomatch@4.0.2)(rollup@4.45.0) signal-exit: specifier: ^4.1.0 version: 4.1.0 @@ -87,6 +90,9 @@ packages: resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} + '@jridgewell/sourcemap-codec@1.5.4': + resolution: {integrity: sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==} + '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} @@ -347,6 +353,10 @@ packages: resolution: {integrity: sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==} engines: {node: '>= 14'} + array-find-index@1.0.2: + resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} + engines: {node: '>=0.10.0'} + async@3.2.6: resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} @@ -391,6 +401,9 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + commenting@1.1.0: + resolution: {integrity: sha512-YeNK4tavZwtH7jEgK1ZINXzLKm6DZdEMfsaaieOsCAN0S8vsY7UeuO3Q7d/M018EFgE+IeUAuBOKkFccBZsUZA==} + compress-commons@6.0.2: resolution: {integrity: sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==} engines: {node: '>= 14'} @@ -466,6 +479,14 @@ packages: fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} + fdir@6.4.6: + resolution: {integrity: sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} 
engines: {node: '>=8'} @@ -577,6 +598,9 @@ packages: resolution: {integrity: sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==} engines: {node: 20 || >=22} + magic-string@0.30.17: + resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} + make-dir@3.1.0: resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} engines: {node: '>=8'} @@ -617,6 +641,9 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} + moment@2.30.1: + resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} + ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -631,6 +658,10 @@ packages: package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + package-name-regex@2.0.6: + resolution: {integrity: sha512-gFL35q7kbE/zBaPA3UKhp2vSzcPYx2ecbYuwv1ucE9Il6IIgBDweBlH8D68UFGZic2MkllKa2KHCfC1IQBQUYA==} + engines: {node: '>=12'} + parse-ms@4.0.0: resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==} engines: {node: '>=18'} @@ -712,6 +743,12 @@ packages: resolution: {integrity: sha512-2rad1JDFst/GD1J86RuqN1SIP8O8Xv4UbqNyKaVayXTjgF0D6HpvTnUZ1RQ6tANpZweGmq4v6Ay0uyRNEycFPw==} hasBin: true + rollup-plugin-license@3.6.0: + resolution: {integrity: sha512-1ieLxTCaigI5xokIfszVDRoy6c/Wmlot1fDEnea7Q/WXSR8AqOjYljHDLObAx7nFxHC2mbxT3QnTSPhaic2IYw==} + engines: {node: '>=14.0.0'} + peerDependencies: + rollup: ^1.0.0 || ^2.0.0 || ^3.0.0 || ^4.0.0 + rollup@4.45.0: resolution: {integrity: sha512-WLjEcJRIo7i3WDDgOIJqVI2d+lAC3EwvOGy+Xfq6hs+GQuAA4Di/H72xmXkOhrIWFg2PFYSKZYfH0f4vfKXN4A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} @@ -746,6 +783,27 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} + spdx-compare@1.0.0: + resolution: {integrity: sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==} + + spdx-exceptions@2.5.0: + resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==} + + spdx-expression-parse@3.0.1: + resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} + + spdx-expression-validate@2.0.0: + resolution: {integrity: sha512-b3wydZLM+Tc6CFvaRDBOF9d76oGIHNCLYFeHbftFXUWjnfZWganmDmvtM5sm1cRwJc/VDBMLyGGrsLFd1vOxbg==} + + spdx-license-ids@3.0.21: + resolution: {integrity: sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==} + + spdx-ranges@2.1.1: + resolution: {integrity: sha512-mcdpQFV7UDAgLpXEE/jOMqvK4LBoO0uTQg0uvXUewmEFhpiZx5yJSZITHB8w1ZahKdhfZqP5GPEOKLyEq5p8XA==} + + spdx-satisfies@5.0.1: + resolution: {integrity: sha512-Nwor6W6gzFp8XX4neaKQ7ChV4wmpSh2sSDemMFSzHxpTw460jxFYeOn+jq4ybnSSw/5sc3pjka9MQPouksQNpw==} + streamx@2.22.1: resolution: {integrity: sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==} @@ -862,6 +920,8 @@ snapshots: wrap-ansi: 8.1.0 wrap-ansi-cjs: wrap-ansi@7.0.0 + 
'@jridgewell/sourcemap-codec@1.5.4': {} + '@napi-rs/wasm-runtime@0.2.12': dependencies: '@emnapi/core': 1.4.4 @@ -1060,6 +1120,8 @@ snapshots: tar-stream: 3.1.7 zip-stream: 6.0.1 + array-find-index@1.0.2: {} + async@3.2.6: {} b4a@1.6.7: {} @@ -1098,6 +1160,8 @@ snapshots: color-name@1.1.4: {} + commenting@1.1.0: {} + compress-commons@6.0.2: dependencies: crc-32: 1.2.2 @@ -1175,6 +1239,10 @@ snapshots: dependencies: reusify: 1.1.0 + fdir@6.4.6(picomatch@4.0.2): + optionalDependencies: + picomatch: 4.0.2 + fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 @@ -1281,6 +1349,10 @@ snapshots: lru-cache@11.1.0: {} + magic-string@0.30.17: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.4 + make-dir@3.1.0: dependencies: semver: 6.3.1 @@ -1312,6 +1384,8 @@ snapshots: minipass@7.1.2: {} + moment@2.30.1: {} + ms@2.1.3: {} normalize-path@3.0.0: {} @@ -1320,6 +1394,8 @@ snapshots: package-json-from-dist@1.0.1: {} + package-name-regex@2.0.6: {} + parse-ms@4.0.0: {} path-key@3.1.1: {} @@ -1408,6 +1484,20 @@ snapshots: '@rolldown/binding-win32-ia32-msvc': 1.0.0-beta.26 '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.26 + rollup-plugin-license@3.6.0(picomatch@4.0.2)(rollup@4.45.0): + dependencies: + commenting: 1.1.0 + fdir: 6.4.6(picomatch@4.0.2) + lodash: 4.17.21 + magic-string: 0.30.17 + moment: 2.30.1 + package-name-regex: 2.0.6 + rollup: 4.45.0 + spdx-expression-validate: 2.0.0 + spdx-satisfies: 5.0.1 + transitivePeerDependencies: + - picomatch + rollup@4.45.0: dependencies: '@types/estree': 1.0.8 @@ -1433,7 +1523,6 @@ snapshots: '@rollup/rollup-win32-ia32-msvc': 4.45.0 '@rollup/rollup-win32-x64-msvc': 4.45.0 fsevents: 2.3.3 - optional: true run-parallel@1.2.0: dependencies: @@ -1455,6 +1544,33 @@ snapshots: signal-exit@4.1.0: {} + spdx-compare@1.0.0: + dependencies: + array-find-index: 1.0.2 + spdx-expression-parse: 3.0.1 + spdx-ranges: 2.1.1 + + spdx-exceptions@2.5.0: {} + + spdx-expression-parse@3.0.1: + dependencies: + spdx-exceptions: 2.5.0 + spdx-license-ids: 3.0.21 + + spdx-expression-validate@2.0.0: + dependencies: + spdx-expression-parse: 3.0.1 + + spdx-license-ids@3.0.21: {} + + spdx-ranges@2.1.1: {} + + spdx-satisfies@5.0.1: + dependencies: + spdx-compare: 1.0.0 + spdx-expression-parse: 3.0.1 + spdx-ranges: 2.1.1 + streamx@2.22.1: dependencies: fast-fifo: 1.3.2 diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 7b43e9e5..39f1bd2e 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -9,7 +9,7 @@ import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; import { MODELER_FILES } from "./constants.js"; import { loadConfig } from "./rolldown.js"; -import { bgBlue, blue, bold, dim, green, greenBright, inverse, white } from "./utils/colors.js"; +import { blue, bold, dim, green, greenBright, inverse } from "./utils/colors.js"; import { isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; @@ -187,7 +187,6 @@ const formatMsg = { built: (file: string) => `Built ${bold(file)}`, rebuilt: (file: string, duration: number) => `Rebuilt ${dim(file)} in ${green(ms(duration))}`, copy: (file: string) => `Copy ${bold(file)}`, - mxpath1: (dir: string) => `${inverse(greenBright(bold(" PROJECT PATH ")))}${bgBlue(white(bold(` ${dir} `)))}`, mxpath: (dir: string) => `${inverse(greenBright(bold(" PROJECT PATH ")))}${blue(bold(` ${dir} `))}` }; diff --git a/packages/mpx/src/plugins.ts b/packages/mpx/src/plugins.ts index 
5d93c844..436033f5 100644 --- a/packages/mpx/src/plugins.ts +++ b/packages/mpx/src/plugins.ts @@ -1,11 +1,13 @@ import image from "@rollup/plugin-image"; import url from "@rollup/plugin-url"; import { RolldownPlugin } from "rolldown"; +import license from "rollup-plugin-license"; /** Note: Rollup has issue with exported types https://github.com/rollup/plugins/issues/1329 */ type RollupUrlFactory = typeof url.default; export type RollupUrlOptions = Parameters[0]; export type RollupImageOptions = Parameters[0]; +export type RollupLicenseOptions = Parameters[0]; export const plugins = { url(options?: RollupUrlOptions): RolldownPlugin { @@ -14,5 +16,8 @@ export const plugins = { }, image(options?: RollupImageOptions): RolldownPlugin { return (image as any)(options) as RolldownPlugin; + }, + license(options?: RollupLicenseOptions): RolldownPlugin { + return (license as any)(options) as RolldownPlugin; } }; diff --git a/packages/mpx/src/rolldown.ts b/packages/mpx/src/rolldown.ts index 87f64e71..014c942a 100644 --- a/packages/mpx/src/rolldown.ts +++ b/packages/mpx/src/rolldown.ts @@ -1,6 +1,7 @@ import { BuildOptions, RolldownPlugin } from "rolldown"; +import { Dependency } from "rollup-plugin-license"; import { STD_EXTERNALS } from "./constants.js"; -import { plugins, RollupUrlOptions } from "./plugins.js"; +import { plugins, RollupLicenseOptions, RollupUrlOptions } from "./plugins.js"; import { hasEditorConfig, hasEditorPreview } from "./utils/fs.js"; import { ProjectConfig } from "./utils/project-config.js"; @@ -9,6 +10,7 @@ export async function defaultConfig(project: ProjectConfig): Promise + JSON.stringify( + dependencies.map(dependency => { + const repoUrl = + typeof dependency.repository === "string" + ? dependency.repository + : dependency.repository instanceof Object + ? dependency.repository.url + : undefined; + + return { + [dependency.name!]: { + version: dependency.version, + url: dependency.homepage ?? 
repoUrl + } + }; + }) + ); diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 70090c7f..883ab45d 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -19,6 +19,8 @@ interface BundleOutputFiles { esm: string; umd: string; mpk: string; + dependenciesTxt: string; + dependenciesJson: string; } interface BundleOutputDirs { @@ -137,6 +139,14 @@ export class ProjectConfig { mpk: path.format({ dir: outputDirs.mpkDir, base: `${pkg.packagePath}.${pkg.widgetName}.mpk` + }), + dependenciesTxt: path.format({ + dir: outputDirs.contentRoot, + base: "dependencies.txt" + }), + dependenciesJson: path.format({ + dir: outputDirs.contentRoot, + base: "dependencies.json" }) }; } From 1716019cbae1e5e4192828590c1d3963ddde3c0c Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 17:45:51 +0200 Subject: [PATCH 21/45] chore: add filesize for mpk --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 9 +++++++++ packages/mpx/src/build.ts | 24 ++++++++++++++---------- 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 76b1dc8a..5b5b1c92 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -28,6 +28,7 @@ "consola": "^3.4.2", "cpx2": "^8.0.0", "fast-glob": "^3.3.3", + "filesize": "^11.0.1", "picocolors": "^1.1.1", "premove": "^4.0.0", "prettier": "^3.6.2", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 49647618..c8b200ab 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -39,6 +39,9 @@ importers: fast-glob: specifier: ^3.3.3 version: 3.3.3 + filesize: + specifier: ^11.0.1 + version: 11.0.1 picocolors: specifier: ^1.1.1 version: 1.1.1 @@ -487,6 +490,10 @@ packages: picomatch: optional: true + filesize@11.0.1: + resolution: {integrity: sha512-ua1SLPcFgqf1lICRVqTA5d8T6kqg2ZTIm0BImnUk4pZzfAlwhqO9zKv7GCE5FGl3zIbBZZSmq7yLikFNsi5eXw==} + engines: {node: '>= 10.4.0'} + fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} @@ -1243,6 +1250,8 @@ snapshots: optionalDependencies: picomatch: 4.0.2 + filesize@11.0.1: {} + fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 39f1bd2e..65aeb48b 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,6 +1,7 @@ import chokidar from "chokidar"; import { ConsolaInstance } from "consola"; import fg from "fast-glob"; +import { filesize } from "filesize"; import fs from "node:fs/promises"; import path from "node:path"; import { env } from "node:process"; @@ -77,9 +78,7 @@ const tasks = { } await tasks.copyModelerFiles(params); - - await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); - logger.success(formatMsg.built(project.outputFiles.mpk)); + await tasks.buildMpk(params); const buildInfo = buildMeasure.end(); logger.success("Done in", green(ms(buildInfo.duration))); @@ -156,9 +155,9 @@ const tasks = { watcher.close(); }); }, - - async watchContent({ logger, project }: TaskParams): Promise { - await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); + async watchContent(params: TaskParams): Promise { + const { project } = params; + await tasks.buildMpk({ ...params, quiet: true }); const watcher = 
chokidar.watch(project.outputDirs.contentRoot); let debounceTimer: NodeJS.Timeout | null = null; @@ -168,10 +167,7 @@ const tasks = { clearTimeout(debounceTimer); } - debounceTimer = setTimeout(async () => { - await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); - logger.success(formatMsg.built(project.outputFiles.mpk)); - }, 30); + debounceTimer = setTimeout(() => tasks.buildMpk(params), 30); }); onExit(() => { @@ -180,11 +176,19 @@ const tasks = { } watcher.close(); }); + }, + async buildMpk({ project, logger, quiet = false }: TaskParams & { quiet?: boolean }): Promise { + await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); + const mpkStat = await fs.stat(project.outputFiles.mpk); + if (!quiet) { + logger.success(formatMsg.builtSize(project.outputFiles.mpk, mpkStat.size)); + } } }; const formatMsg = { built: (file: string) => `Built ${bold(file)}`, + builtSize: (file: string, size: number) => `Built ${bold(file)} (${dim(filesize(size, { standard: "jedec" }))})`, rebuilt: (file: string, duration: number) => `Rebuilt ${dim(file)} in ${green(ms(duration))}`, copy: (file: string) => `Copy ${bold(file)}`, mxpath: (dir: string) => `${inverse(greenBright(bold(" PROJECT PATH ")))}${blue(bold(` ${dir} `))}` From c222b381aa4caa7627aa4cff398bc05feb1b45ca Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 17:47:12 +0200 Subject: [PATCH 22/45] chore: remove cpx2 --- packages/mpx/package.json | 1 - packages/mpx/pnpm-lock.yaml | 166 ------------------------------------ 2 files changed, 167 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 5b5b1c92..ba1ff135 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -26,7 +26,6 @@ "cac": "^6.7.14", "chokidar": "^4.0.3", "consola": "^3.4.2", - "cpx2": "^8.0.0", "fast-glob": "^3.3.3", "filesize": "^11.0.1", "picocolors": "^1.1.1", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index c8b200ab..4e268600 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -33,9 +33,6 @@ importers: consola: specifier: ^3.4.2 version: 3.4.2 - cpx2: - specifier: ^8.0.0 - version: 8.0.0 fast-glob: specifier: ^3.3.3 version: 3.3.3 @@ -418,11 +415,6 @@ packages: core-util-is@1.0.3: resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} - cpx2@8.0.0: - resolution: {integrity: sha512-RxD9jrSVNSOmfcbiPlr3XnKbUKH9K1w2HCv0skczUKhsZTueiDBecxuaSAKQkYSLQaGVA4ZQJZlTj5hVNNEvKg==} - engines: {node: ^20.0.0 || >=22.0.0, npm: '>=10'} - hasBin: true - crc-32@1.2.2: resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} engines: {node: '>=0.8'} @@ -436,22 +428,6 @@ packages: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} - debounce@2.2.0: - resolution: {integrity: sha512-Xks6RUDLZFdz8LIdR6q0MTH44k7FikOmnh5xkSjMig6ch45afc8sjTjRQf3P6ax8dMgcQrYO/AR2RGWURrruqw==} - engines: {node: '>=18'} - - debug@4.4.1: - resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - - duplexer@0.1.2: - resolution: {integrity: 
sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} - eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} @@ -498,33 +474,19 @@ packages: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} - find-index@0.1.1: - resolution: {integrity: sha512-uJ5vWrfBKMcE6y2Z8834dwEZj9mNGxYa3t3I53OwFeuZ8D9oc2E5zcsrkuhX6h4iYrjhiv0T3szQmxlAV9uxDg==} - foreground-child@3.3.1: resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} engines: {node: '>=14'} - fs-extra@11.3.0: - resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==} - engines: {node: '>=14.14'} - fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] - function-bind@1.1.2: - resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} - glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} - glob2base@0.0.12: - resolution: {integrity: sha512-ZyqlgowMbfj2NPjxaZZ/EtsXlOch28FRXgMd64vqZWk1bT9+wvSRLYD1om9M7QfQru51zJPAT17qXm4/zd+9QA==} - engines: {node: '>= 0.10'} - glob@10.4.5: resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} hasBin: true @@ -537,24 +499,12 @@ packages: graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - hasown@2.0.2: - resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} - engines: {node: '>= 0.4'} - ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - ignore@6.0.2: - resolution: {integrity: sha512-InwqeHHN2XpumIkMvpl/DCJVrAHgCsG5+cn1XlnLWGwtZBm8QJfSusItfrwx81CTp5agNZqpKU2J/ccC5nGT4A==} - engines: {node: '>= 4'} - inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - is-core-module@2.16.1: - resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} - engines: {node: '>= 0.4'} - is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -588,9 +538,6 @@ packages: resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} engines: {node: 20 || >=22} - jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - lazystream@1.0.1: resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} engines: {node: '>= 0.6.3'} @@ -641,9 +588,6 @@ packages: resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} engines: {node: '>=16 || 14 >=14.17'} - 
minimist@1.2.8: - resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - minipass@7.1.2: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} @@ -651,17 +595,10 @@ packages: moment@2.30.1: resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} - ms@2.1.3: - resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} - p-map@7.0.3: - resolution: {integrity: sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==} - engines: {node: '>=18'} - package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} @@ -677,9 +614,6 @@ packages: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} - path-parse@1.0.7: - resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - path-scurry@1.11.1: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} @@ -737,11 +671,6 @@ packages: resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} engines: {node: '>= 14.18.0'} - resolve@1.22.10: - resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==} - engines: {node: '>= 0.4'} - hasBin: true - reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -782,10 +711,6 @@ packages: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} - shell-quote@1.8.3: - resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} - engines: {node: '>= 0.4'} - signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} @@ -836,13 +761,6 @@ packages: resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} engines: {node: '>=12'} - subarg@1.0.0: - resolution: {integrity: sha512-RIrIdRY0X1xojthNcVtgT9sjpOGagEUKpZdgBUi054OEPFo282yg+zE+t1Rj3+RqKq2xStL7uUHhY+AjbC4BXg==} - - supports-preserve-symlinks-flag@1.0.0: - resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} - engines: {node: '>= 0.4'} - tar-stream@3.1.7: resolution: {integrity: sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==} @@ -864,10 +782,6 @@ packages: undici-types@7.8.0: resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} - universalify@2.0.1: - resolution: {integrity: 
sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} - engines: {node: '>= 10.0.0'} - util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} @@ -1181,24 +1095,6 @@ snapshots: core-util-is@1.0.3: {} - cpx2@8.0.0: - dependencies: - debounce: 2.2.0 - debug: 4.4.1 - duplexer: 0.1.2 - fs-extra: 11.3.0 - glob: 11.0.3 - glob2base: 0.0.12 - ignore: 6.0.2 - minimatch: 10.0.3 - p-map: 7.0.3 - resolve: 1.22.10 - safe-buffer: 5.2.1 - shell-quote: 1.8.3 - subarg: 1.0.0 - transitivePeerDependencies: - - supports-color - crc-32@1.2.2: {} crc32-stream@6.0.0: @@ -1212,14 +1108,6 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 - debounce@2.2.0: {} - - debug@4.4.1: - dependencies: - ms: 2.1.3 - - duplexer@0.1.2: {} - eastasianwidth@0.2.0: {} emoji-regex@8.0.0: {} @@ -1256,32 +1144,18 @@ snapshots: dependencies: to-regex-range: 5.0.1 - find-index@0.1.1: {} - foreground-child@3.3.1: dependencies: cross-spawn: 7.0.6 signal-exit: 4.1.0 - fs-extra@11.3.0: - dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - universalify: 2.0.1 - fsevents@2.3.3: optional: true - function-bind@1.1.2: {} - glob-parent@5.1.2: dependencies: is-glob: 4.0.3 - glob2base@0.0.12: - dependencies: - find-index: 0.1.1 - glob@10.4.5: dependencies: foreground-child: 3.3.1 @@ -1302,20 +1176,10 @@ snapshots: graceful-fs@4.2.11: {} - hasown@2.0.2: - dependencies: - function-bind: 1.1.2 - ieee754@1.2.1: {} - ignore@6.0.2: {} - inherits@2.0.4: {} - is-core-module@2.16.1: - dependencies: - hasown: 2.0.2 - is-extglob@2.1.1: {} is-fullwidth-code-point@3.0.0: {} @@ -1342,12 +1206,6 @@ snapshots: dependencies: '@isaacs/cliui': 8.0.2 - jsonfile@6.1.0: - dependencies: - universalify: 2.0.1 - optionalDependencies: - graceful-fs: 4.2.11 - lazystream@1.0.1: dependencies: readable-stream: 2.3.8 @@ -1389,18 +1247,12 @@ snapshots: dependencies: brace-expansion: 2.0.2 - minimist@1.2.8: {} - minipass@7.1.2: {} moment@2.30.1: {} - ms@2.1.3: {} - normalize-path@3.0.0: {} - p-map@7.0.3: {} - package-json-from-dist@1.0.1: {} package-name-regex@2.0.6: {} @@ -1409,8 +1261,6 @@ snapshots: path-key@3.1.1: {} - path-parse@1.0.7: {} - path-scurry@1.11.1: dependencies: lru-cache: 10.4.3 @@ -1465,12 +1315,6 @@ snapshots: readdirp@4.1.2: {} - resolve@1.22.10: - dependencies: - is-core-module: 2.16.1 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - reusify@1.1.0: {} rolldown@1.0.0-beta.26: @@ -1549,8 +1393,6 @@ snapshots: shebang-regex@3.0.0: {} - shell-quote@1.8.3: {} - signal-exit@4.1.0: {} spdx-compare@1.0.0: @@ -1615,12 +1457,6 @@ snapshots: dependencies: ansi-regex: 6.1.0 - subarg@1.0.0: - dependencies: - minimist: 1.2.8 - - supports-preserve-symlinks-flag@1.0.0: {} - tar-stream@3.1.7: dependencies: b4a: 1.6.7 @@ -1642,8 +1478,6 @@ snapshots: undici-types@7.8.0: {} - universalify@2.0.1: {} - util-deprecate@1.0.2: {} which@2.0.2: From af68434ed68d3fd790727a485e219d7483f31c8c Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 18:01:05 +0200 Subject: [PATCH 23/45] chore: rename task --- packages/mpx/src/build.ts | 22 ++++++++++++---------- packages/mpx/src/constants.ts | 15 ++++++++++----- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 65aeb48b..c543dc94 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -8,7 +8,7 @@ import { env } from "node:process"; import ms from 
"pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; -import { MODELER_FILES } from "./constants.js"; +import { PACKAGE_FILES } from "./constants.js"; import { loadConfig } from "./rolldown.js"; import { blue, bold, dim, green, greenBright, inverse } from "./utils/colors.js"; import { isTypeScriptProject, readPackageJson } from "./utils/fs.js"; @@ -77,7 +77,7 @@ const tasks = { logger.success(formatMsg.built(bundle.output?.file!)); } - await tasks.copyModelerFiles(params); + await tasks.copyPackageFiles(params); await tasks.buildMpk(params); const buildInfo = buildMeasure.end(); @@ -117,8 +117,8 @@ const tasks = { }); await bundleWatchReady; - await tasks.watchModelerFiles(params); - await tasks.watchContent(params); + await tasks.watchPackageFiles(params); + await tasks.watchPackageContent(params); logger.info("Waiting for changes..."); onExit(() => { @@ -127,8 +127,8 @@ const tasks = { logger.log("Build watcher stopped"); }); }, - async copyModelerFiles({ project }: TaskParams): Promise { - const stream = fg.stream(MODELER_FILES); + async copyPackageFiles({ project }: TaskParams): Promise { + const stream = fg.stream(PACKAGE_FILES); for await (const src of stream) { const f = path.parse(src as string); const dst = path.join(project.outputDirs.contentRoot, f.base); @@ -138,12 +138,13 @@ const tasks = { }); } }, - async watchModelerFiles(params: TaskParams): Promise { + /** Watch & copy static package files */ + async watchPackageFiles(params: TaskParams): Promise { const { project, logger } = params; - await tasks.copyModelerFiles(params); + await tasks.copyPackageFiles(params); - const watcher = chokidar.watch(await fg(MODELER_FILES)); + const watcher = chokidar.watch(await fg(PACKAGE_FILES)); watcher.on("change", async file => { logger.info(formatMsg.copy(file)); const f = path.parse(file); @@ -155,7 +156,8 @@ const tasks = { watcher.close(); }); }, - async watchContent(params: TaskParams): Promise { + /** Setup package content watcher to build mpk whenever package files change */ + async watchPackageContent(params: TaskParams): Promise { const { project } = params; await tasks.buildMpk({ ...params, quiet: true }); const watcher = chokidar.watch(project.outputDirs.contentRoot); diff --git a/packages/mpx/src/constants.ts b/packages/mpx/src/constants.ts index 1e1b48a5..1d91bfd6 100644 --- a/packages/mpx/src/constants.ts +++ b/packages/mpx/src/constants.ts @@ -1,8 +1,6 @@ -import { readFileSync } from "node:fs"; +import pkg from "../package.json" with { type: "json" }; -const { version } = JSON.parse(readFileSync(new URL("../package.json", import.meta.url)).toString()); - -export const VERSION = version as string; +export const VERSION = pkg.version as string; export const STD_EXTERNALS = [ // "mendix" and internals under "mendix/" @@ -13,4 +11,11 @@ export const STD_EXTERNALS = [ /^big.js$/ ]; -export const MODELER_FILES = ["src/*.xml", "src/*.@(tile|icon)?(.dark).png"]; +export const PACKAGE_FILES = [ + // XML files + "src/*.xml", + // Modeler icons + "src/*.@(tile|icon)?(.dark).png", + // License file + "{licen[cs]e,LICEN[CS]E}?(.*)" +]; From 9cc77282a76232a2c4b49267b3648d1173d90dd1 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 18:51:54 +0200 Subject: [PATCH 24/45] chore: add deploy utility --- packages/mpx/src/build.ts | 46 +++++++++++++----------- packages/mpx/src/utils/fs.ts | 21 ++++++++--- packages/mpx/src/utils/project-config.ts | 12 +++---- 3 files 
changed, 49 insertions(+), 30 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index c543dc94..2466a6d0 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -11,7 +11,7 @@ import { onExit } from "signal-exit"; import { PACKAGE_FILES } from "./constants.js"; import { loadConfig } from "./rolldown.js"; import { blue, bold, dim, green, greenBright, inverse } from "./utils/colors.js"; -import { isTypeScriptProject, readPackageJson } from "./utils/fs.js"; +import { deployToMxProject, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; import { ProjectConfig } from "./utils/project-config.js"; @@ -32,27 +32,28 @@ export async function build(root: string | undefined, options: BuildCommandOptio const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); - const project = new ProjectConfig({ + const config = new ProjectConfig({ pkg, isTsProject }); - const projectPath = await project.getProjectPath(); + const projectPath = await config.getProjectPath(); if (projectPath) { logger.info(formatMsg.mxpath(projectPath)); } - const bundles = await loadConfig(project); + const bundles = await loadConfig(config); - await fs.rm(project.outputDirs.dist, { recursive: true, force: true }); - console.dir(project.inputFiles); - console.dir(project.outputDirs); - console.dir(project.outputFiles); - console.dir(project.assetsPublicPath); + await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); + console.dir(config.inputFiles); + console.dir(config.outputDirs); + console.dir(config.outputFiles); + console.dir(config.assetsPublicPath); + console.dir(config.relativeWidgetPath); if (options.watch) { - await tasks.watch({ project, bundles, logger, root }); + await tasks.watch({ config, bundles, logger, root }); } else { - await tasks.build({ project, bundles, logger, root }); + await tasks.build({ config, bundles, logger, root }); } } catch (error) { logger.error(error); @@ -63,13 +64,13 @@ export async function build(root: string | undefined, options: BuildCommandOptio interface TaskParams { root: string; bundles: BuildOptions[]; - project: ProjectConfig; + config: ProjectConfig; logger: ConsolaInstance; } const tasks = { async build(params: TaskParams): Promise { - const { project, bundles, logger } = params; + const { config, bundles, logger } = params; buildMeasure.start(); for (const bundle of bundles) { @@ -82,6 +83,11 @@ const tasks = { const buildInfo = buildMeasure.end(); logger.success("Done in", green(ms(buildInfo.duration))); + + const projectPath = await config.getProjectPath(); + if (projectPath) { + await deployToMxProject(config, projectPath); + } }, async watch(params: TaskParams): Promise { const { root, bundles, logger } = params; @@ -127,11 +133,11 @@ const tasks = { logger.log("Build watcher stopped"); }); }, - async copyPackageFiles({ project }: TaskParams): Promise { + async copyPackageFiles({ config }: TaskParams): Promise { const stream = fg.stream(PACKAGE_FILES); for await (const src of stream) { const f = path.parse(src as string); - const dst = path.join(project.outputDirs.contentRoot, f.base); + const dst = path.join(config.outputDirs.contentRoot, f.base); await fs.cp(src as string, dst, { recursive: true @@ -140,7 +146,7 @@ const tasks = { }, /** Watch & copy static package files */ async watchPackageFiles(params: TaskParams): Promise { - const { project, logger } = params; + const { 
config, logger } = params; await tasks.copyPackageFiles(params); @@ -148,7 +154,7 @@ const tasks = { watcher.on("change", async file => { logger.info(formatMsg.copy(file)); const f = path.parse(file); - const dst = path.join(project.outputDirs.contentRoot, f.base); + const dst = path.join(config.outputDirs.contentRoot, f.base); await fs.cp(file, dst); }); @@ -158,9 +164,9 @@ const tasks = { }, /** Setup package content watcher to build mpk whenever package files change */ async watchPackageContent(params: TaskParams): Promise { - const { project } = params; + const { config } = params; await tasks.buildMpk({ ...params, quiet: true }); - const watcher = chokidar.watch(project.outputDirs.contentRoot); + const watcher = chokidar.watch(config.outputDirs.contentRoot); let debounceTimer: NodeJS.Timeout | null = null; @@ -179,7 +185,7 @@ const tasks = { watcher.close(); }); }, - async buildMpk({ project, logger, quiet = false }: TaskParams & { quiet?: boolean }): Promise { + async buildMpk({ config: project, logger, quiet = false }: TaskParams & { quiet?: boolean }): Promise { await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); const mpkStat = await fs.stat(project.outputFiles.mpk); if (!quiet) { diff --git a/packages/mpx/src/utils/fs.ts b/packages/mpx/src/utils/fs.ts index 2efae808..607f5a11 100644 --- a/packages/mpx/src/utils/fs.ts +++ b/packages/mpx/src/utils/fs.ts @@ -15,12 +15,12 @@ export async function isTypeScriptProject(root: string): Promise { return access(path.resolve(root, "tsconfig.json")); } -export async function hasEditorConfig(project: ProjectConfig): Promise { - return access(path.resolve(project.inputFiles.editorConfig)); +export async function hasEditorConfig(config: ProjectConfig): Promise { + return access(path.resolve(config.inputFiles.editorConfig)); } -export async function hasEditorPreview(project: ProjectConfig): Promise { - return access(path.resolve(project.inputFiles.editorPreview)); +export async function hasEditorPreview(config: ProjectConfig): Promise { + return access(path.resolve(config.inputFiles.editorPreview)); } export async function readPackageJson(root: string): Promise { @@ -31,3 +31,16 @@ export async function readPackageJson(root: string): Promise { throw parsePackageError(error); } } + +export async function deployToMxProject(config: ProjectConfig, projectPath: string): Promise { + const mpkDst = path.join(projectPath, "widgets"); + const widgetDst = path.join(projectPath, "deployment", "web", "widgets", config.relativeWidgetPath); + + await fs.mkdir(widgetDst, { recursive: true }); + await fs.mkdir(mpkDst, { recursive: true }); + // Copy widget assets to deployment + // Note: in pwt we copy all files (including xml) which probably not needed + await fs.cp(config.outputDirs.widgetDir, widgetDst, { recursive: true, force: true }); + // Copy mpk to "widgets" directory + await fs.cp(config.outputDirs.mpkDir, mpkDst, { recursive: true, force: true }); +} diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 883ab45d..dd23c6c6 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -97,19 +97,19 @@ export class ProjectConfig { return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; } - /** Directory where widget bundles will be output */ - get widgetDir(): string { - const { pkg, contentRoot } = this; - return path.join(contentRoot, ...pkg.packagePath.split("."), pkg.widgetName.toLowerCase()); + /** Relative path to the 
widget directory from the "widgets" */ + get relativeWidgetPath(): string { + return path.join(...this.pkg.packagePath.split("."), this.pkg.widgetName.toLowerCase()); } get outputDirs(): BundleOutputDirs { + const widgetDir = path.join(this.contentRoot, this.relativeWidgetPath); return { dist: this.dist, mpkDir: path.join(this.dist, this.pkg.version), contentRoot: this.contentRoot, - widgetDir: this.widgetDir, - widgetAssetsDir: path.join(this.widgetDir, "assets") + widgetDir, + widgetAssetsDir: path.join(widgetDir, "assets") }; } From 49248ebdb6cbf4f97b0cc4e38bb6091d4ec4c36a Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Tue, 15 Jul 2025 19:04:33 +0200 Subject: [PATCH 25/45] chore: easy config print --- packages/mpx/src/build.ts | 14 +++++--------- packages/mpx/src/utils/project-config.ts | 16 ++++++++++++++++ 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 2466a6d0..2aefe8b3 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -45,11 +45,7 @@ export async function build(root: string | undefined, options: BuildCommandOptio const bundles = await loadConfig(config); await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); - console.dir(config.inputFiles); - console.dir(config.outputDirs); - console.dir(config.outputFiles); - console.dir(config.assetsPublicPath); - console.dir(config.relativeWidgetPath); + console.dir(await config.toPlainObject(), { depth: 3 }); if (options.watch) { await tasks.watch({ config, bundles, logger, root }); } else { @@ -185,11 +181,11 @@ const tasks = { watcher.close(); }); }, - async buildMpk({ config: project, logger, quiet = false }: TaskParams & { quiet?: boolean }): Promise { - await createMPK(project.outputDirs.contentRoot, project.outputFiles.mpk); - const mpkStat = await fs.stat(project.outputFiles.mpk); + async buildMpk({ config, logger, quiet = false }: TaskParams & { quiet?: boolean }): Promise { + await createMPK(config.outputDirs.contentRoot, config.outputFiles.mpk); + const mpkStat = await fs.stat(config.outputFiles.mpk); if (!quiet) { - logger.success(formatMsg.builtSize(project.outputFiles.mpk, mpkStat.size)); + logger.success(formatMsg.builtSize(config.outputFiles.mpk, mpkStat.size)); } } }; diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index dd23c6c6..359a4532 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -173,4 +173,20 @@ export class ProjectConfig { return projectPath; } } + + async toPlainObject(): Promise> { + const projectPath = await this.getProjectPath(); + return { + dist: this.dist, + contentRoot: this.contentRoot, + pkg: this.pkg, + isTsProject: this.isTsProject, + projectPath, + inputFiles: this.inputFiles, + outputDirs: this.outputDirs, + outputFiles: this.outputFiles, + assetsPublicPath: this.assetsPublicPath, + relativeWidgetPath: this.relativeWidgetPath + }; + } } From ab70a681115882d4ad8c97a933a6f9718a003cfc Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 09:34:46 +0200 Subject: [PATCH 26/45] chore: change how config is created --- packages/mpx/src/build.ts | 14 ++++---- packages/mpx/src/utils/project-config.ts | 46 ++++++++++++------------ 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 2aefe8b3..2569f58f 100644 --- 
a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -32,20 +32,19 @@ export async function build(root: string | undefined, options: BuildCommandOptio const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); - const config = new ProjectConfig({ + const config = await ProjectConfig.create({ pkg, isTsProject }); - const projectPath = await config.getProjectPath(); - if (projectPath) { - logger.info(formatMsg.mxpath(projectPath)); + if (config.projectPath) { + logger.info(formatMsg.mxpath(config.projectPath)); } const bundles = await loadConfig(config); await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); - console.dir(await config.toPlainObject(), { depth: 3 }); + console.dir(config.toPlainObject(), { depth: 3 }); if (options.watch) { await tasks.watch({ config, bundles, logger, root }); } else { @@ -80,9 +79,8 @@ const tasks = { const buildInfo = buildMeasure.end(); logger.success("Done in", green(ms(buildInfo.duration))); - const projectPath = await config.getProjectPath(); - if (projectPath) { - await deployToMxProject(config, projectPath); + if (config.projectPath) { + await deployToMxProject(config, config.projectPath); } }, async watch(params: TaskParams): Promise { diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 359a4532..cbe65c54 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -37,7 +37,7 @@ interface ProjectConfigInputs { } export class ProjectConfig { - #projectPath: string | undefined; + readonly projectPath: string | null = null; /** Output directory for built files */ readonly dist = "dist"; /** Package root directory that contains all widget files shipped with mpk */ @@ -46,9 +46,10 @@ export class ProjectConfig { readonly pkg: PackageJson; readonly isTsProject: boolean; - constructor(inputs: ProjectConfigInputs) { + constructor(inputs: ProjectConfigInputs, projectPath: string | null) { this.pkg = inputs.pkg; this.isTsProject = inputs.isTsProject; + this.projectPath = projectPath; } /** Public path (aka base url) for widget assets */ @@ -151,11 +152,22 @@ export class ProjectConfig { }; } - async getProjectPath(): Promise { - if (this.#projectPath) { - return this.#projectPath; - } - const { pkg } = this; + toPlainObject(): Record { + return { + dist: this.dist, + contentRoot: this.contentRoot, + pkg: this.pkg, + isTsProject: this.isTsProject, + projectPath: this.projectPath, + inputFiles: this.inputFiles, + outputDirs: this.outputDirs, + outputFiles: this.outputFiles, + assetsPublicPath: this.assetsPublicPath, + relativeWidgetPath: this.relativeWidgetPath + }; + } + + static async getProjectPath(pkg: PackageJson): Promise { let projectPath = (() => { if (env.MX_PROJECT_PATH) { return env.MX_PROJECT_PATH; @@ -169,24 +181,14 @@ export class ProjectConfig { projectPath = path.resolve(projectPath); if (await access(projectPath)) { - this.#projectPath = projectPath; return projectPath; } + + return null; } - async toPlainObject(): Promise> { - const projectPath = await this.getProjectPath(); - return { - dist: this.dist, - contentRoot: this.contentRoot, - pkg: this.pkg, - isTsProject: this.isTsProject, - projectPath, - inputFiles: this.inputFiles, - outputDirs: this.outputDirs, - outputFiles: this.outputFiles, - assetsPublicPath: this.assetsPublicPath, - relativeWidgetPath: this.relativeWidgetPath - }; + static async create(inputs: ProjectConfigInputs): Promise { + const projectPath = await 
ProjectConfig.getProjectPath(inputs.pkg); + return new ProjectConfig(inputs, projectPath); } } From 3559b55e7897f477b8a4a76f8f67fe411e8968be Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 09:52:25 +0200 Subject: [PATCH 27/45] feat: copy all dirs from content root to deployment --- packages/mpx/src/utils/fs.ts | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/mpx/src/utils/fs.ts b/packages/mpx/src/utils/fs.ts index 607f5a11..c146bcbe 100644 --- a/packages/mpx/src/utils/fs.ts +++ b/packages/mpx/src/utils/fs.ts @@ -34,13 +34,18 @@ export async function readPackageJson(root: string): Promise { export async function deployToMxProject(config: ProjectConfig, projectPath: string): Promise { const mpkDst = path.join(projectPath, "widgets"); - const widgetDst = path.join(projectPath, "deployment", "web", "widgets", config.relativeWidgetPath); + const deploymentDir = path.join(projectPath, "deployment", "web", "widgets"); - await fs.mkdir(widgetDst, { recursive: true }); + // Get the list of files in contentRoot + const files = await fs.readdir(config.outputDirs.contentRoot, { withFileTypes: true }); + + // Copy directories from contentRoot to deploymentDir + for (const file of files.filter(file => file.isDirectory())) { + const src = path.join(config.outputDirs.contentRoot, file.name); + const dst = path.join(deploymentDir, file.name); + await fs.cp(src, dst, { recursive: true, force: true }); + } + // Copy MPK file to widgets directory await fs.mkdir(mpkDst, { recursive: true }); - // Copy widget assets to deployment - // Note: in pwt we copy all files (including xml) which probably not needed - await fs.cp(config.outputDirs.widgetDir, widgetDst, { recursive: true, force: true }); - // Copy mpk to "widgets" directory await fs.cp(config.outputDirs.mpkDir, mpkDst, { recursive: true, force: true }); } From bab21679498635752647e4f7b2dfe4a818daf3d8 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 09:52:36 +0200 Subject: [PATCH 28/45] chore: add readme --- packages/mpx/README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 packages/mpx/README.md diff --git a/packages/mpx/README.md b/packages/mpx/README.md new file mode 100644 index 00000000..5a62aac9 --- /dev/null +++ b/packages/mpx/README.md @@ -0,0 +1,20 @@ +## mpx + +Building widgets with rolldown + +``` +mpx/0.1.0 + +Usage: + $ mpx [options] + +Commands: + build [root] Build widget + +For more info, run any command with the `--help` flag: + $ mpx build --help + +Options: + -h, --help Display this message + -v, --version Display version number +``` From d44f9c47472b35988d40a849972ce9908e50503f Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 10:07:21 +0200 Subject: [PATCH 29/45] chore: add dotenv --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 9 +++++++++ packages/mpx/src/build.ts | 7 +++++++ 3 files changed, 17 insertions(+) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index ba1ff135..85727e87 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -26,6 +26,7 @@ "cac": "^6.7.14", "chokidar": "^4.0.3", "consola": "^3.4.2", + "dotenv": "^17.2.0", "fast-glob": "^3.3.3", "filesize": "^11.0.1", "picocolors": "^1.1.1", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 4e268600..2469a7be 100644 --- 
a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -33,6 +33,9 @@ importers: consola: specifier: ^3.4.2 version: 3.4.2 + dotenv: + specifier: ^17.2.0 + version: 17.2.0 fast-glob: specifier: ^3.3.3 version: 3.3.3 @@ -428,6 +431,10 @@ packages: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} + dotenv@17.2.0: + resolution: {integrity: sha512-Q4sgBT60gzd0BB0lSyYD3xM4YxrXA9y4uBDof1JNYGzOXrQdQ6yX+7XIAqoFOGQFOTK1D3Hts5OllpxMDZFONQ==} + engines: {node: '>=12'} + eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} @@ -1108,6 +1115,8 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 + dotenv@17.2.0: {} + eastasianwidth@0.2.0: {} emoji-regex@8.0.0: {} diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 2569f58f..70666cc8 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -1,5 +1,6 @@ import chokidar from "chokidar"; import { ConsolaInstance } from "consola"; +import dotenv from "dotenv"; import fg from "fast-glob"; import { filesize } from "filesize"; import fs from "node:fs/promises"; @@ -21,6 +22,11 @@ interface BuildCommandOptions { minify?: boolean; } +/** + * Build the widget project. + * @param root - Widget directory containing package.json + * @param options - Build options + */ export async function build(root: string | undefined, options: BuildCommandOptions): Promise { options.watch ??= false; options.minify ??= !!env.CI; @@ -29,6 +35,7 @@ export async function build(root: string | undefined, options: BuildCommandOptio try { root = path.resolve(root ?? ""); process.chdir(root); + dotenv.config(); const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); From 321fd3af5dd1a4e47d5785723d5eaaf4a0f11872 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 11:00:24 +0200 Subject: [PATCH 30/45] feat: finish load config --- packages/mpx/src/build.ts | 8 ++++---- packages/mpx/src/rolldown.ts | 22 +++++++++++++++++++++- packages/mpx/src/utils/colors.ts | 4 +++- 3 files changed, 28 insertions(+), 6 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 70666cc8..80497757 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -35,7 +35,7 @@ export async function build(root: string | undefined, options: BuildCommandOptio try { root = path.resolve(root ?? 
""); process.chdir(root); - dotenv.config(); + dotenv.config({ quiet: true }); const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); @@ -48,10 +48,10 @@ export async function build(root: string | undefined, options: BuildCommandOptio logger.info(formatMsg.mxpath(config.projectPath)); } - const bundles = await loadConfig(config); + const bundles = await loadConfig(config, logger); await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); - console.dir(config.toPlainObject(), { depth: 3 }); + // console.dir(config.toPlainObject(), { depth: 3 }); if (options.watch) { await tasks.watch({ config, bundles, logger, root }); } else { @@ -200,7 +200,7 @@ const formatMsg = { builtSize: (file: string, size: number) => `Built ${bold(file)} (${dim(filesize(size, { standard: "jedec" }))})`, rebuilt: (file: string, duration: number) => `Rebuilt ${dim(file)} in ${green(ms(duration))}`, copy: (file: string) => `Copy ${bold(file)}`, - mxpath: (dir: string) => `${inverse(greenBright(bold(" PROJECT PATH ")))}${blue(bold(` ${dir} `))}` + mxpath: (dir: string) => `${inverse(greenBright(bold(" MX PROJECT PATH ")))}${blue(bold(` ${dir} `))}` }; const buildMeasure = { diff --git a/packages/mpx/src/rolldown.ts b/packages/mpx/src/rolldown.ts index 014c942a..882b3e6e 100644 --- a/packages/mpx/src/rolldown.ts +++ b/packages/mpx/src/rolldown.ts @@ -1,7 +1,12 @@ +import { ConsolaInstance } from "consola"; +import fg from "fast-glob"; +import assert from "node:assert"; +import path from "node:path"; import { BuildOptions, RolldownPlugin } from "rolldown"; import { Dependency } from "rollup-plugin-license"; import { STD_EXTERNALS } from "./constants.js"; import { plugins, RollupLicenseOptions, RollupUrlOptions } from "./plugins.js"; +import { bold, green } from "./utils/colors.js"; import { hasEditorConfig, hasEditorPreview } from "./utils/fs.js"; import { ProjectConfig } from "./utils/project-config.js"; @@ -66,7 +71,18 @@ export async function defaultConfig(project: ProjectConfig): Promise { +export async function loadConfig(project: ProjectConfig, logger: ConsolaInstance): Promise { + const [configFile] = await fg(["rollup.config.{js,mjs}"]); + if (configFile) { + logger.info(formatMsg.usingCustomConfig()); + const { default: customConfig } = await import(path.resolve(configFile)); + assert( + typeof customConfig === "function", + `Rollup config error: expected default export to be a function, got ${typeof customConfig}` + ); + const configDefaultConfig = await defaultConfig(project); + return customConfig({ configDefaultConfig }); + } return defaultConfig(project); } @@ -125,3 +141,7 @@ export const licenseCustomTemplate = (dependencies: Dependency[]) => }; }) ); + +const formatMsg = { + usingCustomConfig: () => green(bold(`Loading custom rollup config...`)) +}; diff --git a/packages/mpx/src/utils/colors.ts b/packages/mpx/src/utils/colors.ts index 75278cb5..c9212377 100644 --- a/packages/mpx/src/utils/colors.ts +++ b/packages/mpx/src/utils/colors.ts @@ -22,5 +22,7 @@ export const { bgGreenBright, bgBlackBright, bgBlack, - white + white, + magenta, + magentaBright } = pc.createColors(env.FORCE_COLOR !== "0" && !env.NO_COLOR); From 2e4f6e5754d38232cbfebfcafca657166cacbc34 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 11:10:27 +0200 Subject: [PATCH 31/45] refactor: change naming --- packages/mpx/src/rolldown.ts | 57 ++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 28 deletions(-) 
diff --git a/packages/mpx/src/rolldown.ts b/packages/mpx/src/rolldown.ts index 882b3e6e..c1ccf33f 100644 --- a/packages/mpx/src/rolldown.ts +++ b/packages/mpx/src/rolldown.ts @@ -10,61 +10,62 @@ import { bold, green } from "./utils/colors.js"; import { hasEditorConfig, hasEditorPreview } from "./utils/fs.js"; import { ProjectConfig } from "./utils/project-config.js"; -export async function defaultConfig(project: ProjectConfig): Promise { +export async function defaultConfig(config: ProjectConfig): Promise { const esmBundle = { - input: project.inputFiles.widgetFile, + input: config.inputFiles.widgetFile, external: [...STD_EXTERNALS], - plugins: stdPlugins(project), + plugins: stdPlugins(config), platform: "browser", output: { - file: project.outputFiles.esm, + file: config.outputFiles.esm, format: "esm" } } satisfies BuildOptions; const umdBundle = { - input: project.inputFiles.widgetFile, + input: config.inputFiles.widgetFile, external: [...STD_EXTERNALS], - plugins: stdPlugins(project), + plugins: stdPlugins(config), platform: "browser", output: { - file: project.outputFiles.umd, + file: config.outputFiles.umd, format: "umd", - name: `${project.pkg.packagePath}.${project.pkg.widgetName}`, + name: `${config.pkg.packagePath}.${config.pkg.widgetName}`, globals: { "react/jsx-runtime": "react_jsx_runtime" } } } satisfies BuildOptions; - const editorConfigBundle = { - input: project.inputFiles.editorConfig, - output: { - file: project.outputFiles.editorConfig, - format: "commonjs" - } - } satisfies BuildOptions; - - const editorPreviewBundle = { - input: project.inputFiles.editorPreview, - output: { - file: project.outputFiles.editorPreview, - format: "commonjs" - } - } satisfies BuildOptions; - const bundles: BuildOptions[] = [esmBundle, umdBundle]; - const [addEditorConfig, addEditorPreview] = await Promise.all([ - hasEditorConfig(project), - hasEditorPreview(project) - ]); + const [addEditorConfig, addEditorPreview] = await Promise.all([hasEditorConfig(config), hasEditorPreview(config)]); if (addEditorConfig) { + const editorConfigBundle = { + input: config.inputFiles.editorConfig, + external: [...STD_EXTERNALS], + treeshake: { moduleSideEffects: false }, + output: { + file: config.outputFiles.editorConfig, + format: "commonjs" + } + } satisfies BuildOptions; + bundles.push(editorConfigBundle); } if (addEditorPreview) { + const editorPreviewBundle = { + input: config.inputFiles.editorPreview, + external: [...STD_EXTERNALS], + platform: "browser", + output: { + file: config.outputFiles.editorPreview, + format: "commonjs" + } + } satisfies BuildOptions; + bundles.push(editorPreviewBundle); } From dcdf05841f5696de7c34cd24c04ecc5359e9732c Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 12:26:44 +0200 Subject: [PATCH 32/45] feat: add split for web and native --- packages/mpx/README.md | 7 + packages/mpx/src/build.ts | 26 ++-- packages/mpx/src/cli.ts | 6 +- .../mpx/src/{rolldown.ts => rolldown.web.ts} | 60 ++------- packages/mpx/src/utils/fs.ts | 32 ++++- packages/mpx/src/utils/helpers.ts | 21 +++ packages/mpx/src/utils/project-config.ts | 127 +++++++++++------- 7 files changed, 164 insertions(+), 115 deletions(-) rename packages/mpx/src/{rolldown.ts => rolldown.web.ts} (59%) create mode 100644 packages/mpx/src/utils/helpers.ts diff --git a/packages/mpx/README.md b/packages/mpx/README.md index 5a62aac9..069700d7 100644 --- a/packages/mpx/README.md +++ b/packages/mpx/README.md @@ -18,3 +18,10 @@ Options: -h, --help Display this message 
-v, --version Display version number ``` + +``` +TODO: +- MPKOUTPUT +- mpkName +`` +``` diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 80497757..025fcb5d 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -5,21 +5,21 @@ import fg from "fast-glob"; import { filesize } from "filesize"; import fs from "node:fs/promises"; import path from "node:path"; -import { env } from "node:process"; import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; import { PACKAGE_FILES } from "./constants.js"; -import { loadConfig } from "./rolldown.js"; +import * as bundlesWeb from "./rolldown.web.js"; import { blue, bold, dim, green, greenBright, inverse } from "./utils/colors.js"; import { deployToMxProject, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; -import { ProjectConfig } from "./utils/project-config.js"; +import { ProjectConfig, ProjectConfigWeb } from "./utils/project-config.js"; interface BuildCommandOptions { watch?: boolean; minify?: boolean; + platform?: "web" | "node"; } /** @@ -28,9 +28,6 @@ interface BuildCommandOptions { * @param options - Build options */ export async function build(root: string | undefined, options: BuildCommandOptions): Promise { - options.watch ??= false; - options.minify ??= !!env.CI; - const logger: ConsolaInstance = createLogger(); try { root = path.resolve(root ?? ""); @@ -39,17 +36,20 @@ export async function build(root: string | undefined, options: BuildCommandOptio const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); - const config = await ProjectConfig.create({ - pkg, - isTsProject - }); + let config: ProjectConfig; + let bundles: BuildOptions[]; + + if (options.platform === "web") { + config = await ProjectConfigWeb.create({ pkg, isTsProject }); + bundles = await bundlesWeb.loadConfig(config as ProjectConfigWeb, logger); + } else { + throw new Error(`Build for native is not implemented yet`); + } if (config.projectPath) { logger.info(formatMsg.mxpath(config.projectPath)); } - const bundles = await loadConfig(config, logger); - await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); // console.dir(config.toPlainObject(), { depth: 3 }); if (options.watch) { @@ -87,7 +87,7 @@ const tasks = { logger.success("Done in", green(ms(buildInfo.duration))); if (config.projectPath) { - await deployToMxProject(config, config.projectPath); + await deployToMxProject(config, config.projectPath, config.deploymentPath); } }, async watch(params: TaskParams): Promise { diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index 86d518c8..38819558 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -1,6 +1,7 @@ #!/usr/bin/env node import { cac } from "cac"; +import { env } from "node:process"; import { build } from "./build.js"; import { VERSION } from "./constants.js"; @@ -8,8 +9,9 @@ const name = "mpx"; const cli = cac(name); cli.command("build [root]", "Build widget") - .option("-w, --watch", "watch for changes and rebuild") - .option("-m, --minify", "minify the output (this option is on in CI environment)") + .option("-w, --watch", "watch for changes and rebuild", { default: false }) + .option("-m, --minify", "minify the output (this option is 'on' in CI environment)", { default: Boolean(env.CI) }) + .option("-p, --platform ", "build platform (web or node)", { 
default: "web" }) .action(build); cli.help(); diff --git a/packages/mpx/src/rolldown.ts b/packages/mpx/src/rolldown.web.ts similarity index 59% rename from packages/mpx/src/rolldown.ts rename to packages/mpx/src/rolldown.web.ts index c1ccf33f..abf3b101 100644 --- a/packages/mpx/src/rolldown.ts +++ b/packages/mpx/src/rolldown.web.ts @@ -1,21 +1,16 @@ import { ConsolaInstance } from "consola"; -import fg from "fast-glob"; -import assert from "node:assert"; -import path from "node:path"; import { BuildOptions, RolldownPlugin } from "rolldown"; -import { Dependency } from "rollup-plugin-license"; import { STD_EXTERNALS } from "./constants.js"; import { plugins, RollupLicenseOptions, RollupUrlOptions } from "./plugins.js"; -import { bold, green } from "./utils/colors.js"; -import { hasEditorConfig, hasEditorPreview } from "./utils/fs.js"; -import { ProjectConfig } from "./utils/project-config.js"; +import { hasEditorConfig, hasEditorPreview, loadCustomConfigFactory } from "./utils/fs.js"; +import { licenseCustomTemplate } from "./utils/helpers.js"; +import { ProjectConfigWeb } from "./utils/project-config.js"; -export async function defaultConfig(config: ProjectConfig): Promise { +export async function defaultConfig(config: ProjectConfigWeb): Promise { const esmBundle = { input: config.inputFiles.widgetFile, external: [...STD_EXTERNALS], plugins: stdPlugins(config), - platform: "browser", output: { file: config.outputFiles.esm, format: "esm" @@ -26,7 +21,6 @@ export async function defaultConfig(config: ProjectConfig): Promise { - const [configFile] = await fg(["rollup.config.{js,mjs}"]); - if (configFile) { - logger.info(formatMsg.usingCustomConfig()); - const { default: customConfig } = await import(path.resolve(configFile)); - assert( - typeof customConfig === "function", - `Rollup config error: expected default export to be a function, got ${typeof customConfig}` - ); - const configDefaultConfig = await defaultConfig(project); - return customConfig({ configDefaultConfig }); +export async function loadConfig(config: ProjectConfigWeb, logger: ConsolaInstance): Promise { + const [configFactory, configDefaultConfig] = await Promise.all([ + loadCustomConfigFactory(logger), + defaultConfig(config) + ]); + if (configFactory) { + return configFactory({ configDefaultConfig }); } - return defaultConfig(project); + return configDefaultConfig; } -function stdPlugins(project: ProjectConfig): RolldownPlugin[] { +function stdPlugins(project: ProjectConfigWeb): RolldownPlugin[] { const { url, image, license } = plugins; const urlOptions: RollupUrlOptions = { @@ -123,26 +112,3 @@ function stdPlugins(project: ProjectConfig): RolldownPlugin[] { return [url(urlOptions), image(), license(licenseOptions)]; } - -export const licenseCustomTemplate = (dependencies: Dependency[]) => - JSON.stringify( - dependencies.map(dependency => { - const repoUrl = - typeof dependency.repository === "string" - ? dependency.repository - : dependency.repository instanceof Object - ? dependency.repository.url - : undefined; - - return { - [dependency.name!]: { - version: dependency.version, - url: dependency.homepage ?? 
repoUrl - } - }; - }) - ); - -const formatMsg = { - usingCustomConfig: () => green(bold(`Loading custom rollup config...`)) -}; diff --git a/packages/mpx/src/utils/fs.ts b/packages/mpx/src/utils/fs.ts index c146bcbe..11d76ebb 100644 --- a/packages/mpx/src/utils/fs.ts +++ b/packages/mpx/src/utils/fs.ts @@ -1,5 +1,10 @@ +import { ConsolaInstance } from "consola"; +import fg from "fast-glob"; +import assert from "node:assert"; import fs from "node:fs/promises"; import path from "node:path"; +import { BuildOptions } from "rolldown"; +import { bold, green } from "./colors.js"; import { parsePackageError } from "./error.js"; import { PackageJson } from "./parsers/PackageJson.js"; import { ProjectConfig } from "./project-config.js"; @@ -32,7 +37,11 @@ export async function readPackageJson(root: string): Promise { } } -export async function deployToMxProject(config: ProjectConfig, projectPath: string): Promise { +export async function deployToMxProject( + config: ProjectConfig, + projectPath: string, + deploymentPath: string[] +): Promise { const mpkDst = path.join(projectPath, "widgets"); const deploymentDir = path.join(projectPath, "deployment", "web", "widgets"); @@ -49,3 +58,24 @@ export async function deployToMxProject(config: ProjectConfig, projectPath: stri await fs.mkdir(mpkDst, { recursive: true }); await fs.cp(config.outputDirs.mpkDir, mpkDst, { recursive: true, force: true }); } + +type ConfigFactory = (args: { configDefaultConfig: BuildOptions[] }) => Promise; + +export async function loadCustomConfigFactory(logger: ConsolaInstance): Promise { + const [configFile] = await fg(["rollup.config.{js,mjs}"]); + if (configFile) { + logger.info(formatMsg.usingCustomConfig()); + const { default: customConfig } = await import(path.resolve(configFile)); + assert( + typeof customConfig === "function", + `Rollup config error: expected default export to be a function, got ${typeof customConfig}` + ); + + return customConfig; + } + return null; +} + +const formatMsg = { + usingCustomConfig: () => green(bold(`Loading custom rollup config...`)) +}; diff --git a/packages/mpx/src/utils/helpers.ts b/packages/mpx/src/utils/helpers.ts new file mode 100644 index 00000000..61eb613b --- /dev/null +++ b/packages/mpx/src/utils/helpers.ts @@ -0,0 +1,21 @@ +import { Dependency } from "rollup-plugin-license"; + +export function licenseCustomTemplate(dependencies: Dependency[]) { + return JSON.stringify( + dependencies.map(dependency => { + const repoUrl = + typeof dependency.repository === "string" + ? dependency.repository + : dependency.repository instanceof Object + ? dependency.repository.url + : undefined; + + return { + [dependency.name!]: { + version: dependency.version, + url: dependency.homepage ?? 
repoUrl + } + }; + }) + ); +} diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index cbe65c54..44386d95 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -36,29 +36,39 @@ interface ProjectConfigInputs { isTsProject: boolean; } -export class ProjectConfig { +export abstract class ProjectConfig { readonly projectPath: string | null = null; + /** Output directory for built files */ readonly dist = "dist"; /** Package root directory that contains all widget files shipped with mpk */ readonly contentRoot = path.join(this.dist, "tmp", "widgets"); /** Widget package.json */ readonly pkg: PackageJson; + readonly isTsProject: boolean; - constructor(inputs: ProjectConfigInputs, projectPath: string | null) { + readonly platform: "web" | "native"; + + readonly deploymentPath: string[]; + + constructor( + inputs: ProjectConfigInputs & { + projectPath: string | null; + platform: "web" | "native"; + deploymentPath: string[]; + } + ) { + this.projectPath = inputs.projectPath; this.pkg = inputs.pkg; this.isTsProject = inputs.isTsProject; - this.projectPath = projectPath; + this.platform = inputs.platform; + this.deploymentPath = inputs.deploymentPath; } - /** Public path (aka base url) for widget assets */ - get assetsPublicPath(): string { - const { - pkg: { packagePath, widgetName } - } = this; - const publicPath = ["widgets", ...packagePath.split("."), widgetName.toLowerCase(), "assets"].join("/"); - return `${publicPath}/`; + /** Relative path to the widget directory from the "widgets" */ + get relativeWidgetPath(): string { + return path.join(...this.pkg.packagePath.split("."), this.pkg.widgetName.toLowerCase()); } get inputFiles(): BundleInputFiles { @@ -98,11 +108,6 @@ export class ProjectConfig { return { editorConfig, editorPreview, packageXml, widgetFile, widgetXml }; } - /** Relative path to the widget directory from the "widgets" */ - get relativeWidgetPath(): string { - return path.join(...this.pkg.packagePath.split("."), this.pkg.widgetName.toLowerCase()); - } - get outputDirs(): BundleOutputDirs { const widgetDir = path.join(this.contentRoot, this.relativeWidgetPath); return { @@ -114,6 +119,59 @@ export class ProjectConfig { }; } + get outputFiles(): BundleOutputFiles { + throw new Error("Method 'outputFiles' must be implemented."); + } + + toPlainObject(): Record { + return { + dist: this.dist, + contentRoot: this.contentRoot, + pkg: this.pkg, + isTsProject: this.isTsProject, + projectPath: this.projectPath, + inputFiles: this.inputFiles, + outputDirs: this.outputDirs, + outputFiles: this.outputFiles, + relativeWidgetPath: this.relativeWidgetPath + }; + } + + static async getProjectPath(pkg: PackageJson): Promise { + let projectPath = (() => { + if (env.MX_PROJECT_PATH) { + return env.MX_PROJECT_PATH; + } + if (pkg.config?.projectPath) { + return pkg.config.projectPath; + } + + return path.join("tests", "testProject"); + })(); + projectPath = path.resolve(projectPath); + + if (await access(projectPath)) { + return projectPath; + } + + return null; + } +} + +export class ProjectConfigWeb extends ProjectConfig { + constructor(inputs: ProjectConfigInputs, projectPath: string | null) { + super({ ...inputs, projectPath, platform: "web", deploymentPath: ["deployment", "web", "widgets"] }); + } + + /** Public path (aka base url) for widget assets */ + get assetsPublicPath(): string { + const { + pkg: { packagePath, widgetName } + } = this; + const publicPath = ["widgets", 
...packagePath.split("."), widgetName.toLowerCase(), "assets"].join("/"); + return `${publicPath}/`; + } + get outputFiles(): BundleOutputFiles { const { pkg, outputDirs } = this; return { @@ -152,43 +210,8 @@ export class ProjectConfig { }; } - toPlainObject(): Record { - return { - dist: this.dist, - contentRoot: this.contentRoot, - pkg: this.pkg, - isTsProject: this.isTsProject, - projectPath: this.projectPath, - inputFiles: this.inputFiles, - outputDirs: this.outputDirs, - outputFiles: this.outputFiles, - assetsPublicPath: this.assetsPublicPath, - relativeWidgetPath: this.relativeWidgetPath - }; - } - - static async getProjectPath(pkg: PackageJson): Promise { - let projectPath = (() => { - if (env.MX_PROJECT_PATH) { - return env.MX_PROJECT_PATH; - } - if (pkg.config?.projectPath) { - return pkg.config.projectPath; - } - - return path.join("tests", "testProject"); - })(); - projectPath = path.resolve(projectPath); - - if (await access(projectPath)) { - return projectPath; - } - - return null; - } - - static async create(inputs: ProjectConfigInputs): Promise { + static async create(inputs: ProjectConfigInputs): Promise { const projectPath = await ProjectConfig.getProjectPath(inputs.pkg); - return new ProjectConfig(inputs, projectPath); + return new ProjectConfigWeb(inputs, projectPath); } } From f743d87ba5e3fa5afb13da5308d2022e14b78595 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 12:53:50 +0200 Subject: [PATCH 33/45] chore: change cli usage --- packages/mpx/src/cli.ts | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index 38819558..19826767 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -7,28 +7,23 @@ import { VERSION } from "./constants.js"; const name = "mpx"; const cli = cac(name); +cli.usage( + `[options] [dir] \n\nBuild the widget in the specified directory. 
If the directory is omitted, use the current directory.` +); -cli.command("build [root]", "Build widget") - .option("-w, --watch", "watch for changes and rebuild", { default: false }) +cli.option("-w, --watch", "watch for changes and rebuild", { default: false }) .option("-m, --minify", "minify the output (this option is 'on' in CI environment)", { default: Boolean(env.CI) }) - .option("-p, --platform ", "build platform (web or node)", { default: "web" }) - .action(build); + .option("-p, --platform ", "build platform (web or node)", { default: "web" }); cli.help(); cli.version(VERSION); -cli.on("command:*", () => { - console.error(`Unknown command: "%s"`, cli.args.join(" ")); - console.error(`See '${name} --help' for a list of available commands.`); - process.exit(1); -}); +const { + args: [root], + options +} = cli.parse(); -cli.parse(); - -if (process.argv.length <= 2) { - cli.outputHelp(); - process.exit(1); -} +build(root, options); process.on("uncaughtException", error => { console.error("Uncaught Exception:", error.message); From 2172b2d8c4fabed080d24b7c6147007148969aaa Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 14:28:09 +0200 Subject: [PATCH 34/45] chore: some cli fixes --- packages/mpx/src/build.ts | 2 +- packages/mpx/src/cli.ts | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 025fcb5d..d9b4a113 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -176,7 +176,7 @@ const tasks = { clearTimeout(debounceTimer); } - debounceTimer = setTimeout(() => tasks.buildMpk(params), 30); + debounceTimer = setTimeout(() => tasks.buildMpk(params), 150); }); onExit(() => { diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index 19826767..836415d9 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -23,6 +23,10 @@ const { options } = cli.parse(); +if (options.help || options.version) { + process.exit(0); +} + build(root, options); process.on("uncaughtException", error => { From 3ad689db5a333d6810a5a83c3f104006ef25aca1 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 15:58:36 +0200 Subject: [PATCH 35/45] feat: add typings generation --- packages/mpx/package.json | 1 + packages/mpx/pnpm-lock.yaml | 23 +++++++++++++++++++++++ packages/mpx/src/build.ts | 30 +++++++++++++++++++++++++++++- packages/mpx/src/constants.ts | 4 +++- packages/mpx/src/rolldown.web.ts | 27 +++++++++++++++++++-------- packages/mpx/tsconfig.json | 1 + 6 files changed, 76 insertions(+), 10 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 85727e87..4bab5ae4 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -36,6 +36,7 @@ "rollup-plugin-license": "^3.6.0", "signal-exit": "^4.1.0", "typescript": "^5.8.3", + "xml2js": "^0.6.2", "zip-a-folder": "^3.1.9", "zod": "^4.0.5" }, diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 2469a7be..f6c8cf5f 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -63,6 +63,9 @@ importers: typescript: specifier: ^5.8.3 version: 5.8.3 + xml2js: + specifier: ^0.6.2 + version: 0.6.2 zip-a-folder: specifier: ^3.1.9 version: 3.1.9 @@ -706,6 +709,9 @@ packages: safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + sax@1.4.1: + resolution: {integrity: 
sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==} + semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -805,6 +811,14 @@ packages: resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} engines: {node: '>=12'} + xml2js@0.6.2: + resolution: {integrity: sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==} + engines: {node: '>=4.0.0'} + + xmlbuilder@11.0.1: + resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==} + engines: {node: '>=4.0'} + zip-a-folder@3.1.9: resolution: {integrity: sha512-0TPP3eK5mbZxHnOE8w/Jg6gwxsxZOrA3hXHMfC3I4mcTvyJwNt7GZP8i6uiAMVNu43QTmVz0ngEMKcjgpLZLmQ==} @@ -1394,6 +1408,8 @@ snapshots: safe-buffer@5.2.1: {} + sax@1.4.1: {} + semver@6.3.1: {} shebang-command@2.0.0: @@ -1505,6 +1521,13 @@ snapshots: string-width: 5.1.2 strip-ansi: 7.1.0 + xml2js@0.6.2: + dependencies: + sax: 1.4.1 + xmlbuilder: 11.0.1 + + xmlbuilder@11.0.1: {} + zip-a-folder@3.1.9: dependencies: archiver: 7.0.1 diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index d9b4a113..39e27c18 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -8,7 +8,8 @@ import path from "node:path"; import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; -import { PACKAGE_FILES } from "./constants.js"; +import { transformPackage } from "../../pluggable-widgets-tools/src/typings-generator/index.js"; +import { PACKAGE_FILES, XML_FILES } from "./constants.js"; import * as bundlesWeb from "./rolldown.web.js"; import { blue, bold, dim, green, greenBright, inverse } from "./utils/colors.js"; import { deployToMxProject, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; @@ -73,7 +74,11 @@ interface TaskParams { const tasks = { async build(params: TaskParams): Promise { const { config, bundles, logger } = params; + buildMeasure.start(); + if (config.isTsProject) { + await tasks.generateTypings(params); + } for (const bundle of bundles) { await buildBundle(bundle); @@ -126,6 +131,9 @@ const tasks = { await bundleWatchReady; await tasks.watchPackageFiles(params); await tasks.watchPackageContent(params); + if (params.config.isTsProject) { + await tasks.watchTypings(params); + } logger.info("Waiting for changes..."); onExit(() => { @@ -192,6 +200,25 @@ const tasks = { if (!quiet) { logger.success(formatMsg.builtSize(config.outputFiles.mpk, mpkStat.size)); } + }, + async generateTypings({ config }: TaskParams): Promise { + const packageXml = await fs.readFile(config.inputFiles.packageXml, { encoding: "utf8" }); + const src = path.dirname(config.inputFiles.packageXml); + await transformPackage(packageXml, src); + }, + async watchTypings(params: TaskParams): Promise { + await tasks.generateTypings(params); + + const watcher = chokidar.watch(await fg(XML_FILES)); + + watcher.on("change", async () => { + await tasks.generateTypings(params); + params.logger.info(formatMsg.rebuiltTypings()); + }); + + onExit(() => { + watcher.close(); + }); } }; @@ -199,6 +226,7 @@ const formatMsg = { built: (file: string) => `Built ${bold(file)}`, builtSize: (file: string, size: number) => `Built ${bold(file)} (${dim(filesize(size, { standard: "jedec" }))})`, rebuilt: (file: string, duration: number) => `Rebuilt 
${dim(file)} in ${green(ms(duration))}`, + rebuiltTypings: () => `Rebuilt typings`, copy: (file: string) => `Copy ${bold(file)}`, mxpath: (dir: string) => `${inverse(greenBright(bold(" MX PROJECT PATH ")))}${blue(bold(` ${dir} `))}` }; diff --git a/packages/mpx/src/constants.ts b/packages/mpx/src/constants.ts index 1d91bfd6..8dcc97f3 100644 --- a/packages/mpx/src/constants.ts +++ b/packages/mpx/src/constants.ts @@ -11,9 +11,11 @@ export const STD_EXTERNALS = [ /^big.js$/ ]; +export const XML_FILES = "src/*.xml"; + export const PACKAGE_FILES = [ // XML files - "src/*.xml", + XML_FILES, // Modeler icons "src/*.@(tile|icon)?(.dark).png", // License file diff --git a/packages/mpx/src/rolldown.web.ts b/packages/mpx/src/rolldown.web.ts index abf3b101..1f9905e9 100644 --- a/packages/mpx/src/rolldown.web.ts +++ b/packages/mpx/src/rolldown.web.ts @@ -7,10 +7,18 @@ import { licenseCustomTemplate } from "./utils/helpers.js"; import { ProjectConfigWeb } from "./utils/project-config.js"; export async function defaultConfig(config: ProjectConfigWeb): Promise { + const jsx: BuildOptions["jsx"] = { + mode: "classic", + factory: "createElement", + fragment: "Fragment", + importSource: "react" + }; + const esmBundle = { input: config.inputFiles.widgetFile, external: [...STD_EXTERNALS], - plugins: stdPlugins(config), + plugins: [...stdPlugins(config)], + jsx, output: { file: config.outputFiles.esm, format: "esm" @@ -20,13 +28,15 @@ export async function defaultConfig(config: ProjectConfigWeb): Promise Date: Wed, 16 Jul 2025 16:11:24 +0200 Subject: [PATCH 36/45] feat: add support for MPKOUTPUT --- packages/mpx/src/utils/project-config.ts | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 44386d95..ddf624be 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -52,6 +52,9 @@ export abstract class ProjectConfig { readonly deploymentPath: string[]; + /** MPK name including extension */ + readonly mpkBase: string; + constructor( inputs: ProjectConfigInputs & { projectPath: string | null; @@ -59,11 +62,13 @@ export abstract class ProjectConfig { deploymentPath: string[]; } ) { + const { pkg } = inputs; this.projectPath = inputs.projectPath; - this.pkg = inputs.pkg; + this.pkg = pkg; this.isTsProject = inputs.isTsProject; this.platform = inputs.platform; this.deploymentPath = inputs.deploymentPath; + this.mpkBase = env.MPKOUTPUT ?? 
`${pkg.packagePath}.${pkg.widgetName}.mpk`; } /** Relative path to the widget directory from the "widgets" */ @@ -133,7 +138,8 @@ export abstract class ProjectConfig { inputFiles: this.inputFiles, outputDirs: this.outputDirs, outputFiles: this.outputFiles, - relativeWidgetPath: this.relativeWidgetPath + relativeWidgetPath: this.relativeWidgetPath, + mpkBase: this.mpkBase }; } @@ -197,7 +203,7 @@ export class ProjectConfigWeb extends ProjectConfig { }), mpk: path.format({ dir: outputDirs.mpkDir, - base: `${pkg.packagePath}.${pkg.widgetName}.mpk` + base: this.mpkBase }), dependenciesTxt: path.format({ dir: outputDirs.contentRoot, From dc7b11bfa107939332792b2f4c70163c1e75715d Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 16:32:19 +0200 Subject: [PATCH 37/45] feat: add show config flag --- packages/mpx/src/build.ts | 28 ++++++++++++++++++++++------ packages/mpx/src/cli.ts | 3 ++- 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 39e27c18..279dc989 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -5,6 +5,7 @@ import fg from "fast-glob"; import { filesize } from "filesize"; import fs from "node:fs/promises"; import path from "node:path"; +import { env } from "node:process"; import ms from "pretty-ms"; import { BuildOptions, build as buildBundle, watch } from "rolldown"; import { onExit } from "signal-exit"; @@ -20,7 +21,8 @@ import { ProjectConfig, ProjectConfigWeb } from "./utils/project-config.js"; interface BuildCommandOptions { watch?: boolean; minify?: boolean; - platform?: "web" | "node"; + platform?: "web" | "native"; + showConfig?: boolean; } /** @@ -38,11 +40,8 @@ export async function build(root: string | undefined, options: BuildCommandOptio const [pkg, isTsProject] = await Promise.all([readPackageJson(root), isTypeScriptProject(root)]); let config: ProjectConfig; - let bundles: BuildOptions[]; - if (options.platform === "web") { config = await ProjectConfigWeb.create({ pkg, isTsProject }); - bundles = await bundlesWeb.loadConfig(config as ProjectConfigWeb, logger); } else { throw new Error(`Build for native is not implemented yet`); } @@ -51,8 +50,24 @@ export async function build(root: string | undefined, options: BuildCommandOptio logger.info(formatMsg.mxpath(config.projectPath)); } + if (env.MPKOUTPUT) { + logger.info(formatMsg.mpk(env.MPKOUTPUT)); + } + + if (options.showConfig) { + console.dir(config.toPlainObject(), { depth: 3 }); + return; + } + + let bundles: BuildOptions[]; + if (options.platform === "web") { + bundles = await bundlesWeb.loadConfig(config as ProjectConfigWeb, logger); + } else { + throw new Error(`Build for native is not implemented yet`); + } + await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); - // console.dir(config.toPlainObject(), { depth: 3 }); + if (options.watch) { await tasks.watch({ config, bundles, logger, root }); } else { @@ -228,7 +243,8 @@ const formatMsg = { rebuilt: (file: string, duration: number) => `Rebuilt ${dim(file)} in ${green(ms(duration))}`, rebuiltTypings: () => `Rebuilt typings`, copy: (file: string) => `Copy ${bold(file)}`, - mxpath: (dir: string) => `${inverse(greenBright(bold(" MX PROJECT PATH ")))}${blue(bold(` ${dir} `))}` + mxpath: (dir: string) => `${inverse(greenBright(bold(" MX PROJECT PATH ")))}${blue(bold(` ${dir} `))}`, + mpk: (name: string) => `${inverse(bold(" MPKOUTPUT "))}${blue(bold(` ${name} `))}` }; const buildMeasure = { diff --git 
a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index 836415d9..ed29ae84 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -13,7 +13,8 @@ cli.usage( cli.option("-w, --watch", "watch for changes and rebuild", { default: false }) .option("-m, --minify", "minify the output (this option is 'on' in CI environment)", { default: Boolean(env.CI) }) - .option("-p, --platform ", "build platform (web or node)", { default: "web" }); + .option("-p, --platform ", "build platform (web or node)", { default: "web" }) + .option("--show-config", "print project config and exit", { default: false }); cli.help(); cli.version(VERSION); From 5fa8acd9fd3baae88677a94c7cb13d52bfe7d892 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 16:36:53 +0200 Subject: [PATCH 38/45] chore: extend method --- packages/mpx/src/utils/project-config.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index ddf624be..ac8b30d3 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -216,6 +216,13 @@ export class ProjectConfigWeb extends ProjectConfig { }; } + toPlainObject(): Record { + return { + ...super.toPlainObject(), + assetsPublicPath: this.assetsPublicPath + }; + } + static async create(inputs: ProjectConfigInputs): Promise { const projectPath = await ProjectConfig.getProjectPath(inputs.pkg); return new ProjectConfigWeb(inputs, projectPath); From 996ef9578a543f2a3178c59f9a91f413b5f7e545 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 17:56:21 +0200 Subject: [PATCH 39/45] feat: add postcss and sass --- packages/mpx/bin/mpx.js | 0 packages/mpx/package.json | 14 +- packages/mpx/pnpm-lock.yaml | 1151 ++++++++++++++++- packages/mpx/pnpm-workspace.yaml | 7 +- packages/mpx/rolldown.config.ts | 9 +- packages/mpx/src/build.ts | 12 +- packages/mpx/src/cli.ts | 3 +- packages/mpx/src/plugins.ts | 5 + packages/mpx/src/rolldown.web.ts | 29 + .../mpx/src/utils/parsers/CliBuildOptions.ts | 10 + packages/mpx/src/utils/project-config.ts | 5 + 11 files changed, 1226 insertions(+), 19 deletions(-) mode change 100644 => 100755 packages/mpx/bin/mpx.js create mode 100644 packages/mpx/src/utils/parsers/CliBuildOptions.ts diff --git a/packages/mpx/bin/mpx.js b/packages/mpx/bin/mpx.js old mode 100644 new mode 100755 diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 4bab5ae4..a8c49df9 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -3,6 +3,9 @@ "version": "0.1.0", "description": "Mendix tool for bundling pluggable widgets", "type": "module", + "bin": { + "mpx": "./bin/mpx.js" + }, "scripts": { "preinstall": "npx only-allow pnpm", "test": "echo 'test is missing'", @@ -30,17 +33,24 @@ "fast-glob": "^3.3.3", "filesize": "^11.0.1", "picocolors": "^1.1.1", + "postcss": "^8.5.6", "premove": "^4.0.0", "prettier": "^3.6.2", "pretty-ms": "^9.2.0", + "rolldown": "1.0.0-beta.26", "rollup-plugin-license": "^3.6.0", + "rollup-plugin-postcss": "^4.0.2", + "sass": "^1.89.2", "signal-exit": "^4.1.0", "typescript": "^5.8.3", - "xml2js": "^0.6.2", "zip-a-folder": "^3.1.9", "zod": "^4.0.5" }, "dependencies": { - "rolldown": "1.0.0-beta.26" + "postcss": "^8.5.6", + "rolldown": "1.0.0-beta.26", + "rollup-plugin-postcss": "^4.0.2", + "sass": "^1.89.2", + "xml2js": "^0.6.2" } } diff --git a/packages/mpx/pnpm-lock.yaml 
b/packages/mpx/pnpm-lock.yaml index f6c8cf5f..2eb11cfc 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -8,9 +8,21 @@ importers: .: dependencies: + postcss: + specifier: ^8.5.6 + version: 8.5.6 rolldown: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 + rollup-plugin-postcss: + specifier: ^4.0.2 + version: 4.0.2(postcss@8.5.6) + sass: + specifier: ^1.89.2 + version: 1.89.2 + xml2js: + specifier: ^0.6.2 + version: 0.6.2 devDependencies: '@rollup/plugin-image': specifier: ^3.0.3 @@ -63,9 +75,6 @@ importers: typescript: specifier: ^5.8.3 version: 5.8.3 - xml2js: - specifier: ^0.6.2 - version: 0.6.2 zip-a-folder: specifier: ^3.1.9 version: 3.1.9 @@ -121,6 +130,88 @@ packages: '@oxc-project/types@0.76.0': resolution: {integrity: sha512-CH3THIrSViKal8yV/Wh3FK0pFhp40nzW1MUDCik9fNuid2D/7JJXKJnfFOAvMxInGXDlvmgT6ACAzrl47TqzkQ==} + '@parcel/watcher-android-arm64@2.5.1': + resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [android] + + '@parcel/watcher-darwin-arm64@2.5.1': + resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [darwin] + + '@parcel/watcher-darwin-x64@2.5.1': + resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [darwin] + + '@parcel/watcher-freebsd-x64@2.5.1': + resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] + + '@parcel/watcher-linux-arm-glibc@2.5.1': + resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm-musl@2.5.1': + resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + + '@parcel/watcher-linux-arm64-musl@2.5.1': + resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + + '@parcel/watcher-linux-x64-glibc@2.5.1': + resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + + '@parcel/watcher-linux-x64-musl@2.5.1': + resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + + '@parcel/watcher-win32-arm64@2.5.1': + resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [win32] + + '@parcel/watcher-win32-ia32@2.5.1': + resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==} + engines: {node: '>= 10.0.0'} + cpu: [ia32] + 
os: [win32] + + '@parcel/watcher-win32-x64@2.5.1': + resolution: {integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [win32] + + '@parcel/watcher@2.5.1': + resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==} + engines: {node: '>= 10.0.0'} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -315,6 +406,10 @@ packages: cpu: [x64] os: [win32] + '@trysound/sax@0.2.0': + resolution: {integrity: sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==} + engines: {node: '>=10.13.0'} + '@tsconfig/node22@22.0.2': resolution: {integrity: sha512-Kmwj4u8sDRDrMYRoN9FDEcXD8UpBSaPQQ24Gz+Gamqfm7xxn+GBR7ge/Z7pK8OXNGyUzbSwJj+TH6B+DS/epyA==} @@ -378,6 +473,9 @@ packages: base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + boolbase@1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + brace-expansion@2.0.2: resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} @@ -385,6 +483,11 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} + browserslist@4.25.1: + resolution: {integrity: sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + buffer-crc32@1.0.0: resolution: {integrity: sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==} engines: {node: '>=8.0.0'} @@ -396,6 +499,16 @@ packages: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} + caniuse-api@3.0.0: + resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==} + + caniuse-lite@1.0.30001727: + resolution: {integrity: sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + chokidar@4.0.3: resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} engines: {node: '>= 14.16.0'} @@ -407,6 +520,13 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + colord@2.9.3: + resolution: {integrity: sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==} + + commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + commenting@1.1.0: resolution: {integrity: sha512-YeNK4tavZwtH7jEgK1ZINXzLKm6DZdEMfsaaieOsCAN0S8vsY7UeuO3Q7d/M018EFgE+IeUAuBOKkFccBZsUZA==} @@ -414,6 +534,9 @@ packages: resolution: {integrity: 
sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==} engines: {node: '>= 14'} + concat-with-sourcemaps@1.1.0: + resolution: {integrity: sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg==} + consola@3.4.2: resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} engines: {node: ^14.18.0 || >=16.10.0} @@ -434,6 +557,68 @@ packages: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} + css-declaration-sorter@6.4.1: + resolution: {integrity: sha512-rtdthzxKuyq6IzqX6jEcIzQF/YqccluefyCYheovBOLhFT/drQA9zj/UbRAa9J7C0o6EG6u3E6g+vKkay7/k3g==} + engines: {node: ^10 || ^12 || >=14} + peerDependencies: + postcss: ^8.0.9 + + css-select@4.3.0: + resolution: {integrity: sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==} + + css-tree@1.1.3: + resolution: {integrity: sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==} + engines: {node: '>=8.0.0'} + + css-what@6.2.2: + resolution: {integrity: sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==} + engines: {node: '>= 6'} + + cssesc@3.0.0: + resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} + engines: {node: '>=4'} + hasBin: true + + cssnano-preset-default@5.2.14: + resolution: {integrity: sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + cssnano-utils@3.1.0: + resolution: {integrity: sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + cssnano@5.1.15: + resolution: {integrity: sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + csso@4.2.0: + resolution: {integrity: sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==} + engines: {node: '>=8.0.0'} + + detect-libc@1.0.3: + resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} + engines: {node: '>=0.10'} + hasBin: true + + dom-serializer@1.4.1: + resolution: {integrity: sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==} + + domelementtype@2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + + domhandler@4.3.1: + resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==} + engines: {node: '>= 4'} + + domutils@2.8.0: + resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==} + dotenv@17.2.0: resolution: {integrity: sha512-Q4sgBT60gzd0BB0lSyYD3xM4YxrXA9y4uBDof1JNYGzOXrQdQ6yX+7XIAqoFOGQFOTK1D3Hts5OllpxMDZFONQ==} engines: {node: '>=12'} @@ -441,12 +626,25 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + electron-to-chromium@1.5.185: + 
resolution: {integrity: sha512-dYOZfUk57hSMPePoIQ1fZWl1Fkj+OshhEVuPacNKWzC1efe56OsHY3l/jCfiAgIICOU3VgOIdoq7ahg7r7n6MQ==} + emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + entities@2.2.0: + resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + estree-walker@0.6.1: + resolution: {integrity: sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==} + estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} @@ -454,6 +652,9 @@ packages: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} + eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + events@3.3.0: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} @@ -493,6 +694,12 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + generic-names@4.0.0: + resolution: {integrity: sha512-ySFolZQfw9FoDb3ed9d80Cm9f0+r7qj+HJkWjeD9RBfpxEVTlVhol+gvaQB/78WbwYfbnNh8nWHHBSlg072y6A==} + glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} @@ -509,12 +716,44 @@ packages: graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + icss-replace-symbols@1.1.0: + resolution: {integrity: sha512-chIaY3Vh2mh2Q3RGXttaDIzeiPvaVXJ+C4DAh/w3c37SKZ/U6PGMmuicR2EQQp9bKG8zLMCl7I+PtIoOOPp8Gg==} + + icss-utils@5.1.0: + resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + immutable@5.1.3: + resolution: {integrity: sha512-+chQdDfvscSF1SJqv2gn4SRO2ZyS3xL3r7IW/wWEEzrzLisnOlKiQu5ytC/BVNcS15C39WT2Hg/bjKjDMcu+zg==} + + import-cwd@3.0.0: + resolution: {integrity: sha512-4pnzH16plW+hgvRECbDWpQl3cqtvSofHWh44met7ESfZ8UZOWWddm8hEyDTqREJ9RbYHY8gi8DqmaelApoOGMg==} + engines: {node: '>=8'} + + import-from@3.0.0: + resolution: {integrity: sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ==} + engines: {node: '>=8'} + 
inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + is-core-module@2.16.1: + resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} + engines: {node: '>= 0.4'} + is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -552,6 +791,23 @@ packages: resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} engines: {node: '>= 0.6.3'} + lilconfig@2.1.0: + resolution: {integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==} + engines: {node: '>=10'} + + loader-utils@3.3.1: + resolution: {integrity: sha512-FMJTLMXfCLMLfJxcX9PFqX5qD88Z5MRGaZCVzfuqeZSPsyiBzs+pahDQjbIWz2QIzPZz0NX9Zy4FX3lmK6YHIg==} + engines: {node: '>= 12.13.0'} + + lodash.camelcase@4.3.0: + resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + + lodash.memoize@4.1.2: + resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + + lodash.uniq@4.5.0: + resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} + lodash@4.17.21: resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} @@ -569,6 +825,9 @@ packages: resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} engines: {node: '>=8'} + mdn-data@2.0.14: + resolution: {integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==} + merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} @@ -605,10 +864,40 @@ packages: moment@2.30.1: resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + node-addon-api@7.1.1: + resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} + + node-releases@2.0.19: + resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} + normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} + normalize-url@6.1.0: + resolution: {integrity: sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==} + engines: {node: '>=10'} + + nth-check@2.1.1: + resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + + p-finally@1.0.0: + resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} + engines: {node: '>=4'} + + p-queue@6.6.2: + resolution: {integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} + 
engines: {node: '>=8'} + + p-timeout@3.2.0: + resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==} + engines: {node: '>=8'} + package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} @@ -624,6 +913,9 @@ packages: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} + path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + path-scurry@1.11.1: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} @@ -643,6 +935,227 @@ packages: resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} engines: {node: '>=12'} + pify@5.0.0: + resolution: {integrity: sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==} + engines: {node: '>=10'} + + postcss-calc@8.2.4: + resolution: {integrity: sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q==} + peerDependencies: + postcss: ^8.2.2 + + postcss-colormin@5.3.1: + resolution: {integrity: sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-convert-values@5.1.3: + resolution: {integrity: sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-discard-comments@5.1.2: + resolution: {integrity: sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-discard-duplicates@5.1.0: + resolution: {integrity: sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-discard-empty@5.1.1: + resolution: {integrity: sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-discard-overridden@5.1.0: + resolution: {integrity: sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-load-config@3.1.4: + resolution: {integrity: sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==} + engines: {node: '>= 10'} + peerDependencies: + postcss: '>=8.0.9' + ts-node: '>=9.0.0' + peerDependenciesMeta: + postcss: + optional: true + ts-node: + optional: true + + postcss-merge-longhand@5.1.7: + resolution: {integrity: sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-merge-rules@5.1.4: + resolution: {integrity: sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g==} + engines: {node: ^10 || ^12 || >=14.0} + 
peerDependencies: + postcss: ^8.2.15 + + postcss-minify-font-values@5.1.0: + resolution: {integrity: sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-minify-gradients@5.1.1: + resolution: {integrity: sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-minify-params@5.1.4: + resolution: {integrity: sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-minify-selectors@5.2.1: + resolution: {integrity: sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-modules-extract-imports@3.1.0: + resolution: {integrity: sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules-local-by-default@4.2.0: + resolution: {integrity: sha512-5kcJm/zk+GJDSfw+V/42fJ5fhjL5YbFDl8nVdXkJPLLW+Vf9mTD5Xe0wqIaDnLuL2U6cDNpTr+UQ+v2HWIBhzw==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules-scope@3.2.1: + resolution: {integrity: sha512-m9jZstCVaqGjTAuny8MdgE88scJnCiQSlSrOWcTQgM2t32UBe+MUmFSO5t7VMSfAf/FJKImAxBav8ooCHJXCJA==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules-values@4.0.0: + resolution: {integrity: sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules@4.3.1: + resolution: {integrity: sha512-ItUhSUxBBdNamkT3KzIZwYNNRFKmkJrofvC2nWab3CPKhYBQ1f27XXh1PAPE27Psx58jeelPsxWB/+og+KEH0Q==} + peerDependencies: + postcss: ^8.0.0 + + postcss-normalize-charset@5.1.0: + resolution: {integrity: sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-display-values@5.1.0: + resolution: {integrity: sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-positions@5.1.1: + resolution: {integrity: sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-repeat-style@5.1.1: + resolution: {integrity: sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-string@5.1.0: + resolution: {integrity: sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-timing-functions@5.1.0: + resolution: {integrity: sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg==} + engines: {node: ^10 || ^12 || >=14.0} + 
peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-unicode@5.1.1: + resolution: {integrity: sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-url@5.1.0: + resolution: {integrity: sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-normalize-whitespace@5.1.1: + resolution: {integrity: sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-ordered-values@5.1.3: + resolution: {integrity: sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-reduce-initial@5.1.2: + resolution: {integrity: sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-reduce-transforms@5.1.0: + resolution: {integrity: sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-selector-parser@6.1.2: + resolution: {integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==} + engines: {node: '>=4'} + + postcss-selector-parser@7.1.0: + resolution: {integrity: sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==} + engines: {node: '>=4'} + + postcss-svgo@5.1.0: + resolution: {integrity: sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-unique-selectors@5.1.1: + resolution: {integrity: sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + postcss-value-parser@4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} + engines: {node: ^10 || ^12 || >=14} + premove@4.0.0: resolution: {integrity: sha512-zim/Hr4+FVdCIM7zL9b9Z0Wfd5Ya3mnKtiuDv7L5lzYzanSq6cOcVJ7EFcgK4I0pt28l8H0jX/x3nyog380XgQ==} engines: {node: '>=6'} @@ -664,6 +1177,10 @@ packages: resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} engines: {node: '>= 0.6.0'} + promise.series@0.2.0: + resolution: {integrity: sha512-VWQJyU2bcDTgZw8kpfBpB/ejZASlCrzwz5f2hjb/zlujOEB4oeiAhHygAWq8ubsX2GVkD4kCU5V2dwOTaCY5EQ==} + engines: {node: '>=0.12'} + queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -681,6 +1198,15 @@ packages: resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} engines: {node: '>= 14.18.0'} + resolve-from@5.0.0: + resolution: {integrity: 
sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + resolve@1.22.10: + resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==} + engines: {node: '>= 0.4'} + hasBin: true + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -695,6 +1221,15 @@ packages: peerDependencies: rollup: ^1.0.0 || ^2.0.0 || ^3.0.0 || ^4.0.0 + rollup-plugin-postcss@4.0.2: + resolution: {integrity: sha512-05EaY6zvZdmvPUDi3uCcAQoESDcYnv8ogJJQRp6V5kZ6J6P7uAVJlrTZcaaA20wTH527YTnKfkAoPxWI/jPp4w==} + engines: {node: '>=10'} + peerDependencies: + postcss: 8.x + + rollup-pluginutils@2.8.2: + resolution: {integrity: sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==} + rollup@4.45.0: resolution: {integrity: sha512-WLjEcJRIo7i3WDDgOIJqVI2d+lAC3EwvOGy+Xfq6hs+GQuAA4Di/H72xmXkOhrIWFg2PFYSKZYfH0f4vfKXN4A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} @@ -709,6 +1244,14 @@ packages: safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + safe-identifier@0.4.2: + resolution: {integrity: sha512-6pNbSMW6OhAi9j+N8V+U715yBQsaWJ7eyEUaOrawX+isg5ZxhUlV1NipNtgaKHmFGiABwt+ZF04Ii+3Xjkg+8w==} + + sass@1.89.2: + resolution: {integrity: sha512-xCmtksBKd/jdJ9Bt9p7nPKiuqrlBMBuuGkQlkhZjjQk3Ty48lv93k5Dq6OPkKt4XwxDJ7tvlfrTa1MPA9bf+QA==} + engines: {node: '>=14.0.0'} + hasBin: true + sax@1.4.1: resolution: {integrity: sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==} @@ -728,6 +1271,14 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + spdx-compare@1.0.0: resolution: {integrity: sha512-C1mDZOX0hnu0ep9dfmuoi03+eOdDoz2yvK79RxbcrVEG1NO1Ph35yW102DHWKN4pk80nwCgeMmSY5L25VE4D9A==} @@ -749,9 +1300,16 @@ packages: spdx-satisfies@5.0.1: resolution: {integrity: sha512-Nwor6W6gzFp8XX4neaKQ7ChV4wmpSh2sSDemMFSzHxpTw460jxFYeOn+jq4ybnSSw/5sc3pjka9MQPouksQNpw==} + stable@0.1.8: + resolution: {integrity: sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==} + deprecated: 'Modern JS already guarantees Array#sort() is a stable sort, so this library is deprecated. 
See the compatibility table on MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#browser_compatibility' + streamx@2.22.1: resolution: {integrity: sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==} + string-hash@1.1.3: + resolution: {integrity: sha512-kJUvRUFK49aub+a7T1nNE66EJbZBMnBgoC1UbCZ5n6bsZKBRga4KgBRTMn/pFkeCZSYtNeSyMxPDM0AXWELk2A==} + string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -774,6 +1332,28 @@ packages: resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} engines: {node: '>=12'} + style-inject@0.3.0: + resolution: {integrity: sha512-IezA2qp+vcdlhJaVm5SOdPPTUu0FCEqfNSli2vRuSIBbu5Nq5UvygTk/VzeCqfLz2Atj3dVII5QBKGZRZ0edzw==} + + stylehacks@5.1.1: + resolution: {integrity: sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw==} + engines: {node: ^10 || ^12 || >=14.0} + peerDependencies: + postcss: ^8.2.15 + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + svgo@2.8.0: + resolution: {integrity: sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg==} + engines: {node: '>=10.13.0'} + hasBin: true + tar-stream@3.1.7: resolution: {integrity: sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==} @@ -795,6 +1375,12 @@ packages: undici-types@7.8.0: resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} + update-browserslist-db@1.1.3: + resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} @@ -819,6 +1405,10 @@ packages: resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==} engines: {node: '>=4.0'} + yaml@1.10.2: + resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} + engines: {node: '>= 6'} + zip-a-folder@3.1.9: resolution: {integrity: sha512-0TPP3eK5mbZxHnOE8w/Jg6gwxsxZOrA3hXHMfC3I4mcTvyJwNt7GZP8i6uiAMVNu43QTmVz0ngEMKcjgpLZLmQ==} @@ -887,6 +1477,67 @@ snapshots: '@oxc-project/types@0.76.0': {} + '@parcel/watcher-android-arm64@2.5.1': + optional: true + + '@parcel/watcher-darwin-arm64@2.5.1': + optional: true + + '@parcel/watcher-darwin-x64@2.5.1': + optional: true + + '@parcel/watcher-freebsd-x64@2.5.1': + optional: true + + '@parcel/watcher-linux-arm-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-arm-musl@2.5.1': + optional: true + + '@parcel/watcher-linux-arm64-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-arm64-musl@2.5.1': + optional: true + + '@parcel/watcher-linux-x64-glibc@2.5.1': + optional: true + + '@parcel/watcher-linux-x64-musl@2.5.1': + optional: true + + 
'@parcel/watcher-win32-arm64@2.5.1': + optional: true + + '@parcel/watcher-win32-ia32@2.5.1': + optional: true + + '@parcel/watcher-win32-x64@2.5.1': + optional: true + + '@parcel/watcher@2.5.1': + dependencies: + detect-libc: 1.0.3 + is-glob: 4.0.3 + micromatch: 4.0.8 + node-addon-api: 7.1.1 + optionalDependencies: + '@parcel/watcher-android-arm64': 2.5.1 + '@parcel/watcher-darwin-arm64': 2.5.1 + '@parcel/watcher-darwin-x64': 2.5.1 + '@parcel/watcher-freebsd-x64': 2.5.1 + '@parcel/watcher-linux-arm-glibc': 2.5.1 + '@parcel/watcher-linux-arm-musl': 2.5.1 + '@parcel/watcher-linux-arm64-glibc': 2.5.1 + '@parcel/watcher-linux-arm64-musl': 2.5.1 + '@parcel/watcher-linux-x64-glibc': 2.5.1 + '@parcel/watcher-linux-x64-musl': 2.5.1 + '@parcel/watcher-win32-arm64': 2.5.1 + '@parcel/watcher-win32-ia32': 2.5.1 + '@parcel/watcher-win32-x64': 2.5.1 + optional: true + '@pkgjs/parseargs@0.11.0': optional: true @@ -1013,6 +1664,8 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.45.0': optional: true + '@trysound/sax@0.2.0': {} + '@tsconfig/node22@22.0.2': {} '@tybys/wasm-util@0.10.0': @@ -1075,6 +1728,8 @@ snapshots: base64-js@1.5.1: {} + boolbase@1.0.0: {} + brace-expansion@2.0.2: dependencies: balanced-match: 1.0.2 @@ -1083,6 +1738,13 @@ snapshots: dependencies: fill-range: 7.1.1 + browserslist@4.25.1: + dependencies: + caniuse-lite: 1.0.30001727 + electron-to-chromium: 1.5.185 + node-releases: 2.0.19 + update-browserslist-db: 1.1.3(browserslist@4.25.1) + buffer-crc32@1.0.0: {} buffer@6.0.3: @@ -1092,6 +1754,20 @@ snapshots: cac@6.7.14: {} + caniuse-api@3.0.0: + dependencies: + browserslist: 4.25.1 + caniuse-lite: 1.0.30001727 + lodash.memoize: 4.1.2 + lodash.uniq: 4.5.0 + + caniuse-lite@1.0.30001727: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + chokidar@4.0.3: dependencies: readdirp: 4.1.2 @@ -1102,6 +1778,10 @@ snapshots: color-name@1.1.4: {} + colord@2.9.3: {} + + commander@7.2.0: {} + commenting@1.1.0: {} compress-commons@6.0.2: @@ -1112,6 +1792,10 @@ snapshots: normalize-path: 3.0.0 readable-stream: 4.7.0 + concat-with-sourcemaps@1.1.0: + dependencies: + source-map: 0.6.1 + consola@3.4.2: {} core-util-is@1.0.3: {} @@ -1129,18 +1813,118 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 + css-declaration-sorter@6.4.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + css-select@4.3.0: + dependencies: + boolbase: 1.0.0 + css-what: 6.2.2 + domhandler: 4.3.1 + domutils: 2.8.0 + nth-check: 2.1.1 + + css-tree@1.1.3: + dependencies: + mdn-data: 2.0.14 + source-map: 0.6.1 + + css-what@6.2.2: {} + + cssesc@3.0.0: {} + + cssnano-preset-default@5.2.14(postcss@8.5.6): + dependencies: + css-declaration-sorter: 6.4.1(postcss@8.5.6) + cssnano-utils: 3.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-calc: 8.2.4(postcss@8.5.6) + postcss-colormin: 5.3.1(postcss@8.5.6) + postcss-convert-values: 5.1.3(postcss@8.5.6) + postcss-discard-comments: 5.1.2(postcss@8.5.6) + postcss-discard-duplicates: 5.1.0(postcss@8.5.6) + postcss-discard-empty: 5.1.1(postcss@8.5.6) + postcss-discard-overridden: 5.1.0(postcss@8.5.6) + postcss-merge-longhand: 5.1.7(postcss@8.5.6) + postcss-merge-rules: 5.1.4(postcss@8.5.6) + postcss-minify-font-values: 5.1.0(postcss@8.5.6) + postcss-minify-gradients: 5.1.1(postcss@8.5.6) + postcss-minify-params: 5.1.4(postcss@8.5.6) + postcss-minify-selectors: 5.2.1(postcss@8.5.6) + postcss-normalize-charset: 5.1.0(postcss@8.5.6) + postcss-normalize-display-values: 5.1.0(postcss@8.5.6) + postcss-normalize-positions: 5.1.1(postcss@8.5.6) + postcss-normalize-repeat-style: 
5.1.1(postcss@8.5.6) + postcss-normalize-string: 5.1.0(postcss@8.5.6) + postcss-normalize-timing-functions: 5.1.0(postcss@8.5.6) + postcss-normalize-unicode: 5.1.1(postcss@8.5.6) + postcss-normalize-url: 5.1.0(postcss@8.5.6) + postcss-normalize-whitespace: 5.1.1(postcss@8.5.6) + postcss-ordered-values: 5.1.3(postcss@8.5.6) + postcss-reduce-initial: 5.1.2(postcss@8.5.6) + postcss-reduce-transforms: 5.1.0(postcss@8.5.6) + postcss-svgo: 5.1.0(postcss@8.5.6) + postcss-unique-selectors: 5.1.1(postcss@8.5.6) + + cssnano-utils@3.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + cssnano@5.1.15(postcss@8.5.6): + dependencies: + cssnano-preset-default: 5.2.14(postcss@8.5.6) + lilconfig: 2.1.0 + postcss: 8.5.6 + yaml: 1.10.2 + + csso@4.2.0: + dependencies: + css-tree: 1.1.3 + + detect-libc@1.0.3: + optional: true + + dom-serializer@1.4.1: + dependencies: + domelementtype: 2.3.0 + domhandler: 4.3.1 + entities: 2.2.0 + + domelementtype@2.3.0: {} + + domhandler@4.3.1: + dependencies: + domelementtype: 2.3.0 + + domutils@2.8.0: + dependencies: + dom-serializer: 1.4.1 + domelementtype: 2.3.0 + domhandler: 4.3.1 + dotenv@17.2.0: {} eastasianwidth@0.2.0: {} + electron-to-chromium@1.5.185: {} + emoji-regex@8.0.0: {} emoji-regex@9.2.2: {} + entities@2.2.0: {} + + escalade@3.2.0: {} + + estree-walker@0.6.1: {} + estree-walker@2.0.2: {} event-target-shim@5.0.1: {} + eventemitter3@4.0.7: {} + events@3.3.0: {} fast-fifo@1.3.2: {} @@ -1175,6 +1959,12 @@ snapshots: fsevents@2.3.3: optional: true + function-bind@1.1.2: {} + + generic-names@4.0.0: + dependencies: + loader-utils: 3.3.1 + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 @@ -1199,10 +1989,36 @@ snapshots: graceful-fs@4.2.11: {} + has-flag@4.0.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + icss-replace-symbols@1.1.0: {} + + icss-utils@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + ieee754@1.2.1: {} + immutable@5.1.3: {} + + import-cwd@3.0.0: + dependencies: + import-from: 3.0.0 + + import-from@3.0.0: + dependencies: + resolve-from: 5.0.0 + inherits@2.0.4: {} + is-core-module@2.16.1: + dependencies: + hasown: 2.0.2 + is-extglob@2.1.1: {} is-fullwidth-code-point@3.0.0: {} @@ -1233,6 +2049,16 @@ snapshots: dependencies: readable-stream: 2.3.8 + lilconfig@2.1.0: {} + + loader-utils@3.3.1: {} + + lodash.camelcase@4.3.0: {} + + lodash.memoize@4.1.2: {} + + lodash.uniq@4.5.0: {} + lodash@4.17.21: {} lru-cache@10.4.3: {} @@ -1247,6 +2073,8 @@ snapshots: dependencies: semver: 6.3.1 + mdn-data@2.0.14: {} + merge2@1.4.1: {} micromatch@4.0.8: @@ -1274,8 +2102,32 @@ snapshots: moment@2.30.1: {} + nanoid@3.3.11: {} + + node-addon-api@7.1.1: + optional: true + + node-releases@2.0.19: {} + normalize-path@3.0.0: {} + normalize-url@6.1.0: {} + + nth-check@2.1.1: + dependencies: + boolbase: 1.0.0 + + p-finally@1.0.0: {} + + p-queue@6.6.2: + dependencies: + eventemitter3: 4.0.7 + p-timeout: 3.2.0 + + p-timeout@3.2.0: + dependencies: + p-finally: 1.0.0 + package-json-from-dist@1.0.1: {} package-name-regex@2.0.6: {} @@ -1284,6 +2136,8 @@ snapshots: path-key@3.1.1: {} + path-parse@1.0.7: {} + path-scurry@1.11.1: dependencies: lru-cache: 10.4.3 @@ -1300,6 +2154,214 @@ snapshots: picomatch@4.0.2: {} + pify@5.0.0: {} + + postcss-calc@8.2.4(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + postcss-value-parser: 4.2.0 + + postcss-colormin@5.3.1(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + caniuse-api: 3.0.0 + colord: 2.9.3 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + 
postcss-convert-values@5.1.3(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-discard-comments@5.1.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-duplicates@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-empty@5.1.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-overridden@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-load-config@3.1.4(postcss@8.5.6): + dependencies: + lilconfig: 2.1.0 + yaml: 1.10.2 + optionalDependencies: + postcss: 8.5.6 + + postcss-merge-longhand@5.1.7(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + stylehacks: 5.1.1(postcss@8.5.6) + + postcss-merge-rules@5.1.4(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + caniuse-api: 3.0.0 + cssnano-utils: 3.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-minify-font-values@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-minify-gradients@5.1.1(postcss@8.5.6): + dependencies: + colord: 2.9.3 + cssnano-utils: 3.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-minify-params@5.1.4(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + cssnano-utils: 3.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-minify-selectors@5.2.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-modules-extract-imports@3.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-modules-local-by-default@4.2.0(postcss@8.5.6): + dependencies: + icss-utils: 5.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-selector-parser: 7.1.0 + postcss-value-parser: 4.2.0 + + postcss-modules-scope@3.2.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.0 + + postcss-modules-values@4.0.0(postcss@8.5.6): + dependencies: + icss-utils: 5.1.0(postcss@8.5.6) + postcss: 8.5.6 + + postcss-modules@4.3.1(postcss@8.5.6): + dependencies: + generic-names: 4.0.0 + icss-replace-symbols: 1.1.0 + lodash.camelcase: 4.3.0 + postcss: 8.5.6 + postcss-modules-extract-imports: 3.1.0(postcss@8.5.6) + postcss-modules-local-by-default: 4.2.0(postcss@8.5.6) + postcss-modules-scope: 3.2.1(postcss@8.5.6) + postcss-modules-values: 4.0.0(postcss@8.5.6) + string-hash: 1.1.3 + + postcss-normalize-charset@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-normalize-display-values@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-positions@5.1.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-repeat-style@5.1.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-string@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-timing-functions@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-unicode@5.1.1(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-url@5.1.0(postcss@8.5.6): + dependencies: + normalize-url: 6.1.0 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-whitespace@5.1.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-ordered-values@5.1.3(postcss@8.5.6): + dependencies: + 
cssnano-utils: 3.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-reduce-initial@5.1.2(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + caniuse-api: 3.0.0 + postcss: 8.5.6 + + postcss-reduce-transforms@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-selector-parser@6.1.2: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + postcss-selector-parser@7.1.0: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + postcss-svgo@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + svgo: 2.8.0 + + postcss-unique-selectors@5.1.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-value-parser@4.2.0: {} + + postcss@8.5.6: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + premove@4.0.0: {} prettier@3.6.2: {} @@ -1312,6 +2374,8 @@ snapshots: process@0.11.10: {} + promise.series@0.2.0: {} + queue-microtask@1.2.3: {} readable-stream@2.3.8: @@ -1338,6 +2402,14 @@ snapshots: readdirp@4.1.2: {} + resolve-from@5.0.0: {} + + resolve@1.22.10: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + reusify@1.1.0: {} rolldown@1.0.0-beta.26: @@ -1374,6 +2446,29 @@ snapshots: transitivePeerDependencies: - picomatch + rollup-plugin-postcss@4.0.2(postcss@8.5.6): + dependencies: + chalk: 4.1.2 + concat-with-sourcemaps: 1.1.0 + cssnano: 5.1.15(postcss@8.5.6) + import-cwd: 3.0.0 + p-queue: 6.6.2 + pify: 5.0.0 + postcss: 8.5.6 + postcss-load-config: 3.1.4(postcss@8.5.6) + postcss-modules: 4.3.1(postcss@8.5.6) + promise.series: 0.2.0 + resolve: 1.22.10 + rollup-pluginutils: 2.8.2 + safe-identifier: 0.4.2 + style-inject: 0.3.0 + transitivePeerDependencies: + - ts-node + + rollup-pluginutils@2.8.2: + dependencies: + estree-walker: 0.6.1 + rollup@4.45.0: dependencies: '@types/estree': 1.0.8 @@ -1408,6 +2503,16 @@ snapshots: safe-buffer@5.2.1: {} + safe-identifier@0.4.2: {} + + sass@1.89.2: + dependencies: + chokidar: 4.0.3 + immutable: 5.1.3 + source-map-js: 1.2.1 + optionalDependencies: + '@parcel/watcher': 2.5.1 + sax@1.4.1: {} semver@6.3.1: {} @@ -1420,6 +2525,10 @@ snapshots: signal-exit@4.1.0: {} + source-map-js@1.2.1: {} + + source-map@0.6.1: {} + spdx-compare@1.0.0: dependencies: array-find-index: 1.0.2 @@ -1447,6 +2556,8 @@ snapshots: spdx-expression-parse: 3.0.1 spdx-ranges: 2.1.1 + stable@0.1.8: {} + streamx@2.22.1: dependencies: fast-fifo: 1.3.2 @@ -1454,6 +2565,8 @@ snapshots: optionalDependencies: bare-events: 2.6.0 + string-hash@1.1.3: {} + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -1482,6 +2595,30 @@ snapshots: dependencies: ansi-regex: 6.1.0 + style-inject@0.3.0: {} + + stylehacks@5.1.1(postcss@8.5.6): + dependencies: + browserslist: 4.25.1 + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-preserve-symlinks-flag@1.0.0: {} + + svgo@2.8.0: + dependencies: + '@trysound/sax': 0.2.0 + commander: 7.2.0 + css-select: 4.3.0 + css-tree: 1.1.3 + csso: 4.2.0 + picocolors: 1.1.1 + stable: 0.1.8 + tar-stream@3.1.7: dependencies: b4a: 1.6.7 @@ -1503,6 +2640,12 @@ snapshots: undici-types@7.8.0: {} + update-browserslist-db@1.1.3(browserslist@4.25.1): + dependencies: + browserslist: 4.25.1 + escalade: 3.2.0 + picocolors: 1.1.1 + util-deprecate@1.0.2: {} which@2.0.2: @@ -1528,6 +2671,8 @@ snapshots: xmlbuilder@11.0.1: {} + yaml@1.10.2: {} + zip-a-folder@3.1.9: dependencies: archiver: 7.0.1 diff 
--git a/packages/mpx/pnpm-workspace.yaml b/packages/mpx/pnpm-workspace.yaml index 8956feeb..6f38a23e 100644 --- a/packages/mpx/pnpm-workspace.yaml +++ b/packages/mpx/pnpm-workspace.yaml @@ -1,3 +1,6 @@ +ignoredBuiltDependencies: + - '@parcel/watcher' + publicHoistPattern: - - "*rollup*" - - "*plugin*" + - '*rollup*' + - '*plugin*' diff --git a/packages/mpx/rolldown.config.ts b/packages/mpx/rolldown.config.ts index 1861eacc..72d6c01a 100644 --- a/packages/mpx/rolldown.config.ts +++ b/packages/mpx/rolldown.config.ts @@ -1,13 +1,18 @@ import type { RolldownOptions } from "rolldown"; import pkg from "./package.json" with { type: "json" }; +const external = [ + ...[Object.keys(pkg.dependencies ?? {})].flat() + // ...[Object.keys(pkg.peerDependencies ?? {})].flat() +]; + const config: RolldownOptions = { input: "./src/cli.ts", - external: [Object.keys(pkg.dependencies ?? {})].flat(), + external, output: { file: "./dist/mpx.js", inlineDynamicImports: true, - minify: false + minify: true }, platform: "node", treeshake: true diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 279dc989..17d675f9 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -16,21 +16,15 @@ import { blue, bold, dim, green, greenBright, inverse } from "./utils/colors.js" import { deployToMxProject, isTypeScriptProject, readPackageJson } from "./utils/fs.js"; import { createLogger } from "./utils/logger.js"; import { createMPK } from "./utils/mpk.js"; +import { CliBuildOptions } from "./utils/parsers/CliBuildOptions.js"; import { ProjectConfig, ProjectConfigWeb } from "./utils/project-config.js"; -interface BuildCommandOptions { - watch?: boolean; - minify?: boolean; - platform?: "web" | "native"; - showConfig?: boolean; -} - /** * Build the widget project. * @param root - Widget directory containing package.json * @param options - Build options */ -export async function build(root: string | undefined, options: BuildCommandOptions): Promise { +export async function build(root: string | undefined, options: CliBuildOptions): Promise { const logger: ConsolaInstance = createLogger(); try { root = path.resolve(root ?? 
""); @@ -41,7 +35,7 @@ export async function build(root: string | undefined, options: BuildCommandOptio let config: ProjectConfig; if (options.platform === "web") { - config = await ProjectConfigWeb.create({ pkg, isTsProject }); + config = await ProjectConfigWeb.create({ pkg, isTsProject, minify: options.minify }); } else { throw new Error(`Build for native is not implemented yet`); } diff --git a/packages/mpx/src/cli.ts b/packages/mpx/src/cli.ts index ed29ae84..d978e2a0 100644 --- a/packages/mpx/src/cli.ts +++ b/packages/mpx/src/cli.ts @@ -4,6 +4,7 @@ import { cac } from "cac"; import { env } from "node:process"; import { build } from "./build.js"; import { VERSION } from "./constants.js"; +import { CliBuildOptions } from "./utils/parsers/CliBuildOptions.js"; const name = "mpx"; const cli = cac(name); @@ -28,7 +29,7 @@ if (options.help || options.version) { process.exit(0); } -build(root, options); +build(root, CliBuildOptions.parse(options)); process.on("uncaughtException", error => { console.error("Uncaught Exception:", error.message); diff --git a/packages/mpx/src/plugins.ts b/packages/mpx/src/plugins.ts index 436033f5..3ba5a9a8 100644 --- a/packages/mpx/src/plugins.ts +++ b/packages/mpx/src/plugins.ts @@ -2,12 +2,14 @@ import image from "@rollup/plugin-image"; import url from "@rollup/plugin-url"; import { RolldownPlugin } from "rolldown"; import license from "rollup-plugin-license"; +import postcss from "rollup-plugin-postcss"; /** Note: Rollup has issue with exported types https://github.com/rollup/plugins/issues/1329 */ type RollupUrlFactory = typeof url.default; export type RollupUrlOptions = Parameters[0]; export type RollupImageOptions = Parameters[0]; export type RollupLicenseOptions = Parameters[0]; +export type RollupPostcssOptions = Parameters[0]; export const plugins = { url(options?: RollupUrlOptions): RolldownPlugin { @@ -19,5 +21,8 @@ export const plugins = { }, license(options?: RollupLicenseOptions): RolldownPlugin { return (license as any)(options) as RolldownPlugin; + }, + postcss(options?: RollupPostcssOptions): RolldownPlugin { + return (postcss as any)(options) as RolldownPlugin; } }; diff --git a/packages/mpx/src/rolldown.web.ts b/packages/mpx/src/rolldown.web.ts index 1f9905e9..47e942db 100644 --- a/packages/mpx/src/rolldown.web.ts +++ b/packages/mpx/src/rolldown.web.ts @@ -123,3 +123,32 @@ function stdPlugins(config: ProjectConfigWeb): RolldownPlugin[] { return [url(urlOptions), image(), license(licenseOptions)]; } + +export function widgetPostcssPlugin(config: ProjectConfigWeb) { + return plugins.postcss({ + extensions: [".css", ".sass", ".scss"], + inject: false, + minimize: config.minify, + // plugins: [ + // postcssImport(), + /** + * We need two copies of postcss-url because of final styles bundling in studio (pro). + * On line below, we just copying assets to widget bundle directory (com.mendix.widgets...) + * To make it work, this plugin have few requirements: + * 1. You should put your assets in src/assets/ + * 2. You should use relative path in your .scss files (e.g. url(../assets/icon.png) + * 3. This plugin relies on `to` property of postcss plugin and it should be present, when + * copying files to destination. + */ + // postcssUrl({ url: "copy", assetsPath: "assets" }), + /** + * This instance of postcss-url is just for adjusting asset path. + * Check doc comment for *createCssUrlTransform* for explanation. 
+         */
+        // postcssUrl({ url: cssUrlTransform })
+        // ],
+        sourceMap: false,
+        use: ["sass"]
+        // to: join(outDir, `${outWidgetFile}.css`)
+    });
+}
diff --git a/packages/mpx/src/utils/parsers/CliBuildOptions.ts b/packages/mpx/src/utils/parsers/CliBuildOptions.ts
new file mode 100644
index 00000000..097635a1
--- /dev/null
+++ b/packages/mpx/src/utils/parsers/CliBuildOptions.ts
@@ -0,0 +1,10 @@
+import * as z from "zod";
+
+export const CliBuildOptions = z.object({
+    watch: z.boolean(),
+    minify: z.boolean(),
+    platform: z.enum(["web", "native"]),
+    showConfig: z.boolean()
+});
+
+export type CliBuildOptions = z.infer<typeof CliBuildOptions>;
diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts
index ac8b30d3..fa431de6 100644
--- a/packages/mpx/src/utils/project-config.ts
+++ b/packages/mpx/src/utils/project-config.ts
@@ -34,6 +34,7 @@ interface BundleOutputDirs {
 interface ProjectConfigInputs {
     pkg: PackageJson;
     isTsProject: boolean;
+    minify: boolean;
 }
 
 export abstract class ProjectConfig {
@@ -55,6 +56,9 @@ export abstract class ProjectConfig {
     /** MPK name including extension */
     readonly mpkBase: string;
 
+    /** Minify */
+    readonly minify: boolean;
+
     constructor(
         inputs: ProjectConfigInputs & {
             projectPath: string | null;
@@ -69,6 +73,7 @@ export abstract class ProjectConfig {
         this.platform = inputs.platform;
         this.deploymentPath = inputs.deploymentPath;
         this.mpkBase = env.MPKOUTPUT ?? `${pkg.packagePath}.${pkg.widgetName}.mpk`;
+        this.minify = inputs.minify;
     }
 
     /** Relative path to the widget directory from the "widgets" */

From 331b22fbcb3c7c2e539beeb97ad8e604fb4c3342 Mon Sep 17 00:00:00 2001
From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com>
Date: Wed, 16 Jul 2025 18:07:30 +0200
Subject: [PATCH 40/45] feat: allow local install via npm

---
 packages/mpx/package.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/mpx/package.json b/packages/mpx/package.json
index a8c49df9..413b0a93 100644
--- a/packages/mpx/package.json
+++ b/packages/mpx/package.json
@@ -7,10 +7,10 @@
         "mpx": "./bin/mpx.js"
     },
     "scripts": {
-        "preinstall": "npx only-allow pnpm",
         "test": "echo 'test is missing'",
         "dev": "premove dist && rolldown -c rolldown.config.ts -w",
-        "build": "premove dist && rolldown -c rolldown.config.ts"
+        "build": "premove dist && rolldown -c rolldown.config.ts",
+        "postinstall": "pnpm build"
     },
     "keywords": [
         "mendix",

From 762ab668378c45034c0d0a431ac508b6f8d0cb4f Mon Sep 17 00:00:00 2001
From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com>
Date: Wed, 16 Jul 2025 19:08:39 +0200
Subject: [PATCH 41/45] chore: add getting started

---
 packages/mpx/README.md    | 62 ++++++++++++++++++++++++++++++---------
 packages/mpx/package.json |  6 +++-
 2 files changed, 53 insertions(+), 15 deletions(-)

diff --git a/packages/mpx/README.md b/packages/mpx/README.md
index 069700d7..c0588cae 100644
--- a/packages/mpx/README.md
+++ b/packages/mpx/README.md
@@ -2,26 +2,60 @@
 
 Building widgets with rolldown
 
+> NOTE: This tool is in alpha
+
+## Getting started
+
+### 1. Create tarball
+
+Let's assume you cloned this repo to `~/code/widgets-tools`
+
+1. `$ cd ~/code/widgets-tools/packages/mpx`
+2. `$ pnpm install`
+3. `$ pnpm pack` to create the tarball
+
+### 2. Install to widget
+
+with `npm`
+
+> `$ npm install ~/code/mpx/mendix-mpx-0-y-z.tgz`
+
+with `pnpm`
+
+> `pnpm install /mpx/mendix-mpx-0-y-z.tgz`
+
+### 3. 
Change scripts + +In your `package.json` + +```diff +- "build": "pluggable-widgets-tools build:web" ++ "build": "mpx" +``` + +and + +```diff +- "start": "pluggable-widgets-tools start:server" ++ "start": "mpx -w" +``` + +## Usage + ``` mpx/0.1.0 Usage: - $ mpx [options] - -Commands: - build [root] Build widget + $ mpx [options] [dir] -For more info, run any command with the `--help` flag: - $ mpx build --help +Build the widget in the specified directory. If the directory is omitted, use the current directory. Options: - -h, --help Display this message - -v, --version Display version number -``` + -w, --watch watch for changes and rebuild (default: false) + -m, --minify minify the output (this option is 'on' in CI environment) (default: false) + -p, --platform build platform (web or node) (default: web) + --show-config print project config and exit (default: false) + -h, --help Display this message + -v, --version Display version number ``` -TODO: -- MPKOUTPUT -- mpkName -`` -``` diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 413b0a93..335d1097 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -6,11 +6,15 @@ "bin": { "mpx": "./bin/mpx.js" }, + "files": [ + "bin", + "dist" + ], "scripts": { "test": "echo 'test is missing'", "dev": "premove dist && rolldown -c rolldown.config.ts -w", "build": "premove dist && rolldown -c rolldown.config.ts", - "postinstall": "pnpm build" + "prepack": "pnpm build" }, "keywords": [ "mendix", From 59df1174715b0341396eacf65a62c0a5e4704ef1 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Wed, 16 Jul 2025 20:26:19 +0200 Subject: [PATCH 42/45] =?UTF-8?q?finish=20with=20plugins=20=E2=9C=A8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/mpx/package.json | 4 + packages/mpx/pnpm-lock.yaml | 106 +++++++++++++++++++++++ packages/mpx/src/rolldown.web.ts | 77 +++++++++++----- packages/mpx/src/utils/project-config.ts | 44 ++++++++-- 4 files changed, 200 insertions(+), 31 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 335d1097..79979e92 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -30,6 +30,8 @@ "@rollup/plugin-url": "^8.0.2", "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", + "@types/postcss-import": "^14.0.3", + "@types/postcss-url": "^10.0.4", "cac": "^6.7.14", "chokidar": "^4.0.3", "consola": "^3.4.2", @@ -52,6 +54,8 @@ }, "dependencies": { "postcss": "^8.5.6", + "postcss-import": "^16.1.1", + "postcss-url": "^10.1.3", "rolldown": "1.0.0-beta.26", "rollup-plugin-postcss": "^4.0.2", "sass": "^1.89.2", diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 2eb11cfc..3c19dc53 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -11,6 +11,12 @@ importers: postcss: specifier: ^8.5.6 version: 8.5.6 + postcss-import: + specifier: ^16.1.1 + version: 16.1.1(postcss@8.5.6) + postcss-url: + specifier: ^10.1.3 + version: 10.1.3(postcss@8.5.6) rolldown: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 @@ -36,6 +42,12 @@ importers: '@types/node': specifier: ^24.0.13 version: 24.0.13 + '@types/postcss-import': + specifier: ^14.0.3 + version: 14.0.3 + '@types/postcss-url': + specifier: ^10.0.4 + version: 10.0.4 cac: specifier: ^6.7.14 version: 6.7.14 @@ -422,6 +434,12 @@ packages: '@types/node@24.0.13': resolution: {integrity: 
sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ==} + '@types/postcss-import@14.0.3': + resolution: {integrity: sha512-raZhRVTf6Vw5+QbmQ7LOHSDML71A5rj4+EqDzAbrZPfxfoGzFxMHRCq16VlddGIZpHELw0BG4G0YE2ANkdZiIQ==} + + '@types/postcss-url@10.0.4': + resolution: {integrity: sha512-5QIO9NgbWmAkle65haRqkdgYPCOXheNsaFdbTJJQjT302yK3H49ql4t9a4y0NfpuPtU/UBo15VcV64WCSIMJKg==} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} @@ -476,6 +494,9 @@ packages: boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + brace-expansion@2.0.2: resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} @@ -534,6 +555,9 @@ packages: resolution: {integrity: sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==} engines: {node: '>= 14'} + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + concat-with-sourcemaps@1.1.0: resolution: {integrity: sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg==} @@ -601,6 +625,9 @@ packages: resolution: {integrity: sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==} engines: {node: '>=8.0.0'} + cuint@0.2.2: + resolution: {integrity: sha512-d4ZVpCW31eWwCMe1YT3ur7mUDnTXbgwyzaL320DrcRT45rfjYxkt5QWLrmOJ+/UEAI2+fQgKe/fCjR8l4TpRgw==} + detect-libc@1.0.3: resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} engines: {node: '>=0.10'} @@ -836,6 +863,11 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} + mime@2.5.2: + resolution: {integrity: sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==} + engines: {node: '>=4.0.0'} + hasBin: true + mime@3.0.0: resolution: {integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==} engines: {node: '>=10.0.0'} @@ -849,6 +881,9 @@ packages: resolution: {integrity: sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==} engines: {node: 20 || >=22} + minimatch@3.0.8: + resolution: {integrity: sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q==} + minimatch@5.1.6: resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} engines: {node: '>=10'} @@ -935,6 +970,10 @@ packages: resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} engines: {node: '>=12'} + pify@2.3.0: + resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==} + engines: {node: '>=0.10.0'} + pify@5.0.0: resolution: {integrity: sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==} engines: {node: '>=10'} @@ -980,6 +1019,12 @@ packages: 
peerDependencies: postcss: ^8.2.15 + postcss-import@16.1.1: + resolution: {integrity: sha512-2xVS1NCZAfjtVdvXiyegxzJ447GyqCeEI5V7ApgQVOWnros1p5lGNovJNapwPpMombyFBfqDwt7AD3n2l0KOfQ==} + engines: {node: '>=18.0.0'} + peerDependencies: + postcss: ^8.0.0 + postcss-load-config@3.1.4: resolution: {integrity: sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==} engines: {node: '>= 10'} @@ -1149,6 +1194,12 @@ packages: peerDependencies: postcss: ^8.2.15 + postcss-url@10.1.3: + resolution: {integrity: sha512-FUzyxfI5l2tKmXdYc6VTu3TWZsInayEKPbiyW+P6vmmIrrb4I6CGX0BFoewgYHLK+oIL5FECEK02REYRpBvUCw==} + engines: {node: '>=10'} + peerDependencies: + postcss: ^8.0.0 + postcss-value-parser@4.2.0: resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} @@ -1184,6 +1235,9 @@ packages: queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + read-cache@1.0.0: + resolution: {integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==} + readable-stream@2.3.8: resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} @@ -1405,6 +1459,9 @@ packages: resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==} engines: {node: '>=4.0'} + xxhashjs@0.2.2: + resolution: {integrity: sha512-AkTuIuVTET12tpsVIQo+ZU6f/qDmKuRUcjaqR+OIvm+aCBsZ95i7UVY5WJ9TMsSaZ0DA2WxoZ4acu0sPH+OKAw==} + yaml@1.10.2: resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} engines: {node: '>= 6'} @@ -1679,6 +1736,15 @@ snapshots: dependencies: undici-types: 7.8.0 + '@types/postcss-import@14.0.3': + dependencies: + postcss: 8.5.6 + + '@types/postcss-url@10.0.4': + dependencies: + '@types/node': 24.0.13 + postcss: 8.5.6 + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 @@ -1730,6 +1796,11 @@ snapshots: boolbase@1.0.0: {} + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + brace-expansion@2.0.2: dependencies: balanced-match: 1.0.2 @@ -1792,6 +1863,8 @@ snapshots: normalize-path: 3.0.0 readable-stream: 4.7.0 + concat-map@0.0.1: {} + concat-with-sourcemaps@1.1.0: dependencies: source-map: 0.6.1 @@ -1882,6 +1955,8 @@ snapshots: dependencies: css-tree: 1.1.3 + cuint@0.2.2: {} + detect-libc@1.0.3: optional: true @@ -2082,6 +2157,8 @@ snapshots: braces: 3.0.3 picomatch: 2.3.1 + mime@2.5.2: {} + mime@3.0.0: {} mini-svg-data-uri@1.4.4: {} @@ -2090,6 +2167,10 @@ snapshots: dependencies: '@isaacs/brace-expansion': 5.0.0 + minimatch@3.0.8: + dependencies: + brace-expansion: 1.1.12 + minimatch@5.1.6: dependencies: brace-expansion: 2.0.2 @@ -2154,6 +2235,8 @@ snapshots: picomatch@4.0.2: {} + pify@2.3.0: {} + pify@5.0.0: {} postcss-calc@8.2.4(postcss@8.5.6): @@ -2192,6 +2275,13 @@ snapshots: dependencies: postcss: 8.5.6 + postcss-import@16.1.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + read-cache: 1.0.0 + resolve: 1.22.10 + postcss-load-config@3.1.4(postcss@8.5.6): dependencies: lilconfig: 2.1.0 @@ -2354,6 +2444,14 @@ snapshots: postcss: 8.5.6 postcss-selector-parser: 6.1.2 + postcss-url@10.1.3(postcss@8.5.6): + dependencies: + make-dir: 3.1.0 + mime: 2.5.2 + minimatch: 3.0.8 + postcss: 8.5.6 + xxhashjs: 0.2.2 + postcss-value-parser@4.2.0: {} 
postcss@8.5.6: @@ -2378,6 +2476,10 @@ snapshots: queue-microtask@1.2.3: {} + read-cache@1.0.0: + dependencies: + pify: 2.3.0 + readable-stream@2.3.8: dependencies: core-util-is: 1.0.3 @@ -2671,6 +2773,10 @@ snapshots: xmlbuilder@11.0.1: {} + xxhashjs@0.2.2: + dependencies: + cuint: 0.2.2 + yaml@1.10.2: {} zip-a-folder@3.1.9: diff --git a/packages/mpx/src/rolldown.web.ts b/packages/mpx/src/rolldown.web.ts index 47e942db..1f1e08fb 100644 --- a/packages/mpx/src/rolldown.web.ts +++ b/packages/mpx/src/rolldown.web.ts @@ -1,4 +1,7 @@ import { ConsolaInstance } from "consola"; +import path from "node:path"; +import postcssImport from "postcss-import"; +import postcssUrl from "postcss-url"; import { BuildOptions, RolldownPlugin } from "rolldown"; import { STD_EXTERNALS } from "./constants.js"; import { plugins, RollupLicenseOptions, RollupUrlOptions } from "./plugins.js"; @@ -17,7 +20,7 @@ export async function defaultConfig(config: ProjectConfigWeb): Promise + asset.url.startsWith("assets/") ? `${config.publicPath}/${asset.url}` : asset.url; + return plugins.postcss({ extensions: [".css", ".sass", ".scss"], inject: false, + extract: path.resolve(config.outputFiles.css), minimize: config.minify, - // plugins: [ - // postcssImport(), - /** - * We need two copies of postcss-url because of final styles bundling in studio (pro). - * On line below, we just copying assets to widget bundle directory (com.mendix.widgets...) - * To make it work, this plugin have few requirements: - * 1. You should put your assets in src/assets/ - * 2. You should use relative path in your .scss files (e.g. url(../assets/icon.png) - * 3. This plugin relies on `to` property of postcss plugin and it should be present, when - * copying files to destination. - */ - // postcssUrl({ url: "copy", assetsPath: "assets" }), - /** - * This instance of postcss-url is just for adjusting asset path. - * Check doc comment for *createCssUrlTransform* for explanation. - */ - // postcssUrl({ url: cssUrlTransform }) - // ], + plugins: [ + postcssImport(), + /** + * We need two copies of postcss-url because of final styles bundling in studio (pro). + * On line below, we just copying assets to widget bundle directory (com.mendix.widgets...) + * To make it work, this plugin have few requirements: + * 1. You should put your assets in src/assets/ + * 2. You should use relative path in your .scss files (e.g. url(../assets/icon.png) + * 3. This plugin relies on `to` property of postcss plugin and it should be present, when + * copying files to destination. + */ + postcssUrl({ url: "copy", assetsPath: "assets" }), + /** + * This instance of postcss-url is just for adjusting asset path. + * Check doc comment for *createCssUrlTransform* for explanation. + */ + postcssUrl({ url: cssUrlTransform }) + ], sourceMap: false, use: ["sass"] - // to: join(outDir, `${outWidgetFile}.css`) }); } diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index fa431de6..48190e93 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -21,6 +21,7 @@ interface BundleOutputFiles { mpk: string; dependenciesTxt: string; dependenciesJson: string; + css: string; } interface BundleOutputDirs { @@ -40,10 +41,15 @@ interface ProjectConfigInputs { export abstract class ProjectConfig { readonly projectPath: string | null = null; - /** Output directory for built files */ + /** Output directory for built files. 
*/ readonly dist = "dist"; - /** Package root directory that contains all widget files shipped with mpk */ + + /** + * Package root directory that contains all widget files shipped with mpk. + * By default "dist/tmp/widgets". + */ readonly contentRoot = path.join(this.dist, "tmp", "widgets"); + /** Widget package.json */ readonly pkg: PackageJson; @@ -53,10 +59,9 @@ export abstract class ProjectConfig { readonly deploymentPath: string[]; - /** MPK name including extension */ + /** MPK name including extension. */ readonly mpkBase: string; - /** Minify */ readonly minify: boolean; constructor( @@ -76,11 +81,23 @@ export abstract class ProjectConfig { this.minify = inputs.minify; } - /** Relative path to the widget directory from the "widgets" */ - get relativeWidgetPath(): string { + /** + * Relative fs path to the widget directory from the "widgets". + * Used to compute widget directory in dist. + * Example: com\\mendix\\widget\\web\\accordion (on Windows) + */ + get widgetDirectory(): string { return path.join(...this.pkg.packagePath.split("."), this.pkg.widgetName.toLowerCase()); } + /** + * Public URL path. Used to compute asset urls. + * Example: com/mendix/widget/web/accordion + */ + get publicPath(): string { + return [...this.pkg.packagePath.split("."), this.pkg.widgetName.toLowerCase()].join("/"); + } + get inputFiles(): BundleInputFiles { const { pkg, isTsProject } = this; const ext = isTsProject ? "ts" : "js"; @@ -119,7 +136,7 @@ export abstract class ProjectConfig { } get outputDirs(): BundleOutputDirs { - const widgetDir = path.join(this.contentRoot, this.relativeWidgetPath); + const widgetDir = path.join(this.contentRoot, this.widgetDirectory); return { dist: this.dist, mpkDir: path.join(this.dist, this.pkg.version), @@ -143,7 +160,8 @@ export abstract class ProjectConfig { inputFiles: this.inputFiles, outputDirs: this.outputDirs, outputFiles: this.outputFiles, - relativeWidgetPath: this.relativeWidgetPath, + widgetDirectory: this.widgetDirectory, + publicPath: this.publicPath, mpkBase: this.mpkBase }; } @@ -174,7 +192,10 @@ export class ProjectConfigWeb extends ProjectConfig { super({ ...inputs, projectPath, platform: "web", deploymentPath: ["deployment", "web", "widgets"] }); } - /** Public path (aka base url) for widget assets */ + /** + * Public path (aka base url) for widget assets. 
+ * Example: widgets/com/mendix/widget/web/accordion/assets + */ get assetsPublicPath(): string { const { pkg: { packagePath, widgetName } @@ -217,6 +238,11 @@ export class ProjectConfigWeb extends ProjectConfig { dependenciesJson: path.format({ dir: outputDirs.contentRoot, base: "dependencies.json" + }), + css: path.format({ + dir: outputDirs.widgetDir, + name: pkg.widgetName, + ext: "css" }) }; } From 3c00af8966bd3b0c52d3b4a7afcebbfa7df248a2 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Thu, 17 Jul 2025 10:38:09 +0200 Subject: [PATCH 43/45] chore: minor fixes --- packages/mpx/src/rolldown.web.ts | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/packages/mpx/src/rolldown.web.ts b/packages/mpx/src/rolldown.web.ts index 1f1e08fb..8595a0f4 100644 --- a/packages/mpx/src/rolldown.web.ts +++ b/packages/mpx/src/rolldown.web.ts @@ -17,11 +17,16 @@ export async function defaultConfig(config: ProjectConfigWeb): Promise Date: Thu, 17 Jul 2025 13:40:29 +0200 Subject: [PATCH 44/45] update deps & change project config --- packages/mpx/package.json | 9 ++---- packages/mpx/pnpm-lock.yaml | 18 ++++++------ packages/mpx/src/utils/project-config.ts | 37 ++++++++++++++---------- packages/mpx/tsconfig.json | 2 +- 4 files changed, 34 insertions(+), 32 deletions(-) diff --git a/packages/mpx/package.json b/packages/mpx/package.json index 79979e92..56dfc77a 100644 --- a/packages/mpx/package.json +++ b/packages/mpx/package.json @@ -26,8 +26,6 @@ "license": "Apache-2.0", "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad", "devDependencies": { - "@rollup/plugin-image": "^3.0.3", - "@rollup/plugin-url": "^8.0.2", "@tsconfig/node22": "^22.0.2", "@types/node": "^24.0.13", "@types/postcss-import": "^14.0.3", @@ -39,24 +37,23 @@ "fast-glob": "^3.3.3", "filesize": "^11.0.1", "picocolors": "^1.1.1", - "postcss": "^8.5.6", "premove": "^4.0.0", "prettier": "^3.6.2", "pretty-ms": "^9.2.0", "rolldown": "1.0.0-beta.26", - "rollup-plugin-license": "^3.6.0", - "rollup-plugin-postcss": "^4.0.2", - "sass": "^1.89.2", "signal-exit": "^4.1.0", "typescript": "^5.8.3", "zip-a-folder": "^3.1.9", "zod": "^4.0.5" }, "dependencies": { + "@rollup/plugin-image": "^3.0.3", + "@rollup/plugin-url": "^8.0.2", "postcss": "^8.5.6", "postcss-import": "^16.1.1", "postcss-url": "^10.1.3", "rolldown": "1.0.0-beta.26", + "rollup-plugin-license": "^3.6.0", "rollup-plugin-postcss": "^4.0.2", "sass": "^1.89.2", "xml2js": "^0.6.2" diff --git a/packages/mpx/pnpm-lock.yaml b/packages/mpx/pnpm-lock.yaml index 3c19dc53..19a8ec8b 100644 --- a/packages/mpx/pnpm-lock.yaml +++ b/packages/mpx/pnpm-lock.yaml @@ -8,6 +8,12 @@ importers: .: dependencies: + '@rollup/plugin-image': + specifier: ^3.0.3 + version: 3.0.3(rollup@4.45.0) + '@rollup/plugin-url': + specifier: ^8.0.2 + version: 8.0.2(rollup@4.45.0) postcss: specifier: ^8.5.6 version: 8.5.6 @@ -20,6 +26,9 @@ importers: rolldown: specifier: 1.0.0-beta.26 version: 1.0.0-beta.26 + rollup-plugin-license: + specifier: ^3.6.0 + version: 3.6.0(picomatch@4.0.2)(rollup@4.45.0) rollup-plugin-postcss: specifier: ^4.0.2 version: 4.0.2(postcss@8.5.6) @@ -30,12 +39,6 @@ importers: specifier: ^0.6.2 version: 0.6.2 devDependencies: - '@rollup/plugin-image': - specifier: ^3.0.3 - version: 3.0.3(rollup@4.45.0) - '@rollup/plugin-url': - specifier: ^8.0.2 - version: 8.0.2(rollup@4.45.0) '@tsconfig/node22': specifier: ^22.0.2 
version: 22.0.2 @@ -78,9 +81,6 @@ importers: pretty-ms: specifier: ^9.2.0 version: 9.2.0 - rollup-plugin-license: - specifier: ^3.6.0 - version: 3.6.0(picomatch@4.0.2)(rollup@4.45.0) signal-exit: specifier: ^4.1.0 version: 4.1.0 diff --git a/packages/mpx/src/utils/project-config.ts b/packages/mpx/src/utils/project-config.ts index 48190e93..c82bd060 100644 --- a/packages/mpx/src/utils/project-config.ts +++ b/packages/mpx/src/utils/project-config.ts @@ -26,6 +26,7 @@ interface BundleOutputFiles { interface BundleOutputDirs { dist: string; + tmpDir: string; mpkDir: string; contentRoot: string; widgetDir: string; @@ -41,15 +42,6 @@ interface ProjectConfigInputs { export abstract class ProjectConfig { readonly projectPath: string | null = null; - /** Output directory for built files. */ - readonly dist = "dist"; - - /** - * Package root directory that contains all widget files shipped with mpk. - * By default "dist/tmp/widgets". - */ - readonly contentRoot = path.join(this.dist, "tmp", "widgets"); - /** Widget package.json */ readonly pkg: PackageJson; @@ -136,13 +128,26 @@ export abstract class ProjectConfig { } get outputDirs(): BundleOutputDirs { - const widgetDir = path.join(this.contentRoot, this.widgetDirectory); + // dist + const dist = "dist"; + // dist/tmp + const tmpDir = path.join(dist, "tmp"); + // dist/tmp/widgets + const contentRoot = path.join(tmpDir, "widgets"); + // dist/tmp/widgets/com/mendix/my/awesome/button + const widgetDir = path.join(contentRoot, this.widgetDirectory); + // dist/x.y.z + const mpkDir = path.join(dist, this.pkg.version); + // dist/widgets/com/mendix/my/awesome/button/assets + const widgetAssetsDir = path.join(widgetDir, "assets"); + return { - dist: this.dist, - mpkDir: path.join(this.dist, this.pkg.version), - contentRoot: this.contentRoot, + dist, + tmpDir, + mpkDir, + contentRoot, widgetDir, - widgetAssetsDir: path.join(widgetDir, "assets") + widgetAssetsDir }; } @@ -152,8 +157,8 @@ export abstract class ProjectConfig { toPlainObject(): Record { return { - dist: this.dist, - contentRoot: this.contentRoot, + dist: this.outputDirs.dist, + contentRoot: this.outputDirs.contentRoot, pkg: this.pkg, isTsProject: this.isTsProject, projectPath: this.projectPath, diff --git a/packages/mpx/tsconfig.json b/packages/mpx/tsconfig.json index b90cad84..c9653368 100644 --- a/packages/mpx/tsconfig.json +++ b/packages/mpx/tsconfig.json @@ -1,7 +1,7 @@ { "extends": "@tsconfig/node22/tsconfig.json", "exclude": ["input"], - "include": ["src/**/*", "../../../pluggable-widgets-tools/src/typings-generator"], + "include": ["src/**/*"], "compilerOptions": { "noEmit": true, "resolveJsonModule": true From 594f43ed2259449b4dc433d9f618d9e7b20a01a8 Mon Sep 17 00:00:00 2001 From: Illia Obukhau <8282906+iobuhov@users.noreply.github.com> Date: Thu, 17 Jul 2025 13:42:37 +0200 Subject: [PATCH 45/45] fix: change cleanup strategy --- packages/mpx/src/build.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/mpx/src/build.ts b/packages/mpx/src/build.ts index 17d675f9..154d32fd 100644 --- a/packages/mpx/src/build.ts +++ b/packages/mpx/src/build.ts @@ -60,7 +60,10 @@ export async function build(root: string | undefined, options: CliBuildOptions): throw new Error(`Build for native is not implemented yet`); } - await fs.rm(config.outputDirs.dist, { recursive: true, force: true }); + await Promise.all([ + fs.rm(config.outputDirs.tmpDir, { recursive: true, force: true }), + fs.rm(config.outputDirs.mpkDir, { recursive: true, force: true }) + ]); if (options.watch) { 
await tasks.watch({ config, bundles, logger, root });