Browse source

feat(build): add build-as-tool for baking-in configs

Craig Fletcher, 1 month ago
Parent
Commit
9ccaf50b1b
9 changed files with 1167 additions and 78 deletions
  1. 18 0
      README.md
  2. 1 0
      package-lock.json
  3. 2 0
      package.json
  4. 538 0
      src/build/build-as-tool.js
  5. 27 0
      src/build/nexe-entrypoint-compat.cjs
  6. 295 10
      src/build/nexe-python-compat.cjs
  7. 5 68
      src/index.js
  8. 95 0
      src/run.js
  9. 186 0
      test/buildAsTool.test.js

+ 18 - 0
README.md

@@ -41,6 +41,19 @@ Usual stuff applies - install as a module locally or globally and call the defau
 You can also build a self-contained executable if you like chunky binaries with `npm run build`. That'll output to the
 `dist/` directory, and you can then put that binary wherever you like (probably somewhere in your $PATH).
 
+If you want the binary to carry a specific config module and the action handlers it imports, build it with:
+
+`npm run build-as-tool -- --config ./rhedyn.config.js`
+
+That build path stages a generated module tree before running `nexe`, so the config module and any action code it
+imports are bundled into the binary and do not need to ship alongside it afterwards. The content files referenced by
+the configured tasks are not embedded: markdown, Sass, templates, partials, images, and other source files are still read from
+disk at runtime.
+
+If you want a different binary name without passing a full output path, add `--name`, for example:
+
+`npm run build-as-tool -- --config ./rhedyn.config.js --name md2doc`
+
 ## Caching
 
 Rhedyn includes intelligent caching that tracks both file dependencies and configuration state. Tasks are only re-run when:
@@ -60,6 +73,11 @@ set to `false` to reduce cache file size.
 By default, rhedyn will look for `rhedyn.config.js` in the current directory and fall back to the default if not found.
 You can override this behaviour using the `-c /path/to/config.js` or `--config path/to/config.js` CLI switches.
 
+For single-binary builds, `npm run build-as-tool -- --config /path/to/config.js` embeds that config module into the
+executable. Add `--name my-tool` to write the binary to `dist/my-tool` by default. Because the config executes inside
+the binary at runtime, expressions like `process.cwd()` still resolve when the tool is run, not when it is built, and
+the original config file can be deleted after the build completes.
+
 A working example of how to configure can be found in `src/defaults.js`, with the actions separated out to
 `src/actions/` for tidiness. 
 

+ 1 - 0
package-lock.json

@@ -28,6 +28,7 @@
       "devDependencies": {
         "@eslint/js": "^9.15.0",
         "eslint": "^9.15.0",
+        "meriyah": "^1.9.15",
         "nexe": "^3.3.7"
       }
     },

+ 2 - 0
package.json

@@ -7,6 +7,7 @@
     "generate": "node src/index.js",
     "test": "node --test",
     "build": "nexe -i src/index.js -o dist/rhedyn --build --python python3 --temp .nexe --patch $PWD/src/build/nexe-python-compat.cjs",
+    "build-as-tool": "node src/build/build-as-tool.js",
     "lint-fix": "eslint --fix src/**/*.js"
   },
   "bin": {
@@ -38,6 +39,7 @@
   "devDependencies": {
     "@eslint/js": "^9.15.0",
     "eslint": "^9.15.0",
+    "meriyah": "^1.9.15",
     "nexe": "^3.3.7"
   }
 }

+ 538 - 0
src/build/build-as-tool.js

@@ -0,0 +1,538 @@
+#!/usr/bin/env node
+
+import fs from "node:fs/promises"
+import path from "node:path"
+import process from "node:process"
+import { spawn } from "node:child_process"
+import { fileURLToPath } from "node:url"
+import { parseModule } from "meriyah"
+import { resolve as resolveDependencies } from "resolve-dependencies"
+import {
+  expandTilde,
+  fileExists,
+  readDirectoryRecursively,
+} from "../util/file-system.js"
+
+const modulePath = fileURLToPath(import.meta.url)
+const buildDir = path.dirname(modulePath)
+const rootDir = path.resolve(buildDir, "../..")
+const defaultInputPath = path.join(rootDir, "src/index.js")
+const runnerPath = path.join(rootDir, "src/run.js")
+const patchPath = path.join(rootDir, "src/build/nexe-python-compat.cjs")
+const pluginPath = path.join(rootDir, "src/build/nexe-entrypoint-compat.cjs")
+const defaultOutputPath = path.join(rootDir, "dist/rhedyn")
+const defaultTempPath = path.join(rootDir, ".nexe")
+const moduleFileExtensions = new Set([".js", ".mjs"])
+const stageRootDirName = "modules"
+
+function hasOption(args, optionNames) {
+  return args.some(arg =>
+    optionNames.includes(arg) ||
+    optionNames.some(optionName => arg.startsWith(`${optionName}=`)))
+}
+
+function getOptionValue(args, optionNames) {
+  for (let index = 0; index < args.length; index += 1) {
+    const arg = args[index]
+    for (const optionName of optionNames) {
+      if (arg === optionName) {
+        return args[index + 1]
+      }
+      if (arg.startsWith(`${optionName}=`)) {
+        return arg.slice(optionName.length + 1)
+      }
+    }
+  }
+
+  return null
+}
+
+function resolveArgPath(pathArg, cwd) {
+  return path.resolve(cwd, expandTilde(pathArg))
+}
+
+function validateOutputName(name) {
+  if (!name) {
+    throw new Error("Missing value for --name.")
+  }
+
+  const trimmedName = name.trim()
+  if (!trimmedName) {
+    throw new Error("Missing value for --name.")
+  }
+
+  if (
+    trimmedName === "." ||
+    trimmedName === ".." ||
+    trimmedName.includes("/") ||
+    trimmedName.includes("\\")
+  ) {
+    throw new Error(`Invalid --name value: ${name}`)
+  }
+
+  return trimmedName
+}
+
+export function toImportSpecifier(fromDir, targetPath) {
+  const relativePath = path.relative(fromDir, targetPath).replaceAll(path.sep, "/")
+  if (relativePath.startsWith(".")) {
+    return relativePath
+  }
+  return `./${relativePath}`
+}
+
+function isStageableModuleFile(filePath) {
+  return (
+    typeof filePath === "string" &&
+    path.isAbsolute(filePath) &&
+    !filePath.includes(`${path.sep}node_modules${path.sep}`) &&
+    moduleFileExtensions.has(path.extname(filePath))
+  )
+}
+
+export function getStagedModuleRelativePath(filePath) {
+  const resolvedPath = path.resolve(filePath)
+  const { root } = path.parse(resolvedPath)
+  const rootToken = root === path.sep
+    ? "_root"
+    : root.replaceAll(/[\\/:]+/g, "_").replace(/^_+|_+$/g, "") || "root"
+  const relativeFromRoot = path.relative(root, resolvedPath)
+  const stagedRelativePath = path.join(rootToken, relativeFromRoot)
+
+  return `${stagedRelativePath.slice(0, -path.extname(stagedRelativePath).length)}.mjs`
+}
+
+function getStagedModulePath(stagingRoot, filePath) {
+  return path.join(stagingRoot, getStagedModuleRelativePath(filePath))
+}
+
+function collectSpecifierNodes(node, specifierNodes = []) {
+  if (!node || typeof node !== "object") {
+    return specifierNodes
+  }
+
+  if (
+    (
+      node.type === "ImportDeclaration" ||
+      node.type === "ExportAllDeclaration" ||
+      node.type === "ExportNamedDeclaration"
+    ) &&
+    node.source &&
+    typeof node.source.value === "string"
+  ) {
+    specifierNodes.push(node.source)
+  }
+
+  if (
+    node.type === "ImportExpression" &&
+    node.source &&
+    node.source.type === "Literal" &&
+    typeof node.source.value === "string"
+  ) {
+    specifierNodes.push(node.source)
+  }
+
+  for (const value of Object.values(node)) {
+    if (!value || typeof value !== "object") {
+      continue
+    }
+
+    if (Array.isArray(value)) {
+      for (const item of value) {
+        collectSpecifierNodes(item, specifierNodes)
+      }
+      continue
+    }
+
+    collectSpecifierNodes(value, specifierNodes)
+  }
+
+  return specifierNodes
+}
+
+function applyTextReplacements(source, replacements) {
+  return replacements
+    .sort((left, right) => right.start - left.start)
+    .reduce(
+      (updatedSource, { start, end, text }) =>
+        `${updatedSource.slice(0, start)}${text}${updatedSource.slice(end)}`,
+      source,
+    )
+}
+
+function resolveDependencyRecord(fileRecord, specifier) {
+  if (!fileRecord?.deps) {
+    return null
+  }
+
+  return Object.prototype.hasOwnProperty.call(fileRecord.deps, specifier)
+    ? fileRecord.deps[specifier]
+    : null
+}
+
+function resolveStageableDependencyPath(filePath, specifier, stagedPathsByOriginalPath, fileRecord) {
+  const dependencyRecord = resolveDependencyRecord(fileRecord, specifier)
+  if (dependencyRecord?.absPath) {
+    return dependencyRecord.absPath
+  }
+
+  if (!specifier.startsWith(".") && !path.isAbsolute(specifier)) {
+    return null
+  }
+
+  const candidatePath = path.resolve(path.dirname(filePath), specifier)
+  return stagedPathsByOriginalPath.has(candidatePath) ? candidatePath : null
+}
+
+async function resolveLocalDependencyPath(filePath, specifier, fileRecord) {
+  const dependencyRecord = resolveDependencyRecord(fileRecord, specifier)
+  if (isStageableModuleFile(dependencyRecord?.absPath)) {
+    return dependencyRecord.absPath
+  }
+
+  if (!specifier.startsWith(".") && !path.isAbsolute(specifier)) {
+    return null
+  }
+
+  const resolvedPath = path.resolve(path.dirname(filePath), specifier)
+  return await fileExists(resolvedPath) ? resolvedPath : null
+}
+
+async function collectLocalDependencyPaths({ source, filePath, fileRecord }) {
+  const ast = parseModule(source, { next: true, ranges: true })
+  const dependencyPaths = await Promise.all(
+    collectSpecifierNodes(ast).map(specifierNode =>
+      resolveLocalDependencyPath(filePath, specifierNode.value, fileRecord)),
+  )
+
+  return dependencyPaths.filter(dependencyPath => isStageableModuleFile(dependencyPath))
+}
+
+export function rewriteModuleSpecifiers({
+  source,
+  filePath,
+  currentStagedPath,
+  fileRecord,
+  stagedPathsByOriginalPath,
+}) {
+  const ast = parseModule(source, { next: true, ranges: true })
+  const replacements = collectSpecifierNodes(ast).flatMap(specifierNode => {
+    const specifier = specifierNode.value
+    const resolvedDependencyPath = resolveStageableDependencyPath(
+      filePath,
+      specifier,
+      stagedPathsByOriginalPath,
+      fileRecord,
+    )
+
+    if (!resolvedDependencyPath) {
+      return []
+    }
+
+    const stagedDependencyPath = stagedPathsByOriginalPath.get(resolvedDependencyPath)
+    if (!stagedDependencyPath) {
+      return []
+    }
+
+    const nextSpecifier = toImportSpecifier(
+      path.dirname(currentStagedPath),
+      stagedDependencyPath,
+    )
+
+    return [
+      {
+        start: specifierNode.start,
+        end: specifierNode.end,
+        text: JSON.stringify(nextSpecifier),
+      },
+    ]
+  })
+
+  return applyTextReplacements(source, replacements)
+}
+
+export function parseBuildArgs(argv, cwd = process.cwd()) {
+  const forwardedArgs = []
+  let configPath = null
+  let outputName = null
+
+  for (let index = 0; index < argv.length; index += 1) {
+    const arg = argv[index]
+
+    if (arg === "-c" || arg === "--config") {
+      const pathArg = argv[index + 1]
+      if (!pathArg) {
+        throw new Error("Missing value for --config.")
+      }
+      configPath = resolveArgPath(pathArg, cwd)
+      index += 1
+      continue
+    }
+
+    if (arg.startsWith("--config=")) {
+      configPath = resolveArgPath(arg.slice("--config=".length), cwd)
+      continue
+    }
+
+    if (arg === "--name") {
+      outputName = validateOutputName(argv[index + 1])
+      index += 1
+      continue
+    }
+
+    if (arg.startsWith("--name=")) {
+      outputName = validateOutputName(arg.slice("--name=".length))
+      continue
+    }
+
+    forwardedArgs.push(arg)
+  }
+
+  return { configPath, forwardedArgs, outputName }
+}
+
+export function createConfiguredEntrypointSource({
+  entryDir,
+  stagedConfigPath,
+  stagedRunnerPath,
+}) {
+  return [
+    "(async () => {",
+    "  const configModule = await import(" + JSON.stringify(toImportSpecifier(entryDir, stagedConfigPath)) + ")",
+    "  const { runWithConfig } = await import(" + JSON.stringify(toImportSpecifier(entryDir, stagedRunnerPath)) + ")",
+    "",
+    "  const config = configModule.default || configModule",
+    "  await runWithConfig(config)",
+    "})().catch(err => {",
+    "  console.error(err)",
+    "  process.exit(1)",
+    "})",
+  ].join("\n")
+}
+
+async function resolveModuleGraph(entryPaths) {
+  const { files } = await resolveDependencies({
+    cwd: rootDir,
+    entries: entryPaths,
+    expand: "all",
+    loadContent: true,
+  })
+
+  const localModulePaths = new Set()
+  const queue = entryPaths
+    .map(entryPath => path.resolve(entryPath))
+    .filter(entryPath => isStageableModuleFile(entryPath))
+
+  while (queue.length > 0) {
+    const filePath = queue.pop()
+    if (!filePath || localModulePaths.has(filePath)) {
+      continue
+    }
+
+    localModulePaths.add(filePath)
+
+    const fileRecord = files[filePath]
+    const source = fileRecord?.contents ?? await fs.readFile(filePath, "utf8")
+    const dependencyPaths = await collectLocalDependencyPaths({
+      source,
+      filePath,
+      fileRecord,
+    })
+
+    for (const dependencyPath of dependencyPaths) {
+      if (!localModulePaths.has(dependencyPath)) {
+        queue.push(dependencyPath)
+      }
+    }
+  }
+
+  return {
+    filesByPath: files,
+    localModulePaths: [...localModulePaths].sort(),
+  }
+}
+
+async function stageModuleGraph({ tempDir, configPath }) {
+  const stagingRoot = path.join(tempDir, stageRootDirName)
+  const { filesByPath, localModulePaths } = await resolveModuleGraph([
+    runnerPath,
+    configPath,
+  ])
+  const stagedPathsByOriginalPath = new Map(
+    localModulePaths.map(filePath => [filePath, getStagedModulePath(stagingRoot, filePath)]),
+  )
+
+  await Promise.all(localModulePaths.map(async filePath => {
+    const stagedPath = stagedPathsByOriginalPath.get(filePath)
+    const fileRecord = filesByPath[filePath]
+    const source = fileRecord?.contents ?? await fs.readFile(filePath, "utf8")
+    const stagedSource = rewriteModuleSpecifiers({
+      source,
+      filePath,
+      currentStagedPath: stagedPath,
+      fileRecord,
+      stagedPathsByOriginalPath,
+    })
+
+    await fs.mkdir(path.dirname(stagedPath), { recursive: true })
+    await fs.writeFile(stagedPath, stagedSource, "utf8")
+  }))
+
+  return {
+    stagedPathsByOriginalPath,
+    stagedModulePaths: [...stagedPathsByOriginalPath.values()].sort(),
+  }
+}
+
+async function createConfiguredEntrypoint(configPath) {
+  const tempDir = await fs.mkdtemp(path.join(rootDir, ".rhedyn-build-"))
+  const { stagedPathsByOriginalPath, stagedModulePaths } = await stageModuleGraph({
+    tempDir,
+    configPath,
+  })
+  const stagedConfigPath = stagedPathsByOriginalPath.get(path.resolve(configPath))
+  const stagedRunnerPath = stagedPathsByOriginalPath.get(runnerPath)
+
+  if (!stagedConfigPath || !stagedRunnerPath) {
+    throw new Error("Failed to stage the tool entrypoint module graph.")
+  }
+
+  const entryPath = path.join(tempDir, "entry.cjs")
+  const entrySource = createConfiguredEntrypointSource({
+    entryDir: tempDir,
+    stagedConfigPath,
+    stagedRunnerPath,
+  })
+
+  await fs.writeFile(entryPath, entrySource, "utf8")
+
+  return {
+    entryPath,
+    resourcePaths: stagedModulePaths,
+    cleanup: async () => {
+      await fs.rm(tempDir, { force: true, recursive: true })
+    },
+  }
+}
+
+async function resolveEntrypoint(configPath) {
+  if (!configPath) {
+    return {
+      entryPath: defaultInputPath,
+      resourcePaths: [],
+      cleanup: async () => {},
+    }
+  }
+
+  if (!await fileExists(configPath)) {
+    throw new Error(`Config file not found: ${configPath}`)
+  }
+
+  return createConfiguredEntrypoint(configPath)
+}
+
+function getNexeCommand() {
+  return process.platform === "win32" ? "nexe.cmd" : "nexe"
+}
+
+function resolveOutputPath(outputName) {
+  return outputName ? path.join(rootDir, "dist", outputName) : defaultOutputPath
+}
+
+export function buildNexeArgs(entryPath, forwardedArgs, outputName = null) {
+  return buildNexeArgsWithResources(entryPath, forwardedArgs, [], outputName)
+}
+
+function buildNexeArgsWithResources(entryPath, forwardedArgs, resourcePaths, outputName) {
+  const args = ["-i", entryPath]
+
+  if (!hasOption(forwardedArgs, ["-o", "--output"])) {
+    args.push("-o", resolveOutputPath(outputName))
+  }
+
+  if (!hasOption(forwardedArgs, ["--temp"])) {
+    args.push("--temp", defaultTempPath)
+  }
+
+  if (!hasOption(forwardedArgs, ["--python"])) {
+    args.push("--python", "python3")
+  }
+
+  if (!hasOption(forwardedArgs, ["--patch"])) {
+    args.push("--patch", patchPath)
+  }
+
+  if (!hasOption(forwardedArgs, ["--plugin"])) {
+    args.push("--plugin", pluginPath)
+  }
+
+  if (!hasOption(forwardedArgs, ["--build"])) {
+    args.push("--build")
+  }
+
+  for (const resourcePath of resourcePaths) {
+    args.push("--resource", resourcePath)
+  }
+
+  return [...args, ...forwardedArgs]
+}
+
+async function runNexe(args) {
+  await fs.mkdir(path.dirname(defaultOutputPath), { recursive: true })
+
+  await new Promise((resolve, reject) => {
+    const child = spawn(getNexeCommand(), args, {
+      cwd: rootDir,
+      env: process.env,
+      stdio: "inherit",
+    })
+
+    child.on("error", reject)
+    child.on("close", code => {
+      if (code === 0) {
+        resolve()
+        return
+      }
+      reject(new Error(`nexe exited with code ${code}`))
+    })
+  })
+}
+
+async function invalidatePatchedBinaryCache(args) {
+  const tempArg = getOptionValue(args, ["--temp"])
+  const tempDir = tempArg
+    ? path.resolve(rootDir, tempArg)
+    : defaultTempPath
+
+  if (!await fileExists(tempDir)) {
+    return
+  }
+
+  const tempFiles = await readDirectoryRecursively(tempDir)
+  const compiledNodeBinaries = tempFiles.filter(filePath =>
+    filePath.endsWith(`${path.sep}out${path.sep}Release${path.sep}node`) ||
+    filePath.endsWith(`${path.sep}Release${path.sep}node.exe`))
+
+  await Promise.all(compiledNodeBinaries.map(filePath =>
+    fs.rm(filePath, { force: true })))
+}
+
+export async function buildAsTool(argv = process.argv.slice(2)) {
+  const { configPath, forwardedArgs, outputName } = parseBuildArgs(argv)
+  const { entryPath, resourcePaths, cleanup } = await resolveEntrypoint(configPath)
+
+  try {
+    const nexeArgs = buildNexeArgsWithResources(entryPath, forwardedArgs, resourcePaths, outputName)
+    await invalidatePatchedBinaryCache(nexeArgs)
+    await runNexe(nexeArgs)
+  } finally {
+    await cleanup()
+  }
+}
+
+if (path.resolve(process.argv[1] || "") === modulePath) {
+  await buildAsTool().catch(err => {
+    console.error(err.message)
+    process.exit(1)
+  })
+}

+ 27 - 0
src/build/nexe-entrypoint-compat.cjs

@@ -0,0 +1,27 @@
+const path = require("path")
+
+exports.default = async function nexeEntrypointCompat(compiler, next) {
+  const entryPath = path.resolve(
+    compiler.options.cwd,
+    compiler.entrypoint || compiler.options.input,
+  )
+  const originalSnippet =
+    `const entry = path.resolve(path.dirname(process.execPath),${JSON.stringify(compiler.entrypoint)})`
+  const replacementSnippet = `const entry = ${JSON.stringify(entryPath)}`
+
+  let replaced = false
+  compiler.shims = compiler.shims.map(shim => {
+    if (!shim.includes(originalSnippet)) {
+      return shim
+    }
+
+    replaced = true
+    return shim.replace(originalSnippet, replacementSnippet)
+  })
+
+  if (!replaced) {
+    throw new Error("Failed to patch nexe entrypoint shim.")
+  }
+
+  return next()
+}

+ 295 - 10
src/build/nexe-python-compat.cjs

@@ -1,14 +1,297 @@
+const path = require("path")
+
+function getNexeBootstrapSource(compiler) {
+  const entryPath = path.resolve(
+    compiler.options.cwd,
+    compiler.entrypoint || compiler.options.input,
+  )
+  const absoluteResources = Object.entries(compiler.binaryConfiguration.resources).reduce(
+    (resources, [resourcePath, value]) => {
+      if (!path.isAbsolute(resourcePath)) {
+        resources[path.resolve(compiler.options.cwd, resourcePath)] = value
+      }
+      return resources
+    },
+    {},
+  )
+  const binaryConfiguration = {
+    ...compiler.binaryConfiguration,
+    resources: {
+      ...compiler.binaryConfiguration.resources,
+      ...absoluteResources,
+    },
+  }
+  const originalBinaryConfigurationSnippet =
+    `process.__nexe = ${JSON.stringify(compiler.binaryConfiguration)};`
+  const binaryConfigurationSnippet =
+    `process.__nexe = ${JSON.stringify(binaryConfiguration)};`
+  const originalSnippet =
+    `const entry = path.resolve(path.dirname(process.execPath),${JSON.stringify(compiler.entrypoint)})`
+  const replacementSnippet = `const entry = ${JSON.stringify(entryPath)}`
+  const runtimeShims = compiler.shims
+    .map(shim => shim
+      .replace(originalBinaryConfigurationSnippet, binaryConfigurationSnippet)
+      .replace(originalSnippet, replacementSnippet))
+    .join("\n")
+    .replaceAll(
+      "var filepath = args[0];",
+      "var filepath = typeof args[0] === 'string' ? args[0] : args[1];",
+    )
+    .replace(
+      "Object.assign(fs, nfs);",
+      [
+        "Object.assign(fs, nfs);",
+        "    var toManifestPath = function (filepath) {",
+        "        if (filepath && typeof filepath === 'object' && filepath.protocol === 'file:' && typeof filepath.href === 'string') {",
+        "            try {",
+        "                return require('url').fileURLToPath(filepath);",
+        "            } catch (_) {",
+        "                return filepath;",
+        "            }",
+        "        }",
+        "        return filepath;",
+        "    };",
+        "    if (fs.promises) {",
+        "        var promises = fs.promises;",
+        "        var originalPromisesReadFile = typeof promises.readFile === 'function' ? promises.readFile.bind(promises) : null;",
+        "        var originalPromisesStat = typeof promises.stat === 'function' ? promises.stat.bind(promises) : null;",
+        "        var originalPromisesLstat = typeof promises.lstat === 'function' ? promises.lstat.bind(promises) : null;",
+        "        var originalPromisesRealpath = typeof promises.realpath === 'function' ? promises.realpath.bind(promises) : null;",
+        "        if (originalPromisesReadFile) {",
+        "            promises.readFile = function (filepath, options) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                setupManifest();",
+        "                var entry = manifest[getKey(manifestPath)];",
+        "                if (!entry) {",
+        "                    return originalPromisesReadFile(filepath, options);",
+        "                }",
+        "                return Promise.resolve(nfs.readFileSync(manifestPath, options));",
+        "            };",
+        "        }",
+        "        if (originalPromisesStat) {",
+        "            promises.stat = function (filepath) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                var stat = ownStat(manifestPath);",
+        "                if (stat) {",
+        "                    return Promise.resolve(stat);",
+        "                }",
+        "                return originalPromisesStat(filepath);",
+        "            };",
+        "        }",
+        "        if (originalPromisesLstat) {",
+        "            promises.lstat = function (filepath) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                var stat = ownStat(manifestPath);",
+        "                if (stat) {",
+        "                    return Promise.resolve(stat);",
+        "                }",
+        "                return originalPromisesLstat(filepath);",
+        "            };",
+        "        }",
+        "        if (originalPromisesRealpath) {",
+        "            promises.realpath = function (filepath, options) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                setupManifest();",
+        "                var key = getKey(manifestPath);",
+        "                if (manifest[key]) {",
+        "                    return Promise.resolve(manifestPath);",
+        "                }",
+        "                return originalPromisesRealpath(filepath, options);",
+        "            };",
+        "        }",
+        "    }",
+        "    var internalPromises = null;",
+        "    try {",
+        "        internalPromises = require('internal/fs/promises');",
+        "    } catch (_) {}",
+        "    if (internalPromises) {",
+        "        var internalPromisesExports = internalPromises.exports || internalPromises;",
+        "        var originalInternalReadFile = typeof internalPromisesExports.readFile === 'function' ? internalPromisesExports.readFile.bind(internalPromisesExports) : null;",
+        "        var originalInternalStat = typeof internalPromisesExports.stat === 'function' ? internalPromisesExports.stat.bind(internalPromisesExports) : null;",
+        "        var originalInternalLstat = typeof internalPromisesExports.lstat === 'function' ? internalPromisesExports.lstat.bind(internalPromisesExports) : null;",
+        "        var originalInternalRealpath = typeof internalPromisesExports.realpath === 'function' ? internalPromisesExports.realpath.bind(internalPromisesExports) : null;",
+        "        if (originalInternalReadFile) {",
+        "            internalPromisesExports.readFile = function (filepath, options) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                setupManifest();",
+        "                var entry = manifest[getKey(manifestPath)];",
+        "                if (!entry) {",
+        "                    return originalInternalReadFile(filepath, options);",
+        "                }",
+        "                return Promise.resolve(nfs.readFileSync(manifestPath, options));",
+        "            };",
+        "        }",
+        "        if (originalInternalStat) {",
+        "            internalPromisesExports.stat = function (filepath) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                var stat = ownStat(manifestPath);",
+        "                if (stat) {",
+        "                    return Promise.resolve(stat);",
+        "                }",
+        "                return originalInternalStat(filepath);",
+        "            };",
+        "        }",
+        "        if (originalInternalLstat) {",
+        "            internalPromisesExports.lstat = function (filepath) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                var stat = ownStat(manifestPath);",
+        "                if (stat) {",
+        "                    return Promise.resolve(stat);",
+        "                }",
+        "                return originalInternalLstat(filepath);",
+        "            };",
+        "        }",
+        "        if (originalInternalRealpath) {",
+        "            internalPromisesExports.realpath = function (filepath, options) {",
+        "                var manifestPath = toManifestPath(filepath);",
+        "                setupManifest();",
+        "                var key = getKey(manifestPath);",
+        "                if (manifest[key]) {",
+        "                    return Promise.resolve(manifestPath);",
+        "                }",
+        "                return originalInternalRealpath(filepath, options);",
+        "            };",
+        "        }",
+        "    }",
+      ].join("\n"),
+    )
+
+  return [
+    "  if (isMainThread) {",
+    "    const binaryPath = process.execPath || process.argv[0]",
+    "    if (!process.nexe) {",
+    "      const patches = {}",
+    "      const slice = [].slice",
+    "      const noopPatch = function(original) {",
+    "        const args = slice.call(arguments, 1)",
+    "        return original.apply(this, args)",
+    "      }",
+    "      const patch = function(obj, method, patchImpl) {",
+    "        const original = obj[method]",
+    "        if (!original) {",
+    "          return",
+    "        }",
+    "        patches[method] = patchImpl",
+    "        obj[method] = function() {",
+    "          const args = [original].concat(slice.call(arguments))",
+    "          return patches[method].apply(this, args)",
+    "        }",
+    "      }",
+    "      process.nexe = { patches }",
+    "      patch(process.binding('fs'), 'internalModuleReadFile', noopPatch)",
+    "      patch(process.binding('fs'), 'internalModuleReadJSON', noopPatch)",
+    "      patch(process.binding('fs'), 'internalModuleStat', noopPatch)",
+    "    }",
+    "    const fs = require('fs')",
+    "    const fd = fs.openSync(binaryPath, 'r')",
+    "    const stat = fs.statSync(binaryPath)",
+    "    const tailSize = Math.min(stat.size, 16000)",
+    "    const tailWindow = Buffer.alloc(tailSize)",
+    "    fs.readSync(fd, tailWindow, 0, tailSize, stat.size - tailSize)",
+    "    fs.closeSync(fd)",
+    "    const footerPosition = tailWindow.indexOf('<nexe~~sentinel>')",
+    "    if (footerPosition === -1) {",
+    "      throw new Error('Invalid Nexe binary')",
+    "    }",
+    "    const footer = tailWindow.slice(footerPosition, footerPosition + 32)",
+    "    const contentSize = footer.readDoubleLE(16)",
+    "    const resourceSize = footer.readDoubleLE(24)",
+    "    const contentStart = stat.size - tailSize + footerPosition - resourceSize - contentSize",
+    "    const resourceStart = contentStart + contentSize",
+    "    Object.defineProperty(process, '__nexe', (() => {",
+    "      let nexeHeader = null",
+    "      return {",
+    "        get() {",
+    "          return nexeHeader",
+    "        },",
+    "        set(value) {",
+    "          if (nexeHeader) {",
+    "            throw new Error('This property is readonly')",
+    "          }",
+    "          nexeHeader = Object.assign({}, value, {",
+    "            blobPath: binaryPath,",
+    "            layout: {",
+    "              stat,",
+    "              contentSize,",
+    "              contentStart,",
+    "              resourceSize,",
+    "              resourceStart,",
+    "            },",
+    "          })",
+    "          Object.freeze(nexeHeader)",
+    "        },",
+    "        enumerable: false,",
+    "        configurable: false,",
+    "      }",
+    "    })())",
+    ...runtimeShims.split("\n").map(line => `    ${line}`),
+    "  }",
+  ].join("\n")
+}
+
+async function patchCurrentNodeStartup(compiler) {
+  const fileName = "./lib/internal/process/pre_execution.js"
+  const file = await compiler.readFileAsync(fileName)
+  const bootstrapSource = getNexeBootstrapSource(compiler)
+  const hook = "  if (initializeModules) {\n"
+  const returnHook = "  return mainEntry;\n"
+  const nodeCcFileName = "./src/node.cc"
+  const nodeCcFile = await compiler.readFileAsync(nodeCcFileName)
+  const argvHook = [
+    "  std::string first_argv;",
+    "  if (env->argv().size() > 1) {",
+    "    first_argv = env->argv()[1];",
+    "  }",
+  ].join("\n")
+  const argvReplacement = [
+    "  std::string first_argv;",
+    "  if (env->argv().size() > 1) {",
+    "    first_argv = env->argv()[1];",
+    "  } else {",
+    "    return StartExecution(env, \"internal/main/run_main_module\");",
+    "  }",
+  ].join("\n")
+
+  if (
+    file.contents.includes("process.__nexe =")
+  ) {
+    if (!nodeCcFile.contents.includes(argvReplacement)) {
+      if (!nodeCcFile.contents.includes(argvHook)) {
+        throw new Error(`Could not find argv hook in ${nodeCcFileName}`)
+      }
+      nodeCcFile.contents = nodeCcFile.contents.replace(argvHook, argvReplacement)
+    }
+    return
+  }
+
+  file.contents = file.contents.replace(
+    hook,
+    `${bootstrapSource}\n${hook}`,
+  )
+  file.contents = file.contents.replace(
+    returnHook,
+    "  return mainEntry || process.argv[1];\n",
+  )
+
+  if (!nodeCcFile.contents.includes(argvReplacement)) {
+    if (!nodeCcFile.contents.includes(argvHook)) {
+      throw new Error(`Could not find argv hook in ${nodeCcFileName}`)
+    }
+    nodeCcFile.contents = nodeCcFile.contents.replace(argvHook, argvReplacement)
+  }
+}
+
 exports.default = async function nexePythonCompat(compiler, next) {
-  const fileName = "./tools/configure.d/nodedownload.py";
-  const file = await compiler.readFileAsync(fileName);
+  const fileName = "./tools/configure.d/nodedownload.py"
+  const file = await compiler.readFileAsync(fileName)
 
   if (!file.contents.includes("FancyURLopener")) {
-    return next();
+    return next()
   }
 
   const updatedContents = file.contents
     .replace(
-      /try:\n    from urllib\.request import FancyURLopener, URLopener\nexcept ImportError:\n    from urllib import FancyURLopener, URLopener/,
+      /try:\n {4}from urllib\.request import FancyURLopener, URLopener\nexcept ImportError:\n {4}from urllib import FancyURLopener, URLopener/,
       [
         "try:",
         "    from urllib.request import urlretrieve",
@@ -17,19 +300,21 @@ exports.default = async function nexePythonCompat(compiler, next) {
       ].join("\n"),
     )
     .replace(
-      /class ConfigOpener\(FancyURLopener\):\n    """fancy opener used by retrievefile\. Set a UA"""\n    # append to existing version \(UA\)\n    version = '%s node\.js\/configure' % URLopener\.version/,
+      /class ConfigOpener\(FancyURLopener\):\n {4}"""fancy opener used by retrievefile\. Set a UA"""\n {4}# append to existing version \(UA\)\n {4}version = '%s node\.js\/configure' % URLopener\.version/,
       [
         "class ConfigOpener(object):",
-        '    """compat opener used by retrievefile."""',
+        "    \"\"\"compat opener used by retrievefile.\"\"\"",
         "",
         "    def retrieve(self, url, targetfile, reporthook=None):",
         "        return urlretrieve(url, targetfile, reporthook=reporthook)",
       ].join("\n"),
-    );
+    )
 
   if (updatedContents !== file.contents) {
-    await compiler.setFileContentsAsync(fileName, updatedContents);
+    await compiler.setFileContentsAsync(fileName, updatedContents)
   }
 
-  return next();
-};
+  await patchCurrentNodeStartup(compiler)
+
+  return next()
+}

+ 5 - 68
src/index.js

@@ -1,71 +1,8 @@
 #!/usr/bin/env node
 
-import * as defaultConfig from "./defaults.js"
-import { getConfig, getTaskKey, getTaskName, processTask } from "./lib.js"
-import { performance } from "node:perf_hooks"
-import { getLogger } from "./logging.js"
-const startTime = performance.now()
-const { opts, tasks } = await getConfig() || { ...defaultConfig }
-const log = getLogger(opts.logLevel, "main")
-const flatTasks = tasks.flatMap(step => (Array.isArray(step) ? step : [step]))
-const duplicateTaskKeys = []
-const seenTaskKeys = new Set()
-for (const task of flatTasks) {
-  const taskKey = getTaskKey(task)
-  if (!taskKey) {
-    throw new Error("Each task must define `key` (or legacy `name`).")
-  }
-  if (seenTaskKeys.has(taskKey)) {
-    duplicateTaskKeys.push(taskKey)
-  } else {
-    seenTaskKeys.add(taskKey)
-  }
-}
-if (duplicateTaskKeys.length > 0) {
-  const uniqueDuplicates = [...new Set(duplicateTaskKeys)]
-  throw new Error(`Duplicate task keys found: ${uniqueDuplicates.join(", ")}`)
-}
-log.info(`Processing ${tasks.length} steps`)
-log.debug(`Running directory: ${opts.runDir}`)
-log.debug(`Output directory: ${opts.outDir}`)
-if (opts.cacheDir) {
-  log.debug(`Cache directory: ${opts.cacheDir}`)
-} else {
-  log.warn("Cache disabled")
-}
+import defaultConfig from "./defaults.js"
+import { getConfig } from "./lib.js"
+import { runWithConfig } from "./run.js"
 
-const taskRunner = tasks.reduce(
-  async (metaPromise, step) => {
-    const tasks = Array.isArray(step) ? step : [step]
-    const { meta, filesWritten } = await metaPromise
-    const stepTasks = tasks.map(task => getTaskName(task))
-    log.info(`Starting tasks: ${stepTasks.join(", ")}`)
-    const stepResults = await Promise.all(tasks.map(async task => {
-      const log = getLogger(opts.logLevel, getTaskName(task))
-      const taskResult = await processTask(meta, task)
-      log.trace(`taskResult: ${JSON.stringify(taskResult)}`)
-      return taskResult
-    }))
-    const newState = stepResults.reduce((newState, taskResult) => {
-      const resources = Object.keys(taskResult.resources).length > 0 ? {
-        ...newState.meta.resources,
-        [taskResult.key]: taskResult.resources,
-      } : { ...newState.meta.resources }
-      return {
-        meta: {
-          ...newState.meta,
-          resources,
-        },
-        filesWritten: newState.filesWritten + taskResult.filesWritten,
-      }
-    }, { meta, filesWritten })
-    return newState
-  },
-  Promise.resolve({ meta: { opts }, filesWritten: 0 }),
-)
-const finalState = await taskRunner
-log.trace(`Final state: ${JSON.stringify(finalState, null, 2)}`)
-const endTime = performance.now()
-const timeTaken = (endTime - startTime)
-const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-log.info(`Completed ${tasks.length} steps in ${hrTime}, wrote ${finalState.filesWritten} files.`)
+const config = await getConfig() || defaultConfig
+await runWithConfig(config)

+ 95 - 0
src/run.js

@@ -0,0 +1,95 @@
+import { performance } from "node:perf_hooks"
+import { getTaskKey, getTaskName, processTask } from "./lib.js"
+import { getLogger } from "./logging.js"
+
/**
 * Execute a resolved rhedyn configuration.
 *
 * Validates that every task carries a unique key, then walks the step list in
 * order. A step is either a single task or an array of tasks; tasks within a
 * step run in parallel, while steps themselves run sequentially so later steps
 * can consume the resource metadata accumulated by earlier ones.
 *
 * @param {{ opts: object, tasks: Array }} config - resolved config with `opts`
 *   (logLevel, runDir, outDir, cacheDir, ...) and the ordered `tasks` list.
 * @returns {Promise<{ meta: object, filesWritten: number }>} the final
 *   accumulated state.
 * @throws {Error} when the config shape is invalid, a task lacks a key, or
 *   two tasks share the same key.
 */
export async function runWithConfig(config) {
  if (!config || typeof config !== "object") {
    throw new Error("Rhedyn config must export an object with `opts` and `tasks`.")
  }

  const startTime = performance.now()
  const { opts, tasks } = config

  if (!opts || !tasks) {
    throw new Error("Rhedyn config must define both `opts` and `tasks`.")
  }

  const log = getLogger(opts.logLevel, "main")

  // Up-front validation: every task needs a unique key before any work runs.
  const seenKeys = new Set()
  const duplicateKeys = []
  const allTasks = tasks.flatMap(step => (Array.isArray(step) ? step : [step]))
  for (const task of allTasks) {
    const key = getTaskKey(task)
    if (!key) {
      throw new Error("Each task must define `key` (or legacy `name`).")
    }
    if (seenKeys.has(key)) {
      duplicateKeys.push(key)
    } else {
      seenKeys.add(key)
    }
  }

  if (duplicateKeys.length > 0) {
    const uniqueDuplicates = [...new Set(duplicateKeys)]
    throw new Error(`Duplicate task keys found: ${uniqueDuplicates.join(", ")}`)
  }

  log.info(`Processing ${tasks.length} steps`)
  log.debug(`Running directory: ${opts.runDir}`)
  log.debug(`Output directory: ${opts.outDir}`)

  if (opts.cacheDir) {
    log.debug(`Cache directory: ${opts.cacheDir}`)
  } else {
    log.warn("Cache disabled")
  }

  // Sequential step loop; each iteration fans out its tasks in parallel and
  // then folds the results back into the running state.
  let state = { meta: { opts }, filesWritten: 0 }
  for (const step of tasks) {
    const stepTasks = Array.isArray(step) ? step : [step]
    log.info(`Starting tasks: ${stepTasks.map(task => getTaskName(task)).join(", ")}`)

    // All tasks in this step see the meta as it stood when the step began.
    const stepResults = await Promise.all(stepTasks.map(async task => {
      const taskLog = getLogger(opts.logLevel, getTaskName(task))
      const taskResult = await processTask(state.meta, task)
      taskLog.trace(`taskResult: ${JSON.stringify(taskResult)}`)
      return taskResult
    }))

    for (const taskResult of stepResults) {
      // Only tasks that produced resources get an entry under their key.
      const resources = Object.keys(taskResult.resources).length > 0
        ? {
          ...state.meta.resources,
          [taskResult.key]: taskResult.resources,
        }
        : { ...state.meta.resources }

      state = {
        meta: {
          ...state.meta,
          resources,
        },
        filesWritten: state.filesWritten + taskResult.filesWritten,
      }
    }
  }

  log.trace(`Final state: ${JSON.stringify(state, null, 2)}`)

  const elapsed = performance.now() - startTime
  const hrTime = elapsed > 1000
    ? `${Number.parseFloat(elapsed / 1000).toFixed(2)}s`
    : `${Number.parseFloat(elapsed).toFixed(2)}ms`

  log.info(
    `Completed ${tasks.length} steps in ${hrTime}, wrote ${state.filesWritten} files.`,
  )

  return state
}

+ 186 - 0
test/buildAsTool.test.js

@@ -0,0 +1,186 @@
+import assert from "node:assert/strict"
+import path from "node:path"
+import test from "node:test"
+import {
+  buildNexeArgs,
+  createConfiguredEntrypointSource,
+  getStagedModuleRelativePath,
+  parseBuildArgs,
+  rewriteModuleSpecifiers,
+  toImportSpecifier,
+} from "../src/build/build-as-tool.js"
+
test("parseBuildArgs extracts config and preserves nexe args", () => {
  const projectDir = "/tmp/rhedyn-project"
  const parsed = parseBuildArgs(
    [
      "--config",
      "./site/rhedyn.config.js",
      "--target",
      "linux-x64-20.0.0",
      "-o",
      "dist/custom",
    ],
    projectDir,
  )

  // The --config pair is consumed; everything else is forwarded to nexe.
  assert.equal(parsed.configPath, path.join(projectDir, "site/rhedyn.config.js"))
  assert.equal(parsed.outputName, null)
  assert.deepEqual(parsed.forwardedArgs, [
    "--target",
    "linux-x64-20.0.0",
    "-o",
    "dist/custom",
  ])
})

test("parseBuildArgs supports inline --config syntax", () => {
  const projectDir = "/tmp/rhedyn-project"
  const parsed = parseBuildArgs(
    ["--config=./rhedyn.config.js", "--python", "python3.12"],
    projectDir,
  )

  assert.equal(parsed.configPath, path.join(projectDir, "rhedyn.config.js"))
  assert.equal(parsed.outputName, null)
  assert.deepEqual(parsed.forwardedArgs, ["--python", "python3.12"])
})

test("parseBuildArgs extracts output name without forwarding it to nexe", () => {
  const parsed = parseBuildArgs([
    "--name",
    "md2doc",
    "--target",
    "linux-x64-22.16.0",
  ])

  assert.equal(parsed.outputName, "md2doc")
  assert.deepEqual(parsed.forwardedArgs, ["--target", "linux-x64-22.16.0"])
})

test("parseBuildArgs supports inline --name syntax", () => {
  const parsed = parseBuildArgs(["--name=md2doc", "--python", "python3.12"])

  assert.equal(parsed.outputName, "md2doc")
  assert.deepEqual(parsed.forwardedArgs, ["--python", "python3.12"])
})
+
test("toImportSpecifier prefixes sibling paths", () => {
  const specifier = toImportSpecifier("/tmp/build-entry", "/tmp/build-entry/entry.js")

  assert.equal(specifier, "./entry.js")
})

test("createConfiguredEntrypointSource loads config and runner with relative literal imports", () => {
  const source = createConfiguredEntrypointSource({
    entryDir: "/tmp/build-entry",
    stagedConfigPath: "/tmp/build-entry/modules/_root/tmp/project/config/rhedyn.config.mjs",
    stagedRunnerPath: "/tmp/build-entry/modules/_root/home/me/rhedyn/src/run.mjs",
  })

  // The generated entrypoint must use literal relative specifiers so nexe can
  // resolve and bundle the staged modules.
  assert.match(source, /const configModule = await import\("\.\/modules\/_root\/tmp\/project\/config\/rhedyn\.config\.mjs"\)/)
  assert.match(source, /const \{ runWithConfig \} = await import\("\.\/modules\/_root\/home\/me\/rhedyn\/src\/run\.mjs"\)/)
  assert.match(source, /const config = configModule\.default \|\| configModule/)
  assert.match(source, /await runWithConfig\(config\)/)
})

test("getStagedModuleRelativePath mirrors absolute paths under the staging root", () => {
  const staged = getStagedModuleRelativePath("/tmp/project/config/rhedyn.config.js")

  assert.equal(staged, path.join("_root", "tmp/project/config/rhedyn.config.mjs"))
})
+
test("rewriteModuleSpecifiers retargets staged local imports and exports", () => {
  // Map of original on-disk module paths to their staged .mjs counterparts.
  const stagedPathsByOriginalPath = new Map(Object.entries({
    "/tmp/project/config/actions/smoke.js":
      "/tmp/build/modules/_root/tmp/project/config/actions/smoke.mjs",
    "/tmp/project/shared/helper.js":
      "/tmp/build/modules/_root/tmp/project/shared/helper.mjs",
    "/tmp/project/config/dynamic.js":
      "/tmp/build/modules/_root/tmp/project/config/dynamic.mjs",
  }))

  const rewritten = rewriteModuleSpecifiers({
    source: [
      "import { smokeAction } from \"./actions/smoke.js\"",
      "export { helper } from \"../shared/helper.js\"",
      "const extra = import(\"./dynamic.js\")",
    ].join("\n"),
    filePath: "/tmp/project/config/rhedyn.config.js",
    currentStagedPath: "/tmp/build/modules/_root/tmp/project/config/rhedyn.config.mjs",
    fileRecord: {
      deps: {
        "./actions/smoke.js": { absPath: "/tmp/project/config/actions/smoke.js" },
        "../shared/helper.js": { absPath: "/tmp/project/shared/helper.js" },
        "./dynamic.js": { absPath: "/tmp/project/config/dynamic.js" },
      },
    },
    stagedPathsByOriginalPath,
  })

  // Static imports, re-exports, and dynamic import() all get .mjs targets.
  assert.match(rewritten, /"\.\/actions\/smoke\.mjs"/)
  assert.match(rewritten, /"\.\.\/shared\/helper\.mjs"/)
  assert.match(rewritten, /"\.\/dynamic\.mjs"/)
})

test("rewriteModuleSpecifiers falls back to local path resolution when dependency metadata is absent", () => {
  const rewritten = rewriteModuleSpecifiers({
    source: "export * from \"./file-system.js\"\n",
    filePath: "/tmp/project/util/index.js",
    currentStagedPath: "/tmp/build/modules/_root/tmp/project/util/index.mjs",
    fileRecord: { deps: {} },
    stagedPathsByOriginalPath: new Map(Object.entries({
      "/tmp/project/util/file-system.js":
        "/tmp/build/modules/_root/tmp/project/util/file-system.mjs",
    })),
  })

  assert.match(rewritten, /"\.\/file-system\.mjs"/)
})
+
test("buildNexeArgs injects the entrypoint plugin by default", () => {
  const joinedArgs = buildNexeArgs("/tmp/build-entry/entry.js", []).join(" ")

  assert.match(joinedArgs, /--plugin .*nexe-entrypoint-compat\.cjs/)
})
+
test("buildNexeArgs uses --name for the default output path", () => {
  const args = buildNexeArgs("/tmp/build-entry/entry.js", [], "md2doc")
  const outputIndex = args.indexOf("-o")
  const outputPath = args[outputIndex + 1]

  // The default output directory is resolved from the package root, which is
  // machine-specific. Asserting a developer's absolute checkout path (e.g.
  // /home/<user>/code/rhedyn) breaks the test on every other machine and on
  // CI, so pin only the stable `dist/<name>` suffix instead.
  assert.ok(outputIndex !== -1, "expected a -o flag in the generated nexe args")
  assert.ok(
    outputPath.endsWith(path.join("dist", "md2doc")),
    `expected default output to end with dist/md2doc, got ${outputPath}`,
  )
})
+
test("buildNexeArgs ignores --name when an explicit output path is provided", () => {
  const args = buildNexeArgs("/tmp/build-entry/entry.js", ["-o", "dist/custom"], "md2doc")
  const explicitOutput = args[args.indexOf("-o") + 1]

  assert.equal(explicitOutput, "dist/custom")
})