
Add config override switch

Craig Fletcher · 2 days ago
Commit 4f24704551
4 changed files with 322 additions and 264 deletions
  1. README.md (+5 −1)
  2. src/lib.js (+149 −93)
  3. src/processors.js (+153 −166)
  4. src/util/file-system.js (+15 −4)

+ 5 - 1
README.md

@@ -6,7 +6,8 @@ Run this script from a directory containing markdown, templates and styles and a
 containing the rendered-out static assets ready for deployment.
 
 While the defaults should work for most (of my) use cases, you can configure behaviour using a `rhedyn.config.js` in the
-directory where you run the tool.
+directory where you run the tool, or pass a path to a config using `-c /path/to/config.js` or `--config
+path/to/config.js`.
 
 "Rhedyn" is Welsh for "fern", and is pronounced a bit like "read in". 
 
@@ -54,6 +55,9 @@ considerably larger cache files.
 
 ## Configuration
 
+By default, rhedyn will look for `rhedyn.config.js` in the current directory and fall back to the default if not found.
+You can override this behaviour using the `-c /path/to/config.js` or `--config path/to/config.js` CLI switches.
+
 A working example of how to configure can be found in `src/defaults.js`, with the processors separated out to
 `src/processors.js` for tidiness. 
 

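For reference, the switch described in the README above can be exercised with a config file like the sketch below. The CLI entry-point name, the file name, and the placeholder fields are illustrative assumptions; only the `-c`/`--config` flags and the `config.default || config` import convention come from this change.

```js
// my-site.config.js — a hypothetical config passed via the new switch, e.g.
//   rhedyn -c ./my-site.config.js
//   rhedyn --config path/to/my-site.config.js
// getConfig() imports whatever file the path points at and uses
// `config.default || config`, so a default export is the simplest shape.
// The real option set mirrors src/defaults.js; the fields below are placeholders.
export default {
  // outDir: "dist",
  // tasks: [...],
};
```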
+ 149 - 93
src/lib.js

@@ -1,7 +1,4 @@
-import {
-  checkCache,
-  updateCache,
-} from "./cache.js"
+import { checkCache, updateCache } from "./cache.js";
 import {
   createTrackedObject,
   fileExists,
@@ -10,96 +7,121 @@ import {
   removeCwd,
   replaceFileExtension,
   getValueAtPath,
-} from "./util/index.js"
-import path from "path"
-import process from "node:process"
-import { getLogger } from "./logging.js"
+  expandTilde
+} from "./util/index.js";
+import path from "path";
+import process from "node:process";
+import { getLogger } from "./logging.js";
 
 export async function getConfig() {
-  const configPath = path.join(process.cwd(), "rhedyn.config.js")
-  const configFileExists = await fileExists(configPath)
+  const args = process.argv.slice(2);
+  const defaultPath = path.join(process.cwd(), "rhedyn.config.js");
+
+  const findConfigPath = (args, index = 0) => {
+    if (index >= args.length) {
+      return defaultPath;
+    }
+    if (
+      (args[index] === "-c" || args[index] === "--config") &&
+      index + 1 < args.length
+    ) {
+      return path.resolve(args[index + 1]);
+    }
+    return findConfigPath(args, index + 1);
+  };
+
+  const configPath = findConfigPath(args);
+  const configFileExists = await fileExists(configPath);
+
   if (configFileExists) {
     try {
-      const config = await import(configPath)
-      return config.default || config
+      const config = await import(configPath);
+      return config.default || config;
     } catch (err) {
-      console.error("Error reading rhedyn.config.js:", err)
-      throw new Error("Failed reading config file")
+      console.error(`Error reading config file at ${configPath}:`, err);
+      throw new Error("Failed reading config file");
     }
   } else {
-    return
+    return;
   }
 }
+
 async function runTask({ meta, config, jobId }) {
-  const log = getLogger(config.logLevel ? config.logLevel : meta.opts.logLevel, jobId)
-  log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
-  log.trace(`config: ${JSON.stringify(config, null, 2)}`)
+  const log = getLogger(
+    config.logLevel ? config.logLevel : meta.opts.logLevel,
+    jobId
+  );
+  log.trace(`meta: ${JSON.stringify(meta, null, 2)}`);
+  log.trace(`config: ${JSON.stringify(config, null, 2)}`);
 
   const stateObject = {
     meta,
-    config,
-  }
-  const cache = (meta.opts.cacheDir && !config.skipCache) ?
-    await checkCache(
-      jobId,
-      stateObject,
-      meta.opts,
-    )
-    :
-    { disabled: true, reason: "Cache disabled" }
+    config
+  };
+  const cache =
+    meta.opts.cacheDir && !config.skipCache
+      ? await checkCache(jobId, stateObject, meta.opts)
+      : { disabled: true, reason: "Cache disabled" };
 
   if (cache && cache.hit) {
-    log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`)
-    return { ...cache.taskResult, fromCache: true }
+    log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`);
+    return { ...cache.taskResult, fromCache: true };
   }
-  log.debug(`Cache miss for ${jobId} (${cache.reason})`)
+  log.debug(`Cache miss for ${jobId} (${cache.reason})`);
 
   const state = meta.opts.cacheDir
     ? createTrackedObject(stateObject)
-    : { proxy: stateObject }
+    : { proxy: stateObject };
 
   const {
     detail = {},
     paths = [],
     deps: processorDeps,
-    ref,
-  } = await config.processor(state.proxy)
+    ref
+  } = await config.processor(state.proxy);
 
   const taskResult = {
     detail,
-    paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
-    ref,
-  }
-  log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`)
+    paths: paths.map(fileOutputPath =>
+      fileOutputPath.replace(meta.opts.outDir, "")
+    ),
+    ref
+  };
+  log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`);
   if (cache && !cache.disabled) {
-    log.debug(`Updating cache for ${jobId}: ${cache.filePath}`)
-    const processorPathDeps = processorDeps?.paths || []
-    const processorStateDeps = processorDeps?.state || []
-    const configPathDeps = config.deps?.paths || []
-    const configStateDeps = config.deps?.state || []
-    const stateSelectors = config.stateSelectors || []
+    log.debug(`Updating cache for ${jobId}: ${cache.filePath}`);
+    const processorPathDeps = processorDeps?.paths || [];
+    const processorStateDeps = processorDeps?.state || [];
+    const configPathDeps = config.deps?.paths || [];
+    const configStateDeps = config.deps?.state || [];
+    const stateSelectors = config.stateSelectors || [];
     await updateCache(
       meta.opts.cacheDir,
       jobId,
-      removeCwd([
-        ...configPathDeps, ...processorPathDeps, config?.filePath,
-      ].filter(item => !!item)),
+      removeCwd(
+        [...configPathDeps, ...processorPathDeps, config?.filePath].filter(
+          item => !!item
+        )
+      ),
       [
-        ...configStateDeps, ...stateSelectors, ...processorStateDeps, ...(state?.accessed || []),
+        ...configStateDeps,
+        ...stateSelectors,
+        ...processorStateDeps,
+        ...(state?.accessed || [])
       ].filter(item => !!item),
       taskResult,
       cache.updates,
-      meta.opts.includeStateValues,
-    )
+      meta.opts.includeStateValues
+    );
   }
 
-  return taskResult
+  return taskResult;
 }
 
 async function expandFileTask(patternsToInclude, config, meta) {
-  const filesToProcess = await readFilesByGlob(patternsToInclude)
-  const pathsToStrip = config.stripPaths || []
-  const outputDir = config.outputDir || ""
+  const filesToProcess = await readFilesByGlob(patternsToInclude);
+  const pathsToStrip = (config.stripPaths || []).map(path => expandTilde(path));
+  const outputDir = config.outputDir || "";
   return await Promise.all(
     filesToProcess.map(async filePath => {
       const fileOutputPath = path.join(
@@ -107,66 +129,100 @@ async function expandFileTask(patternsToInclude, config, meta) {
         outputDir,
         replaceFileExtension(
           removeBasePaths(pathsToStrip, filePath),
-          config.outputFileExtension,
-        ),
-      )
+          config.outputFileExtension
+        )
+      );
 
-      const fileOutputDir = path.dirname(fileOutputPath)
+      const fileOutputDir = path.dirname(fileOutputPath);
       const jobConfig = {
         ...config,
-        filePath, fileOutputDir, fileOutputPath,
-      }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
-    }),
-  )
+        filePath,
+        fileOutputDir,
+        fileOutputPath
+      };
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${filePath}`
+      });
+    })
+  );
 }
 
 async function expandStateTask(stateToExpand, config, meta) {
-  const stateToProcess = stateToExpand.map(property => {
-    const values = getValueAtPath(meta, property)
-    const expandedValues = Array.isArray(values) ? values : Object.values(values)
-    return expandedValues.map((value, index) => ({ property, index, value }))
-  }).flat()
+  const stateToProcess = stateToExpand
+    .map(property => {
+      const values = getValueAtPath(meta, property);
+      const expandedValues = Array.isArray(values)
+        ? values
+        : Object.values(values);
+      return expandedValues.map((value, index) => ({ property, index, value }));
+    })
+    .flat();
 
   return await Promise.all(
-    stateToProcess.map(async (stateJob) => {
+    stateToProcess.map(async stateJob => {
       const jobConfig = {
         ...config,
-        ...stateJob.value.detail,
-      }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}` })
-    }),
-  )
+        ...stateJob.value.detail
+      };
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}`
+      });
+    })
+  );
 }
 
 export async function expandAndRunTask(meta, config) {
-  const includes = meta.opts?.include?.[config.name] || []
-  const patternsToInclude = [...(config?.inputFiles || []), ...includes]
+  const includes = meta.opts?.include?.[config.name] || [];
+  const patternsToInclude = [...(config?.inputFiles || []), ...includes];
 
   if (patternsToInclude.length) {
-    return expandFileTask(patternsToInclude, config, meta)
+    return expandFileTask(patternsToInclude, config, meta);
   }
 
   if (config.stateSelectors) {
-    return expandStateTask(config.stateSelectors, config, meta)
+    return expandStateTask(config.stateSelectors, config, meta);
   }
 
-  const jobId = config.jobId || config.name
-  const taskResult = await runTask({ meta, config, jobId })
-  return [taskResult]
+  const jobId = config.jobId || config.name;
+  const taskResult = await runTask({ meta, config, jobId });
+  return [taskResult];
 }
 
 export async function processTask(meta, task) {
-  const log = getLogger(meta.opts.logLevel, task.name)
-  const startTime = performance.now()
-  const taskResult = await expandAndRunTask(meta, task)
-  const cached = taskResult.filter(taskResult => taskResult.fromCache)
-  const processed = taskResult.filter(taskResult => !taskResult.fromCache)
-  const resources = taskResult.reduce((obj, tResult) => tResult.ref ? ({ ...obj, [tResult.ref]: tResult }) : obj, {})
-  const endTime = performance.now()
-  const timeTaken = (endTime - startTime)
-  const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-  const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
-  log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
-  return { name: task.name, taskResult, cached, processed, resources, filesWritten }
+  const log = getLogger(meta.opts.logLevel, task.name);
+  const startTime = performance.now();
+  const taskResult = await expandAndRunTask(meta, task);
+  const cached = taskResult.filter(taskResult => taskResult.fromCache);
+  const processed = taskResult.filter(taskResult => !taskResult.fromCache);
+  const resources = taskResult.reduce(
+    (obj, tResult) => (tResult.ref ? { ...obj, [tResult.ref]: tResult } : obj),
+    {}
+  );
+  const endTime = performance.now();
+  const timeTaken = endTime - startTime;
+  const hrTime =
+    timeTaken > 1000
+      ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s`
+      : `${Number.parseFloat(timeTaken).toFixed(2)}ms`;
+  const filesWritten = processed.reduce(
+    (acc, cur) => acc + cur.paths.length,
+    0
+  );
+  log.info(
+    `written: ${filesWritten} | processed: ${processed.length} | from cache: ${
+      cached.length
+    } | ${hrTime}`
+  );
+  return {
+    name: task.name,
+    taskResult,
+    cached,
+    processed,
+    resources,
+    filesWritten
+  };
 }

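The config lookup added to `getConfig` walks `process.argv` recursively and falls back to `rhedyn.config.js` in the working directory. A standalone sketch of the same logic, extracted here purely for illustration, shows how a few argument shapes resolve:

```js
import path from "path";
import process from "node:process";

// Same recursion as findConfigPath inside getConfig(): the first -c/--config
// flag that is followed by a value wins; anything else falls through to the
// default rhedyn.config.js in the current working directory.
const defaultPath = path.join(process.cwd(), "rhedyn.config.js");
const findConfigPath = (args, index = 0) => {
  if (index >= args.length) return defaultPath;
  if ((args[index] === "-c" || args[index] === "--config") && index + 1 < args.length) {
    return path.resolve(args[index + 1]);
  }
  return findConfigPath(args, index + 1);
};

findConfigPath(["--config", "site/rhedyn.config.js"]); // resolved absolute path
findConfigPath(["-c"]);                                // flag with no value -> defaultPath
findConfigPath([]);                                    // no flags -> defaultPath
```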
+ 153 - 166
src/processors.js

@@ -1,24 +1,24 @@
-import * as sass from "sass"
+import * as sass from "sass";
 import {
   firstFound,
   generateRandomId,
   getCleanPath,
   getHref,
   slugifyString,
-  writeFile,
-} from "./util/index.js"
-import fs from "fs/promises"
-import handlebars from "handlebars"
-import { marked } from "marked"
-import markedCodePreview from "marked-code-preview"
-import matter from "gray-matter"
-import { optimize } from "svgo"
-import sharp from "sharp"
-import path from "path"
-import { minify } from "html-minifier-terser"
-import favicons from "favicons"
+  writeFile
+} from "./util/index.js";
+import fs from "fs/promises";
+import handlebars from "handlebars";
+import { marked } from "marked";
+import markedCodePreview from "marked-code-preview";
+import matter from "gray-matter";
+import { optimize } from "svgo";
+import sharp from "sharp";
+import path from "path";
+import { minify } from "html-minifier-terser";
+import favicons from "favicons";
 
-const templateCache = new Map()
+const templateCache = new Map();
 
 function createMarkdownRenderer(meta) {
   return marked
@@ -27,71 +27,64 @@ function createMarkdownRenderer(meta) {
     .use({
       renderer: {
         image({ href, title, text }) {
-          const attrs = [`alt="${text}"`]
+          const attrs = [`alt="${text}"`];
 
-          const foundSrcSet = meta.resources.images[slugifyString(href)]
+          const foundSrcSet = meta.resources.images?.[slugifyString(href)];
 
           if (foundSrcSet) {
             const srcSetString = foundSrcSet.detail.srcSet
               .map(src => src.join(" "))
-              .join(", ")
-            const defaultSrc = foundSrcSet.detail.srcSet[0][0]
-            attrs.push(`src="${defaultSrc}"`)
-            attrs.push(`srcset="${srcSetString}"`)
-            attrs.push("sizes=\"(min-width: 800px) 40vw, 100vw\"")
+              .join(", ");
+            const defaultSrc = foundSrcSet.detail.srcSet[0][0];
+            attrs.push(`src="${defaultSrc}"`);
+            attrs.push(`srcset="${srcSetString}"`);
+            attrs.push('sizes="(min-width: 800px) 40vw, 100vw"');
             attrs.push(
-              `style="aspect-ratio: ${foundSrcSet.detail.aspectRatio}"`,
-            )
+              `style="aspect-ratio: ${foundSrcSet.detail.aspectRatio}"`
+            );
           } else {
-            attrs.push(`src="${href}"`)
+            attrs.push(`src="${href}"`);
           }
 
           if (title) {
-            attrs.push(`title="${title}"`)
+            attrs.push(`title="${title}"`);
           }
 
-          return `<img ${attrs.join(" ")} >`
-        },
-      },
-    })
+          return `<img ${attrs.join(" ")} >`;
+        }
+      }
+    });
 }
 async function findTemplatePath(templateDirs, templateName) {
-  const templatePath = await firstFound(
-    templateDirs,
-    `${templateName}.hbs`,
-  )
-  if (!templatePath) throw new Error(`Template not found: ${templateName}`)
-  return templatePath
+  const templatePath = await firstFound(templateDirs, `${templateName}.hbs`);
+  if (!templatePath) throw new Error(`Template not found: ${templateName}`);
+  return templatePath;
 }
 
 async function getTemplate(templatePath) {
   if (!templateCache.has(templatePath)) {
-    const templateContent = await fs.readFile(templatePath, "utf8")
+    const templateContent = await fs.readFile(templatePath, "utf8");
     templateCache.set(templatePath, {
       path: templatePath,
-      renderer: handlebars.compile(templateContent),
-    })
+      renderer: handlebars.compile(templateContent)
+    });
   }
-  return templateCache.get(templatePath)
+  return templateCache.get(templatePath);
 }
 
-export async function renderTemplate({
-  config,
-  meta,
-}) {
-  const templatePath = config.filePath || await findTemplatePath(
-    config.templateDirs,
-    config.template,
-  )
-  const fileOutputPath = config.fileOutputPath
-  const href = getHref(fileOutputPath, meta)
+export async function renderTemplate({ config, meta }) {
+  const templatePath =
+    config.filePath ||
+    (await findTemplatePath(config.templateDirs, config.template));
+  const fileOutputPath = config.fileOutputPath;
+  const href = getHref(fileOutputPath, meta);
 
-  const template = await getTemplate(templatePath)
+  const template = await getTemplate(templatePath);
   const html = template.renderer({
     ...meta,
     href,
-    ...config,
-  })
+    ...config
+  });
   if (config.writeOut) {
     const minifiedHtml = await minify(html, {
       collapseWhitespace: true,
@@ -99,188 +92,182 @@ export async function renderTemplate({
       removeRedundantAttributes: true,
       removeEmptyAttributes: true,
       minifyCSS: true,
-      minifyJS: true,
-    })
+      minifyJS: true
+    });
 
-    await writeFile(fileOutputPath, minifiedHtml)
+    await writeFile(fileOutputPath, minifiedHtml);
     return {
       detail: { html },
       deps: {
-        paths: [template.path],
+        paths: [template.path]
       },
       paths: [fileOutputPath],
-      ref: slugifyString(href),
-    }
+      ref: slugifyString(href)
+    };
   }
 
   return {
     detail: { html },
     deps: {
-      paths: [template.path],
+      paths: [template.path]
     },
-    ref: slugifyString(href),
-  }
+    ref: slugifyString(href)
+  };
 }
-export async function renderMarkdownToHtml({
-  config,
-  meta,
-}) {
-  const filePath = config.filePath
-  const fileOutputPath = config.fileOutputPath
-  const content = await fs.readFile(filePath, "utf8")
-  const { data, content: markdown } = matter(content)
-  const href = getHref(fileOutputPath, meta)
+export async function renderMarkdownToHtml({ config, meta }) {
+  const filePath = config.filePath;
+  const fileOutputPath = config.fileOutputPath;
+  const content = await fs.readFile(filePath, "utf8");
+  const { data, content: markdown } = matter(content);
+  const href = getHref(fileOutputPath, meta);
 
-  const renderer = createMarkdownRenderer(meta)
-  const html = renderer(markdown)
+  const renderer = createMarkdownRenderer(meta);
+  const html = renderer(markdown);
 
   return {
     detail: { ...data, href, content: html, fileOutputPath },
-    ref: slugifyString(filePath),
-  }
+    ref: slugifyString(filePath)
+  };
 }
-export async function renderMarkdownWithTemplate({
-  config,
-  meta,
-}) {
-  const filePath = config.filePath
-  const fileOutputPath = config.fileOutputPath
-  const content = await fs.readFile(filePath, "utf8")
-  const { data, content: markdown } = matter(content)
-  const templateName = data.template || config.defaultTemplate
-  const href = getHref(fileOutputPath, meta)
+export async function renderMarkdownWithTemplate({ config, meta }) {
+  const filePath = config.filePath;
+  const fileOutputPath = config.fileOutputPath;
+  const content = await fs.readFile(filePath, "utf8");
+  const { data, content: markdown } = matter(content);
+  const templateName = data.template || config.defaultTemplate;
+  const href = getHref(fileOutputPath, meta);
 
   if (!templateCache.has(templateName)) {
     const templatePath = await firstFound(
       config.templateDirs,
-      `${templateName}.hbs`,
-    )
-    if (!templatePath) throw new Error(`Template not found: ${templateName}`)
-    const templateContent = await fs.readFile(templatePath, "utf8")
+      `${templateName}.hbs`
+    );
+    if (!templatePath) throw new Error(`Template not found: ${templateName}`);
+    const templateContent = await fs.readFile(templatePath, "utf8");
     templateCache.set(templateName, {
       path: templatePath,
-      renderer: handlebars.compile(templateContent),
-    })
+      renderer: handlebars.compile(templateContent)
+    });
   }
-  const template = templateCache.get(templateName)
-  const renderer = createMarkdownRenderer(meta)
+  const template = templateCache.get(templateName);
+  const renderer = createMarkdownRenderer(meta);
   const html = template.renderer({
     ...data,
     ...meta,
     href,
-    content: renderer(markdown),
-  })
+    content: renderer(markdown)
+  });
   const minifiedHtml = await minify(html, {
     collapseWhitespace: true,
     removeComments: true,
     removeRedundantAttributes: true,
     removeEmptyAttributes: true,
     minifyCSS: true,
-    minifyJS: true,
-  })
+    minifyJS: true
+  });
 
-  await writeFile(fileOutputPath, minifiedHtml)
+  await writeFile(fileOutputPath, minifiedHtml);
 
   return {
     detail: { ...data, href },
     paths: [fileOutputPath],
     deps: {
-      paths: [template.path],
+      paths: [template.path]
     },
-    ref: slugifyString(fileOutputPath),
-  }
+    ref: slugifyString(fileOutputPath)
+  };
 }
 
 export async function compileSass({ config, meta }) {
-  const filePath = config.filePath
-  const fileOutputPath = config.fileOutputPath
-  const result = await sass.compileAsync(filePath, { style: "compressed" })
-  await writeFile(fileOutputPath, result.css)
+  const filePath = config.filePath;
+  const fileOutputPath = config.fileOutputPath;
+  const result = await sass.compileAsync(filePath, { style: "compressed" });
+  await writeFile(fileOutputPath, result.css);
   return {
     paths: [fileOutputPath],
     ref: slugifyString(fileOutputPath),
     detail: {
-      href: fileOutputPath.replace(meta.opts.outDir, ""),
+      href: fileOutputPath.replace(meta.opts.outDir, "")
     },
     deps: {
-      paths: [...result.loadedUrls.map(item => item.pathname)],
-    },
-  }
+      paths: [...result.loadedUrls.map(item => item.pathname)]
+    }
+  };
 }
 
 export async function optimiseSvg({ config }) {
-  const filePath = config.filePath
-  const fileOutputPath = config.fileOutputPath
-  const svgString = await fs.readFile(filePath, "utf8")
+  const filePath = config.filePath;
+  const fileOutputPath = config.fileOutputPath;
+  const svgString = await fs.readFile(filePath, "utf8");
   const result = optimize(svgString, {
-    plugins: ["preset-default"],
-  })
-  await writeFile(fileOutputPath, result.data)
+    plugins: ["preset-default"]
+  });
+  await writeFile(fileOutputPath, result.data);
   return {
     paths: [fileOutputPath],
-    ref: slugifyString(fileOutputPath),
-  }
+    ref: slugifyString(fileOutputPath)
+  };
 }
 
 export async function copy({ config }) {
-  const filePath = config.filePath
-  const fileOutputPath = config.fileOutputPath
-  await fs.mkdir(config.fileOutputDir, { recursive: true })
-  await fs.copyFile(filePath, fileOutputPath)
+  const filePath = config.filePath;
+  const fileOutputPath = config.fileOutputPath;
+  await fs.mkdir(config.fileOutputDir, { recursive: true });
+  await fs.copyFile(filePath, fileOutputPath);
   return {
     paths: [fileOutputPath],
-    ref: slugifyString(fileOutputPath),
-  }
+    ref: slugifyString(fileOutputPath)
+  };
 }
 
 export async function imageToWebP({ meta, config }) {
-  const filePath = config.filePath
-  const fileOutputDir = config.fileOutputDir
-  const sourceExtension = path.extname(filePath)
-  const outputExtension = config.outputFileExtension
-  const base = path.basename(filePath, sourceExtension)
-  await fs.mkdir(fileOutputDir, { recursive: true })
+  const filePath = config.filePath;
+  const fileOutputDir = config.fileOutputDir;
+  const sourceExtension = path.extname(filePath);
+  const outputExtension = config.outputFileExtension;
+  const base = path.basename(filePath, sourceExtension);
+  await fs.mkdir(fileOutputDir, { recursive: true });
 
-  const original = sharp(filePath)
-  const metadata = await original.metadata()
-  const { width, height } = metadata
+  const original = sharp(filePath);
+  const metadata = await original.metadata();
+  const { width, height } = metadata;
 
   if (!width || !height) {
-    throw new Error("Could not determine image dimensions")
+    throw new Error("Could not determine image dimensions");
   }
 
-  const aspectRatio = width / height
-  const name = config.uniqueFilenames ? base : `${base}-${generateRandomId()}`
+  const aspectRatio = width / height;
+  const name = config.uniqueFilenames ? base : `${base}-${generateRandomId()}`;
   const srcSet = await Promise.all(
     config.imageSizes.map(async size => {
-      const sizeNum = parseInt(size.replace("w", ""), 10)
+      const sizeNum = parseInt(size.replace("w", ""), 10);
       const outputFile = path.join(
         fileOutputDir,
-        `${name}-${sizeNum}${outputExtension}`,
-      )
+        `${name}-${sizeNum}${outputExtension}`
+      );
 
       await original
         .clone()
         .resize(sizeNum)
         .webp({ quality: config.quality })
-        .toFile(outputFile)
+        .toFile(outputFile);
 
-      return [getCleanPath(outputFile, meta), size]
-    }),
-  )
+      return [getCleanPath(outputFile, meta), size];
+    })
+  );
 
-  const imageRef = slugifyString(getCleanPath(path.join(filePath), meta))
+  const imageRef = slugifyString(getCleanPath(path.join(filePath), meta));
 
   return {
     paths: srcSet.map(src => src[0]),
     detail: { srcSet, aspectRatio },
-    ref: imageRef,
-  }
+    ref: imageRef
+  };
 }
 
 export async function generateFavicons({ meta, config }) {
-  const filePath = config.filePath
-  const fileOutputDir = config.fileOutputDir
+  const filePath = config.filePath;
+  const fileOutputDir = config.fileOutputDir;
   // Configuration for favicons package
   const configuration = {
     path: getCleanPath(fileOutputDir, meta), // Path for overriding default icons path
@@ -309,45 +296,45 @@ export async function generateFavicons({ meta, config }) {
       appleStartup: true,
       favicons: true,
       windows: true,
-      yandex: true,
-    },
-  }
+      yandex: true
+    }
+  };
   try {
-    const response = await favicons(filePath, configuration)
+    const response = await favicons(filePath, configuration);
 
     // Write all generated images to disk
     await Promise.all(
       response.images.map(async image => {
-        const outputPath = path.join(fileOutputDir, image.name)
-        await writeFile(outputPath, image.contents)
-      }),
-    )
+        const outputPath = path.join(fileOutputDir, image.name);
+        await writeFile(outputPath, image.contents);
+      })
+    );
 
     // Write all generated files (manifests, etc.) to disk
     await Promise.all(
       response.files.map(async file => {
-        const outputPath = path.join(fileOutputDir, file.name)
-        await writeFile(outputPath, file.contents)
-      }),
-    )
+        const outputPath = path.join(fileOutputDir, file.name);
+        await writeFile(outputPath, file.contents);
+      })
+    );
 
     // Combine HTML meta tags
-    const htmlMeta = response.html.join("\n    ")
+    const htmlMeta = response.html.join("\n    ");
     return {
       detail: {
-        htmlMeta,
+        htmlMeta
       },
       paths: [
         ...response.images.map(img =>
-          getCleanPath(path.join(fileOutputDir, img.name), meta),
+          getCleanPath(path.join(fileOutputDir, img.name), meta)
         ),
         ...response.files.map(file =>
-          getCleanPath(path.join(fileOutputDir, file.name), meta),
-        ),
+          getCleanPath(path.join(fileOutputDir, file.name), meta)
+        )
       ],
-      ref: "metatags",
-    }
+      ref: "metatags"
+    };
   } catch (error) {
-    throw new Error(`Failed to generate favicons: ${error.message}`)
+    throw new Error(`Failed to generate favicons: ${error.message}`);
   }
 }

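Every processor in this file follows the contract that `runTask` consumes: it receives the tracked `{ meta, config }` state and returns `{ detail, paths, deps, ref }`. A hypothetical custom processor is sketched below; the name and what it writes are invented, while the return shape and the `config.filePath`/`config.fileOutputPath` fields are taken from this change.

```js
import { writeFile, slugifyString } from "./util/index.js";

// Hypothetical processor: dumps the resolved run options to a JSON file.
// config.filePath / config.fileOutputPath are supplied by expandFileTask,
// and the returned ref keys this result into the resources map built in processTask.
export async function writeOptionsSnapshot({ config, meta }) {
  const fileOutputPath = config.fileOutputPath;
  await writeFile(fileOutputPath, JSON.stringify(meta.opts, null, 2));
  return {
    detail: { href: fileOutputPath.replace(meta.opts.outDir, "") },
    paths: [fileOutputPath],                            // runTask strips outDir for reporting
    deps: { paths: [config.filePath].filter(Boolean) }, // extra files the cache should watch
    ref: slugifyString(fileOutputPath),
  };
}
```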
+ 15 - 4
src/util/file-system.js

@@ -1,5 +1,6 @@
 import fs from "node:fs/promises"
 import path from "path"
+import os from "os"
 import { glob } from "glob"
 
 export async function fileExists(filePath) {
@@ -44,7 +45,7 @@ export async function readFilesByGlob(globConfigs) {
         ignore: [],
         ...globConfig,
       }
-      const matches = await glob(pattern, {
+      const matches = await glob(expandTilde(pattern), {
         ignore,
         dot,
       })
@@ -56,6 +57,11 @@ export async function readFilesByGlob(globConfigs) {
   return [...new Set(files)]
 }
 
+export function expandTilde(path) {
+  if (!path.startsWith("~")) return path
+  return path.replace(/^~(?=$|\/|\\)/, os.homedir())
+}
+
 export async function checkFilesExist(files, baseDir) {
   const filesToCheck = Array.isArray(files) ? files : [files]
   const fileCheckResults = await Promise.all(
@@ -65,9 +71,14 @@ export async function checkFilesExist(files, baseDir) {
       return { filePath, exists }
     }),
   )
-  return fileCheckResults.reduce((sorted, { filePath, exists }) => {
-    return exists ? { ...sorted, present: [...sorted.present, filePath] } : { ...sorted, absent: [...sorted.absent, filePath] }
-  }, { present: [], absent: [] })
+  return fileCheckResults.reduce(
+    (sorted, { filePath, exists }) => {
+      return exists
+        ? { ...sorted, present: [...sorted.present, filePath] }
+        : { ...sorted, absent: [...sorted.absent, filePath] }
+    },
+    { present: [], absent: [] },
+  )
 }
 
 export async function writeFile(filePath, content) {
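
The new `expandTilde` helper only rewrites a leading `~` when it is followed by a path separator or the end of the string. A few illustrative calls (importing it via `./util/index.js`, as `src/lib.js` does, is an assumption about the barrel file):

```js
import { expandTilde } from "./util/index.js"; // assumed re-export alongside the other fs helpers

expandTilde("~/sites/blog/content"); // home directory + "/sites/blog/content" (via os.homedir())
expandTilde("~");                    // the home directory itself
expandTilde("./notes/~draft.md");    // unchanged: "~" is not the first character
expandTilde("~alice/www");           // unchanged: the "~user" form is not expanded
```

This keeps `stripPaths` entries and glob patterns such as `~/content/**/*.md` usable even when the shell has not already expanded the tilde, since `expandFileTask` and `readFilesByGlob` now run them through `expandTilde` first.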