2 Angajamente 8988170eaf ... 1a4361527a

Autor SHA1 Mesaj Data
  Craig Fletcher 1a4361527a Add cache support 5 luni în urmă
  Craig Fletcher 8988170eaf Add cache support 5 luni în urmă
6 a modificat fișierele cu 349 adăugiri și 357 ștergeri
  1. 0 5
      eslint.config.js
  2. 22 20
      src/defaults.js
  3. 17 18
      src/index.js
  4. 60 62
      src/lib.js
  5. 101 103
      src/processors.js
  6. 149 149
      src/util.js

+ 0 - 5
eslint.config.js

@@ -37,11 +37,6 @@ export default [
       "prefer-const": "error",
       quotes: ["error", "double"],
       semi: ["error", "never"],
-      "sort-imports": ["error", { ignoreCase: true }],
-      "sort-keys": [
-        "error", "asc", { caseSensitive: true, natural: true },
-      ],
-      "sort-vars": ["error", { ignoreCase: true }],
     },
   },
 ]

+ 22 - 20
src/defaults.js

@@ -1,11 +1,11 @@
 import {
   compileSass,
-  optimiseSvg,
+  copy,
+  generateFavicons,
   imageToWebP,
+  optimiseSvg,
   renderMarkdownWithTemplate,
-  copy,
-  generateFavicons
-} from "./processors.js";
+} from "./processors.js"
 
 export const tasks = [
   {
@@ -14,7 +14,7 @@ export const tasks = [
     stripPaths: ["styles/"],
     outputDir: "static/styles/",
     outputFileExtension: ".css",
-    processor: compileSass
+    processor: compileSass,
   },
   {
     name: "icons",
@@ -22,7 +22,7 @@ export const tasks = [
     stripPaths: ["images/"],
     outputDir: "static/",
     outputFileExtension: ".svg",
-    processor: optimiseSvg
+    processor: optimiseSvg,
   },
   {
     name: "images",
@@ -30,31 +30,33 @@ export const tasks = [
     stripPaths: ["images/content/"],
     outputDir: "images/",
     outputFileExtension: ".webp",
-    imageSizes: ["640w", "768w", "1024w", "1366w", "1600w", "1920w", "2560w"],
+    imageSizes: [
+      "640w", "768w", "1024w", "1366w", "1600w", "1920w", "2560w",
+    ],
     quality: 80,
-    processor: imageToWebP
+    processor: imageToWebP,
   },
   {
     name: "favicons",
     inputFiles: [{ pattern: "images/favicon/*" }],
     stripPaths: ["images/favicon/"],
     outputDir: "static/meta/",
-    processor: generateFavicons
+    processor: generateFavicons,
   },
   {
     name: "pages",
     inputFiles: [{ pattern: "markdown/*.md" }],
     stripPaths: ["markdown/"],
     outputFileExtension: ".html",
-    processor: renderMarkdownWithTemplate
+    processor: renderMarkdownWithTemplate,
   },
   {
     name: "static files",
     inputFiles: [{ pattern: "static/*" }],
     stripPaths: ["static/"],
-    processor: copy
-  }
-];
+    processor: copy,
+  },
+]
 
 export const opts = {
   outDir: "dist/",
@@ -62,7 +64,7 @@ export const opts = {
   cacheDir: ".cache",
   defaultTemplate: "default",
   include: {
-    styles: [{ pattern: "~/.rhedyn/styles/*.scss" }]
+    styles: [{ pattern: "~/.rhedyn/styles/*.scss" }],
   },
   templateDirs: ["templates/", "~/.rhedyn/templates/"],
   clean: true,
@@ -74,13 +76,13 @@ export const opts = {
     url: "https://www.leakypixel.net",
     language: "en-GB",
     backgroundColor: "#22242c",
-    themeColor: "#f00"
-  }
-};
+    themeColor: "#f00",
+  },
+}
 
 const defaults = {
   opts,
-  tasks
-};
+  tasks,
+}
 
-export default defaults;
+export default defaults

+ 17 - 18
src/index.js

@@ -2,9 +2,9 @@
 
 import * as defaultConfig from "./defaults.js"
 import { getConfig, processFiles } from "./lib.js"
-import { performance } from 'node:perf_hooks';
+import { performance } from "node:perf_hooks"
 
-const startTime = performance.now();
+const startTime = performance.now()
 const { opts, tasks } = await getConfig() || { ...defaultConfig }
 console.info(`[Start] Processing ${tasks.length} tasks`)
 console.info(`[Info] Running directory: ${opts.runDir}`)
@@ -12,7 +12,7 @@ console.info(`[Info] Output directory: ${opts.outDir}`)
 if (opts.cacheDir) {
   console.info(`[Info] Cache directory: ${opts.cacheDir}`)
 } else {
-  console.warn(`[Warn] Cache disabled`)
+  console.warn("[Warn] Cache disabled")
 }
 async function runTask(meta, task) {
   const allResults = await processFiles(task, meta)
@@ -22,34 +22,33 @@ async function runTask(meta, task) {
     ...meta,
     resources: {
       ...meta.resources,
-      [task.name]: allResults.reduce((obj, path) => ({...obj, [path.ref]: path}), {}),
+      [task.name]: allResults.reduce((obj, path) => ({ ...obj, [path.ref]: path }), {}),
     },
   }
-  return {taskResult, cachedResults, processedResults};
+  return { taskResult, cachedResults, processedResults }
 }
 
 const taskRunner = tasks.reduce(
   async (metaPromise, task) => {
-    const initTime = performance.now()
-    const meta = await metaPromise;
-    const startTime = performance.now();
+    const meta = await metaPromise
+    const startTime = performance.now()
     console.group(`[Task] ${task.name}`)
     console.log(`[Start] patterns: ${JSON.stringify(task.inputFiles)}`)
     if (meta.opts.debug) {
-      console.log(task.name, "in meta", JSON.stringify(meta, null, 2));
+      console.log(task.name, "in meta", JSON.stringify(meta, null, 2))
     }
-    const {taskResult, cachedResults, processedResults} = await runTask(meta, task)
-    const endTime = performance.now();
-    const timeTaken = (endTime - startTime);
-    const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`;
+    const { taskResult, cachedResults, processedResults } = await runTask(meta, task)
+    const endTime = performance.now()
+    const timeTaken = (endTime - startTime)
+    const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
     console.log(`[Done] processed: ${processedResults.length} | from cache: ${cachedResults.length} | ${hrTime}`)
     console.groupEnd()
-    return taskResult;
+    return taskResult
   },
   Promise.resolve({ opts }),
 )
-await taskRunner;
-const endTime = performance.now();
-const timeTaken = (endTime - startTime);
-const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`;
+await taskRunner
+const endTime = performance.now()
+const timeTaken = (endTime - startTime)
+const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
 console.log(`[Done] ${tasks.length} tasks in ${hrTime}`)

+ 60 - 62
src/lib.js

@@ -1,48 +1,42 @@
 import {
+  checkCache,
+  createTrackedObject,
+  fileExists,
   readFilesByGlob,
   removeBasePaths,
-  resolvePath,
   removeCwd,
   replaceFileExtension,
-  createTrackedObject,
-  getValueAtPath,
   slugifyString,
-  hashObject,
-  getFileHash,
-  checkPathExists,
-  checkCache,
   updateCache,
-  fileExists
-} from "./util.js";
-import fs from "node:fs/promises";
-import { writeFile } from "node:fs/promises";
-import path from "path";
-import process from "node:process";
+} from "./util.js"
+import fs from "node:fs/promises"
+import path from "path"
+import process from "node:process"
 
 export async function getConfig() {
-  const configPath = path.join(process.cwd(), "rhedyn.config.js");
-  const configFileExists = await fileExists(configPath);
+  const configPath = path.join(process.cwd(), "rhedyn.config.js")
+  const configFileExists = await fileExists(configPath)
   if (configFileExists) {
     try {
-      const config = await import(configPath);
-      return config.default || config;
+      const config = await import(configPath)
+      return config.default || config
     } catch (err) {
-      console.error("Error reading rhedyn.config.js:", err);
-      throw new Error("Failed reading config file");
+      console.error("Error reading rhedyn.config.js:", err)
+      throw new Error("Failed reading config file")
     }
   } else {
-    return;
+    return
   }
 }
 
 export async function processFiles(config, meta) {
-  const includes = meta.opts?.include?.[config.name] || [];
-  const patternsToInclude = [...config.inputFiles, ...includes];
-  const filesToProcess = await readFilesByGlob(patternsToInclude);
-  const pathsToStrip = config.stripPaths || [];
-  const outputDir = config.outputDir || "";
-  const configPathDeps = config.deps?.paths || [];
-  const configStateDeps = config.deps?.state || [];
+  const includes = meta.opts?.include?.[config.name] || []
+  const patternsToInclude = [...config.inputFiles, ...includes]
+  const filesToProcess = await readFilesByGlob(patternsToInclude)
+  const pathsToStrip = config.stripPaths || []
+  const outputDir = config.outputDir || ""
+  const configPathDeps = config.deps?.paths || []
+  const configStateDeps = config.deps?.state || []
 
   return await Promise.all(
     filesToProcess.map(async filePath => {
@@ -51,19 +45,19 @@ export async function processFiles(config, meta) {
         outputDir,
         replaceFileExtension(
           removeBasePaths(pathsToStrip, filePath),
-          config.outputFileExtension
-        )
-      );
+          config.outputFileExtension,
+        ),
+      )
 
       if (meta.opts.debug) {
-        console.log("in filePath", filePath);
-        console.log("out fileOutputPath", fileOutputPath);
+        console.log("in filePath", filePath)
+        console.log("out fileOutputPath", fileOutputPath)
       }
 
-      const fileOutputDir = path.dirname(fileOutputPath);
-      const exists = await fileExists(fileOutputDir);
+      const fileOutputDir = path.dirname(fileOutputPath)
+      const exists = await fileExists(fileOutputDir)
       if (!exists) {
-        await fs.mkdir(fileOutputDir, { recursive: true });
+        await fs.mkdir(fileOutputDir, { recursive: true })
       }
 
       const stateObject = {
@@ -71,66 +65,70 @@ export async function processFiles(config, meta) {
         meta,
         fileOutputDir,
         fileOutputPath,
-        config
-      };
+        config,
+      }
 
-      let cache = {};
+      let cache = {}
       if (meta.opts.cacheDir) {
         cache = await checkCache(
           slugifyString(filePath),
           stateObject,
-          meta.opts
-        );
+          meta.opts,
+        )
 
         if (cache && cache.hit) {
-          console.log(`[Info] Loaded cache for ${filePath}`);
-          return { ...cache.taskResult, fromCache: true };
+          console.log(`[Info] Loaded cache for ${filePath}`)
+          return { ...cache.taskResult, fromCache: true }
         }
-        console.log(`[Info] Cache miss for ${filePath} (${cache.reason})`);
+        console.log(`[Info] Cache miss for ${filePath} (${cache.reason})`)
       }
 
       const state = meta.opts.cacheDir
         ? createTrackedObject(stateObject)
-        : { proxy: stateObject };
+        : { proxy: stateObject }
 
       const {
         result,
         detail,
         written,
         deps: processorDeps,
-        ref
-      } = await config.processor(state.proxy);
+        ref,
+      } = await config.processor(state.proxy)
 
       if (!written) {
         await fs.writeFile(fileOutputPath, result, {
-          encoding: "utf8"
-        });
+          encoding: "utf8",
+        })
       }
 
       if (meta.opts.debug) {
-        console.log(filePath, "out detail", detail);
-        console.log(filePath, "out result", result);
+        console.log(filePath, "out detail", detail)
+        console.log(filePath, "out result", result)
       }
-      const taskRef = ref ? slugifyString(ref) : slugifyString(fileOutputPath);
+      const taskRef = ref ? slugifyString(ref) : slugifyString(fileOutputPath)
       const taskResult = {
         detail,
         path: written ? result : [fileOutputPath.replace(meta.opts.outDir, "")],
-        ref: taskRef
-      };
+        ref: taskRef,
+      }
       if (meta.opts.cacheDir) {
-        const processorPathDeps = processorDeps?.paths || [];
-        const processorStateDeps = processorDeps?.state || [];
+        const processorPathDeps = processorDeps?.paths || []
+        const processorStateDeps = processorDeps?.state || []
         await updateCache(
           meta.opts.cacheDir,
           slugifyString(filePath),
-          removeCwd([...configPathDeps, ...processorPathDeps, filePath]),
-          [...configStateDeps, ...processorStateDeps, ...state.accessed],
+          removeCwd([
+            ...configPathDeps, ...processorPathDeps, filePath,
+          ]),
+          [
+            ...configStateDeps, ...processorStateDeps, ...state.accessed,
+          ],
           taskResult,
-          cache.updates
-        );
+          cache.updates,
+        )
       }
 
-      return taskResult;
-    })
-  );
+      return taskResult
+    }),
+  )
 }

+ 101 - 103
src/processors.js

@@ -1,24 +1,24 @@
-import * as sass from "sass";
+import * as sass from "sass"
 import {
   firstFound,
-  stripFileExtension,
+  generateRandomId,
   getCleanPath,
   getHref,
   slugifyString,
-  generateRandomId
-} from "./util.js";
-import fs from "fs/promises";
-import handlebars from "handlebars";
-import { marked } from "marked";
-import markedCodePreview from "marked-code-preview";
-import matter from "gray-matter";
-import { optimize } from "svgo";
-import sharp from "sharp";
-import path from "path";
-import { minify } from "html-minifier-terser";
-import favicons from "favicons";
-
-const templateCache = new Map();
+  stripFileExtension,
+} from "./util.js"
+import fs from "fs/promises"
+import handlebars from "handlebars"
+import { marked } from "marked"
+import markedCodePreview from "marked-code-preview"
+import matter from "gray-matter"
+import { optimize } from "svgo"
+import sharp from "sharp"
+import path from "path"
+import { minify } from "html-minifier-terser"
+import favicons from "favicons"
+
+const templateCache = new Map()
 
 function createMarkdownRenderer(meta) {
   return marked
@@ -27,151 +27,149 @@ function createMarkdownRenderer(meta) {
     .use({
       renderer: {
         image({ href, title, text }) {
-          const hrefWithoutExt = stripFileExtension(href);
-          const attrs = [`alt="${text}"`];
+          const attrs = [`alt="${text}"`]
 
-          const foundSrcSet = meta.resources.images[slugifyString(href)];
+          const foundSrcSet = meta.resources.images[slugifyString(href)]
 
           if (foundSrcSet) {
             const srcSetString = foundSrcSet.detail.srcSet
               .map(src => src.join(" "))
-              .join(", ");
-            const defaultSrc = foundSrcSet.detail.srcSet[0][0];
-            attrs.push(`src="${defaultSrc}"`);
-            attrs.push(`srcset="${srcSetString}"`);
-            attrs.push(`sizes="(min-width: 800px) 40vw, 100vw"`);
+              .join(", ")
+            const defaultSrc = foundSrcSet.detail.srcSet[0][0]
+            attrs.push(`src="${defaultSrc}"`)
+            attrs.push(`srcset="${srcSetString}"`)
+            attrs.push("sizes=\"(min-width: 800px) 40vw, 100vw\"")
             attrs.push(
-              `style="aspect-ratio: ${foundSrcSet.detail.aspectRatio}"`
-            );
+              `style="aspect-ratio: ${foundSrcSet.detail.aspectRatio}"`,
+            )
           } else {
-            attrs.push(`src="${href}"`);
+            attrs.push(`src="${href}"`)
           }
 
           if (title) {
-            attrs.push(`title="${title}"`);
+            attrs.push(`title="${title}"`)
           }
 
-          return `<img ${attrs.join(" ")} >`;
-        }
-      }
-    });
+          return `<img ${attrs.join(" ")} >`
+        },
+      },
+    })
 }
 
 export async function renderMarkdownWithTemplate({
   filePath,
   meta,
-  fileOutputDir,
-  fileOutputPath
+  fileOutputPath,
 }) {
-  const content = await fs.readFile(filePath, "utf8");
-  const { data, content: markdown } = matter(content);
-  const templateName = data.template || meta.opts.defaultTemplate;
-  const href = getHref(fileOutputPath, meta);
+  const content = await fs.readFile(filePath, "utf8")
+  const { data, content: markdown } = matter(content)
+  const templateName = data.template || meta.opts.defaultTemplate
+  const href = getHref(fileOutputPath, meta)
 
   if (!templateCache.has(templateName)) {
     const templatePath = await firstFound(
       meta.opts.templateDirs,
-      `${templateName}.hbs`
-    );
-    if (!templatePath) throw new Error(`Template not found: ${templateName}`);
-    const templateContent = await fs.readFile(templatePath, "utf8");
+      `${templateName}.hbs`,
+    )
+    if (!templatePath) throw new Error(`Template not found: ${templateName}`)
+    const templateContent = await fs.readFile(templatePath, "utf8")
     templateCache.set(templateName, {
       path: templatePath,
-      renderer: handlebars.compile(templateContent)
-    });
+      renderer: handlebars.compile(templateContent),
+    })
   }
-  const template = templateCache.get(templateName);
-  const renderer = createMarkdownRenderer(meta);
+  const template = templateCache.get(templateName)
+  const renderer = createMarkdownRenderer(meta)
   const html = template.renderer({
     ...data,
     ...meta,
     href,
-    content: renderer(markdown)
-  });
+    content: renderer(markdown),
+  })
   const minifiedHtml = await minify(html, {
     collapseWhitespace: true,
     removeComments: true,
     removeRedundantAttributes: true,
     removeEmptyAttributes: true,
     minifyCSS: true,
-    minifyJS: true
-  });
+    minifyJS: true,
+  })
 
   return {
     detail: { ...data, href },
     result: minifiedHtml,
     deps: {
-      paths: [template.path]
-    }
-  };
+      paths: [template.path],
+    },
+  }
 }
 
 export async function compileSass({ filePath }) {
-  const result = await sass.compileAsync(filePath, { style: "compressed" });
+  const result = await sass.compileAsync(filePath, { style: "compressed" })
   return {
     result: result.css,
     deps: {
-      paths: [...result.loadedUrls.map(item => item.pathname)]
-    }
-  };
+      paths: [...result.loadedUrls.map(item => item.pathname)],
+    },
+  }
 }
 
 export async function optimiseSvg({ filePath }) {
-  const svgString = await fs.readFile(filePath, "utf8");
+  const svgString = await fs.readFile(filePath, "utf8")
   const result = optimize(svgString, {
-    plugins: ["preset-default"]
-  });
+    plugins: ["preset-default"],
+  })
   return {
-    result: result.data
-  };
+    result: result.data,
+  }
 }
 
 export async function copy({ filePath }) {
-  const fileContent = await fs.readFile(filePath, "utf8");
-  return { result: fileContent };
+  const fileContent = await fs.readFile(filePath, "utf8")
+  return { result: fileContent }
 }
 
 export async function imageToWebP({ filePath, meta, fileOutputDir, config }) {
-  const sourceExtension = path.extname(filePath);
-  const outputExtension = config.outputFileExtension;
-  const base = path.basename(filePath, sourceExtension);
+  const sourceExtension = path.extname(filePath)
+  const outputExtension = config.outputFileExtension
+  const base = path.basename(filePath, sourceExtension)
 
-  const original = sharp(filePath);
-  const metadata = await original.metadata();
-  const { width, height } = metadata;
+  const original = sharp(filePath)
+  const metadata = await original.metadata()
+  const { width, height } = metadata
 
   if (!width || !height) {
-    throw new Error("Could not determine image dimensions");
+    throw new Error("Could not determine image dimensions")
   }
 
-  const aspectRatio = width / height;
-  const name = config.uniqueFilenames ? base : `${base}-${generateRandomId()}`;
+  const aspectRatio = width / height
+  const name = config.uniqueFilenames ? base : `${base}-${generateRandomId()}`
   const srcSet = await Promise.all(
     config.imageSizes.map(async size => {
-      const sizeNum = parseInt(size.replace("w", ""), 10);
+      const sizeNum = parseInt(size.replace("w", ""), 10)
       const outputFile = path.join(
         fileOutputDir,
-        `${name}-${sizeNum}${outputExtension}`
-      );
+        `${name}-${sizeNum}${outputExtension}`,
+      )
 
       await original
         .clone()
         .resize(sizeNum)
         .webp({ quality: config.quality })
-        .toFile(outputFile);
+        .toFile(outputFile)
 
-      return [getCleanPath(outputFile, meta), size];
-    })
-  );
+      return [getCleanPath(outputFile, meta), size]
+    }),
+  )
 
-  const imageRef = getCleanPath(path.join(filePath), meta);
+  const imageRef = getCleanPath(path.join(filePath), meta)
 
   return {
     result: srcSet.map(src => src[0]),
     detail: { imageRef, srcSet, aspectRatio },
     written: true,
-    ref: imageRef
-  };
+    ref: imageRef,
+  }
 }
 
 export async function generateFavicons({ filePath, meta, fileOutputDir }) {
@@ -203,46 +201,46 @@ export async function generateFavicons({ filePath, meta, fileOutputDir }) {
       appleStartup: true,
       favicons: true,
       windows: true,
-      yandex: true
-    }
-  };
+      yandex: true,
+    },
+  }
   try {
-    const response = await favicons(filePath, configuration);
+    const response = await favicons(filePath, configuration)
 
     // Write all generated images to disk
     await Promise.all(
       response.images.map(async image => {
-        const outputPath = path.join(fileOutputDir, image.name);
-        await fs.writeFile(outputPath, image.contents);
-      })
-    );
+        const outputPath = path.join(fileOutputDir, image.name)
+        await fs.writeFile(outputPath, image.contents)
+      }),
+    )
 
     // Write all generated files (manifests, etc.) to disk
     await Promise.all(
       response.files.map(async file => {
-        const outputPath = path.join(fileOutputDir, file.name);
-        await fs.writeFile(outputPath, file.contents);
-      })
-    );
+        const outputPath = path.join(fileOutputDir, file.name)
+        await fs.writeFile(outputPath, file.contents)
+      }),
+    )
 
     // Combine HTML meta tags
-    const htmlMeta = response.html.join("\n    ");
+    const htmlMeta = response.html.join("\n    ")
     return {
       detail: {
-        htmlMeta
+        htmlMeta,
       },
       result: [
         ...response.images.map(img =>
-          getCleanPath(path.join(fileOutputDir, img.name), meta)
+          getCleanPath(path.join(fileOutputDir, img.name), meta),
         ),
         ...response.files.map(file =>
-          getCleanPath(path.join(fileOutputDir, file.name), meta)
-        )
+          getCleanPath(path.join(fileOutputDir, file.name), meta),
+        ),
       ],
       written: true,
-      ref: "metatags"
-    };
+      ref: "metatags",
+    }
   } catch (error) {
-    throw new Error(`Failed to generate favicons: ${error.message}`);
+    throw new Error(`Failed to generate favicons: ${error.message}`)
   }
 }

+ 149 - 149
src/util.js

@@ -1,37 +1,37 @@
-import fs from "node:fs/promises";
-import os from "node:os";
-import path from "path";
-import { glob } from "glob";
-import { createReadStream } from "fs";
-import { createHash } from "crypto";
-import stableStringify from "json-stable-stringify";
+import fs from "node:fs/promises"
+import os from "node:os"
+import path from "path"
+import { glob } from "glob"
+import { createHash } from "crypto"
+import { createReadStream } from "fs"
+import stableStringify from "json-stable-stringify"
 
 export async function fileExists(filePath) {
   try {
-    const stats = await fs.stat(filePath);
-    return true;
+    await fs.stat(filePath)
+    return true
   } catch (err) {
     if (err.code === "ENOENT") {
-      return false;
+      return false
     }
-    throw err; // re-throw other errors
+    throw err // re-throw other errors
   }
 }
 export async function readDirectoryRecursively(dir, files = []) {
-  const exists = await fileExists(dir);
+  const exists = await fileExists(dir)
   if (!exists) {
-    return files;
+    return files
   }
-  const contents = await fs.readdir(dir, { withFileTypes: true });
+  const contents = await fs.readdir(dir, { withFileTypes: true })
   for (const item of contents) {
-    const itemPath = path.join(dir, item.name);
+    const itemPath = path.join(dir, item.name)
     if (item.isDirectory()) {
-      readDirectoryRecursively(itemPath, files);
+      readDirectoryRecursively(itemPath, files)
     } else {
-      files.push(itemPath);
+      files.push(itemPath)
     }
   }
-  return files;
+  return files
 }
 // type InputConfig
 // {
@@ -44,129 +44,129 @@ export async function readFilesByGlob(globConfigs) {
       const { pattern, ignore, dot } = {
         dot: false,
         ignore: [],
-        ...globConfig
-      };
+        ...globConfig,
+      }
       const matches = await glob(pattern, {
         ignore,
-        dot
-      });
-      return [...(await existingMatches), ...matches];
+        dot,
+      })
+      return [...(await existingMatches), ...matches]
     },
-    []
-  );
-  const files = await matchPromises;
-  return [...new Set(files)];
+    [],
+  )
+  const files = await matchPromises
+  return [...new Set(files)]
 }
 
 export function resolvePath(unresolvedPath) {
-  return path.resolve(unresolvedPath.replace(/^~/, os.homedir()));
+  return path.resolve(unresolvedPath.replace(/^~/, os.homedir()))
 }
 
 export async function firstFound(dirs, fileName) {
   for (const dir of dirs) {
-    const filePath = resolvePath(path.join(dir, fileName));
-    const exists = await fileExists(filePath);
+    const filePath = resolvePath(path.join(dir, fileName))
+    const exists = await fileExists(filePath)
     if (exists) {
-      return filePath;
+      return filePath
     }
   }
-  return null;
+  return null
 }
 
 export function removeCwd(paths) {
-  const cwd = `${process.cwd()}/`;
-  return paths.map(path => path.replace(cwd, ""));
+  const cwd = `${process.cwd()}/`
+  return paths.map(path => path.replace(cwd, ""))
 }
 
 export function removeBasePaths(baseDirs, fullPath) {
   return baseDirs.reduce((cleanedPath, dir) => {
-    return cleanedPath.replace(dir, "");
-  }, fullPath);
+    return cleanedPath.replace(dir, "")
+  }, fullPath)
 }
 
 export function replaceFileExtension(filePath, newExtension) {
   if (!newExtension) {
-    return filePath;
+    return filePath
   }
-  return `${stripFileExtension(filePath)}${newExtension}`;
+  return `${stripFileExtension(filePath)}${newExtension}`
 }
 
 export function stripFileExtension(filePath) {
   return path.join(
     path.dirname(filePath),
-    path.basename(filePath, path.extname(filePath))
-  );
+    path.basename(filePath, path.extname(filePath)),
+  )
 }
 
 export function getCleanPath(filePath, meta) {
-  return filePath.replace(meta.opts.runDir, "").replace(meta.opts.outDir, "/");
+  return filePath.replace(meta.opts.runDir, "").replace(meta.opts.outDir, "/")
 }
 
 export function getHref(filePath, meta) {
-  const route = getCleanPath(filePath, meta);
+  const route = getCleanPath(filePath, meta)
   if (route.includes("index.html")) {
-    return route.replace("index.html", "");
+    return route.replace("index.html", "")
   }
-  return route.replace(".html", "");
+  return route.replace(".html", "")
 }
 
 function stringifyPathPart(part) {
-  return typeof part === "symbol" ? part.toString() : String(part);
+  return typeof part === "symbol" ? part.toString() : String(part)
 }
 
 function trackPropertyAccessDeep(obj, path = [], accessed = new Set()) {
   return new Proxy(obj, {
     get(target, prop, receiver) {
-      const fullPath = [...path, prop].map(stringifyPathPart).join(".");
-      const value = Reflect.get(target, prop, receiver);
+      const fullPath = [...path, prop].map(stringifyPathPart).join(".")
+      const value = Reflect.get(target, prop, receiver)
 
       if (typeof target === "object" && target.hasOwnProperty(prop)) {
-        accessed.add({ path: fullPath, value });
+        accessed.add({ path: fullPath, value })
       }
 
       // Recursively wrap if value is an object and not null
       if (value && typeof value === "object") {
-        return trackPropertyAccessDeep(value, [...path, prop], accessed);
+        return trackPropertyAccessDeep(value, [...path, prop], accessed)
       }
 
-      return value;
-    }
-  });
+      return value
+    },
+  })
 }
 
 export function createTrackedObject(obj) {
-  const accessed = new Set();
-  const proxy = trackPropertyAccessDeep(obj, [], accessed);
-  return { proxy, accessed };
+  const accessed = new Set()
+  const proxy = trackPropertyAccessDeep(obj, [], accessed)
+  return { proxy, accessed }
 }
 
 export function getDeepestPropertiesForKey(paths, key) {
   // Sort paths to make prefix comparison easier
   const sorted = paths.slice().sort((a, b) => {
     if (a[key] < b[key]) {
-      return -1;
+      return -1
     }
     if (a[key] > b[key]) {
-      return 1;
+      return 1
     }
-    return 0;
-  });
-  const result = [];
+    return 0
+  })
+  const result = []
 
   for (let i = 0; i < sorted.length; i++) {
-    const current = sorted[i];
-    const next = sorted[i + 1];
+    const current = sorted[i]
+    const next = sorted[i + 1]
     // If the next path doesn't start with the current + a dot, it's a leaf node
-    const nextKey = next?.[key];
-    const currentKey = current[key];
+    const nextKey = next?.[key]
+    const currentKey = current[key]
     if (nextKey !== currentKey) {
       if (!next || !next[key].startsWith(current[key] + ".")) {
-        result.push(current);
+        result.push(current)
       }
     }
   }
 
-  return result;
+  return result
 }
 
 export function slugifyString(str) {
@@ -177,120 +177,120 @@ export function slugifyString(str) {
     .replace(/\s+/g, "-") // Replace whitespace with dashes
     .replace(/-+/g, "-") // Collapse multiple dashes
     .replace(/\./g, "-") // Replace dots with dashes
-    .replace(/^-+|-+$/g, ""); // Trim leading/trailing dashes
+    .replace(/^-+|-+$/g, "") // Trim leading/trailing dashes
 }
 
 export function getValueAtPath(obj, path) {
-  const parts = path.split(".");
-  let val = obj;
+  const parts = path.split(".")
+  let val = obj
   for (const part of parts) {
-    val = val?.[part];
-    if (val === undefined) break;
+    val = val?.[part]
+    if (val === undefined) break
   }
-  return val;
+  return val
 }
 
 export function hashObject(obj) {
-  const str = stableStringify(obj);
+  const str = stableStringify(obj)
   return createHash("md5")
     .update(str)
-    .digest("hex");
+    .digest("hex")
 }
 
 export async function getFileHash(filePath, algorithm = "md5") {
   return new Promise((resolve, reject) => {
-    const hash = createHash(algorithm);
-    const stream = createReadStream(filePath);
+    const hash = createHash(algorithm)
+    const stream = createReadStream(filePath)
 
-    stream.on("error", reject);
-    stream.on("data", chunk => hash.update(chunk));
-    stream.on("end", () => resolve(hash.digest("hex")));
-  });
+    stream.on("error", reject)
+    stream.on("data", chunk => hash.update(chunk))
+    stream.on("end", () => resolve(hash.digest("hex")))
+  })
 }
 
 export async function checkPathExists(files, baseDir) {
   if (Array.isArray(files)) {
     return (await Promise.all(
-      files.map(file => fileExists(path.join(baseDir, file)))
-    )).every(item => !!item);
+      files.map(file => fileExists(path.join(baseDir, file))),
+    )).every(item => !!item)
   }
-  return fileExists(path.join(baseDir, files));
+  return fileExists(path.join(baseDir, files))
 }
 
 export function generateRandomId(length = 8) {
-  const chars = "abcdefghijklmnopqrstuvwxyz0123456789";
-  let result = "";
+  const chars = "abcdefghijklmnopqrstuvwxyz0123456789"
+  let result = ""
   for (let i = 0; i < length; i++) {
-    result += chars.charAt(Math.floor(Math.random() * chars.length));
+    result += chars.charAt(Math.floor(Math.random() * chars.length))
   }
-  return result;
+  return result
 }
 
 async function getFileHashes(pathDeps) {
   return Promise.all(
     Object.keys(pathDeps).map(async filePath => {
-      const hash = await getFileHash(filePath);
+      const hash = await getFileHash(filePath)
       if (hash !== pathDeps[filePath]) {
-        return Promise.reject({ filePath, hash });
+        return Promise.reject({ filePath, hash })
       }
 
-      return Promise.resolve(pathDeps[filePath]);
-    })
-  );
+      return Promise.resolve(pathDeps[filePath])
+    }),
+  )
 }
 
 function getStatePropsHash(state, props) {
   const stateValues = props.reduce((depmap, dep) => {
-    const value = getValueAtPath(state, dep);
-    return { ...depmap, [dep]: value };
-  }, {});
-  return hashObject(stateValues);
+    const value = getValueAtPath(state, dep)
+    return { ...depmap, [dep]: value }
+  }, {})
+  return hashObject(stateValues)
 }
 
/**
 * Check whether a cached task result for `name` is still valid.
 *
 * A cache entry is a hit only if (1) its cache file exists, (2) every output
 * file it recorded still exists under `opts.outDir`, (3) the hash of the
 * tracked state properties matches, and (4) every tracked source file still
 * hashes to its recorded value.
 *
 * @param {string} name - Cache entry name (one `<name>.cache` file per task).
 * @param {object} currentState - Current build state, compared against the
 *   state snapshot recorded in the cache entry.
 * @param {object} opts - Expects `cacheDir`, `outDir` and optional `clean`.
 * @returns {Promise<object>} `{ hit: true, taskResult }` on a hit, otherwise
 *   `{ hit: false, reason, updates? }` where `updates` carries freshly
 *   computed hashes so updateCache can avoid recomputing them.
 */
export async function checkCache(name, currentState, opts) {
  const existingCacheObject = await readCache(opts.cacheDir, name)
  if (existingCacheObject) {
    // Output path(s) produced by the previous run; may be a single string or
    // an array (checkPathExists accepts both).
    const outFiles = existingCacheObject.taskResult.path
    const outFilesExist = await checkPathExists(outFiles, opts.outDir)
    if (outFilesExist) {
      // Re-hash only the state properties the task was observed to read.
      const stateHash = getStatePropsHash(
        currentState,
        existingCacheObject.deps.state.props,
      )
      if (stateHash === existingCacheObject.deps.state.hash) {
        try {
          await getFileHashes(existingCacheObject.deps.paths)
          return { hit: true, taskResult: existingCacheObject.taskResult }
        } catch (e) {
          // getFileHashes rejects with a plain { filePath, hash } record for
          // a mismatching file; pass it on so updateCache can reuse the hash.
          const updates = {
            deps: {
              paths: [e],
            },
          }
          return { hit: false, reason: "File hash mismatch", updates }
        }
      }
      // State changed: hand back the freshly computed hash for reuse.
      const updates = {
        deps: {
          state: {
            ...existingCacheObject.deps.state,
            hash: stateHash,
          },
        },
      }
      return { hit: false, reason: "State hash mismatch", updates }
    }
    if (opts.clean) {
      // NOTE(review): assumes outFiles is an array here, but checkPathExists
      // also accepts a single string path — confirm taskResult.path is always
      // an array whenever opts.clean is set.
      await Promise.all(
        outFiles.map(
          async outFile =>
            await fs.rm(path.join(opts.outDir, outFile), { force: true }),
        ),
      )
    }
    return { hit: false, reason: "Missing output file(s)" }
  }
  return { hit: false, reason: "Missing cache file" }
}
 
 export async function updateCache(
@@ -299,81 +299,81 @@ export async function updateCache(
   pathDeps,
   stateDeps,
   taskResult,
-  updates
+  updates,
 ) {
-  const cacheDirExists = await fileExists(cacheDir);
+  const cacheDirExists = await fileExists(cacheDir)
   if (!cacheDirExists) {
-    await fs.mkdir(cacheDir, { recursive: true });
+    await fs.mkdir(cacheDir, { recursive: true })
   }
-  const accessedState = getDeepestPropertiesForKey([...stateDeps], "path");
+  const accessedState = getDeepestPropertiesForKey([...stateDeps], "path")
   const deps = {
     paths: [...new Set(removeCwd(pathDeps))],
     state: accessedState.reduce(
       (as, { path, value }) => ({ ...as, [path]: value }),
-      {}
-    )
-  };
-  const statePropsList = Object.keys(deps.state);
-  const updatesStateHash = updates?.deps?.state?.props || [];
+      {},
+    ),
+  }
+  const statePropsList = Object.keys(deps.state)
+  const updatesStateHash = updates?.deps?.state?.props || []
   const stateDepsHash =
     JSON.stringify(statePropsList) === JSON.stringify(updatesStateHash)
       ? updates?.deps?.state?.hash
-      : hashObject(deps.state);
+      : hashObject(deps.state)
 
   const updatesPathsCache =
     updates?.deps?.paths?.reduce(
       (pc, { filePath, hash }) => ({
         ...pc,
-        [filePath]: hash
+        [filePath]: hash,
       }),
-      {}
-    ) || {};
+      {},
+    ) || {}
   const pathsCache = (await Promise.all(
     deps.paths.map(async filePath => {
       const hash = updatesPathsCache[filePath]
         ? updatesPathsCache[filePath]
-        : await getFileHash(filePath);
+        : await getFileHash(filePath)
       return {
         hash,
-        filePath
-      };
-    })
-  )).reduce((pc, { filePath, hash }) => ({ ...pc, [filePath]: hash }), {});
+        filePath,
+      }
+    }),
+  )).reduce((pc, { filePath, hash }) => ({ ...pc, [filePath]: hash }), {})
   const cacheObject = {
     deps: {
       state: {
         hash: stateDepsHash,
-        props: Object.keys(deps.state)
+        props: Object.keys(deps.state),
       },
-      paths: pathsCache
+      paths: pathsCache,
     },
-    taskResult
-  };
-  return await writeCache(cacheDir, name, cacheObject);
+    taskResult,
+  }
+  return await writeCache(cacheDir, name, cacheObject)
 }
 
 async function writeCache(cacheDir, name, cache) {
   if (!cacheDir) {
-    return false;
+    return false
   }
   return fs.writeFile(
     path.join(cacheDir, `${name}.cache`),
     JSON.stringify(cache),
-    "utf8"
-  );
+    "utf8",
+  )
 }
 
 async function readCache(cacheDir, name) {
   if (!cacheDir) {
-    return false;
+    return false
   }
   try {
     const content = await fs.readFile(
       path.join(cacheDir, `${name}.cache`),
-      "utf8"
-    );
-    return JSON.parse(content);
-  } catch (e) {
-    return false;
+      "utf8",
+    )
+    return JSON.parse(content)
+  } catch {
+    return false
   }
 }