
Refactor caching, processor API and logging

* Colourise logging
* Add prefixes for clearer logging
* cacheKey now slugified internally
* processors have all config inside config object (see the sketch after the file list)
* expand job config object for tasks with files
* jobs can now run in parallel
Craig Fletcher, 5 months ago
parent
commit
f37688df55
changed 7 files with 268 additions and 191 deletions
  1. src/cache.js (+13 -17)
  2. src/defaults.js (+48 -44)
  3. src/index.js (+41 -32)
  4. src/lib.js (+79 -65)
  5. src/logging.js (+60 -17)
  6. src/processors.js (+25 -14)
  7. src/util.js (+2 -2)
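
Before the per-file diffs, the central API change in one picture: a processor now receives a single state object whose config merges the task definition with the per-file fields, instead of separate top-level filePath/fileOutputPath arguments. A minimal sketch of the shape a processor sees, with illustrative values:

  // roughly what config.processor(state.proxy) receives in the new lib.js
  // for the job "styles @ styles/main.scss" (values illustrative)
  const state = {
    meta: {
      opts: { outDir: "dist/", cacheDir: ".cache", logLevel: "debug" },
      resources: {}, // results of earlier steps, keyed by task name
    },
    config: {
      // the task definition from defaults.js...
      name: "styles",
      outputDir: "static/styles/",
      outputFileExtension: ".css",
      // ...plus the per-file fields processFileTask now adds:
      filePath: "styles/main.scss",
      fileOutputDir: "dist/static/styles",
      fileOutputPath: "dist/static/styles/main.css",
    },
  }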

+ 13 - 17
src/cache.js

@@ -4,7 +4,7 @@ import { createHash } from "crypto"
 import { createReadStream } from "fs"
 import stableStringify from "safe-stable-stringify"
 import {
-  fileExists,
+  slugifyString,
   checkPathExists,
   getValueAtPath,
   removeCwd,
@@ -26,7 +26,6 @@ export function hashObject(obj) {
 }
 
 const hashCache = new Map()
-console.log("Created new hashCache")
 export async function getFileHash(filePath, algorithm = "md5") {
   return new Promise((resolve, reject) => {
     if (!hashCache.has(filePath)) {
@@ -67,13 +66,11 @@ function getStatePropsHash(state, props) {
   }, {})
   return hashObject(stateValues)
 }
-
-export async function checkCache(name, currentState, opts) {
+export async function checkCache(cacheKey, currentState, opts) {
+  const name = slugifyString(cacheKey)
   const existingCacheObject = await readCache(opts.cacheDir, name)
   if (existingCacheObject) {
-    const outFiles = existingCacheObject.taskResult.paths
-    const outFilesExist = await checkPathExists(outFiles, opts.outDir)
-    if (outFilesExist) {
+    if (opts.ignoreExisting || (await checkPathExists(existingCacheObject.taskResult.paths, opts.outDir))) {
       const stateHash = getStatePropsHash(
         currentState,
         existingCacheObject.deps.state.props,
@@ -102,10 +99,11 @@ export async function checkCache(name, currentState, opts) {
       return { hit: false, reason: "State hash mismatch", updates }
     }
     if (opts.clean) {
+      const outFiles = existingCacheObject.taskResult.paths
       await Promise.all(
         outFiles.map(
           async outFile =>
-          await fs.rm(path.join(opts.outDir, outFile), { force: true }),
+            await fs.rm(path.join(opts.outDir, outFile), { force: true }),
         ),
       )
     }
@@ -116,16 +114,14 @@ export async function checkCache(name, currentState, opts) {
 
 export async function updateCache(
   cacheDir,
-  name,
+  cacheKey,
   pathDeps,
   stateDeps,
   taskResult,
   updates,
 ) {
-  const cacheDirExists = await fileExists(cacheDir)
-  if (!cacheDirExists) {
-    await fs.mkdir(cacheDir, { recursive: true })
-  }
+  await fs.mkdir(cacheDir, { recursive: true })
+  const name = slugifyString(cacheKey)
   const accessedState = getDeepestPropertiesForKey([...stateDeps], "path")
   const deps = {
     paths: [...new Set(removeCwd(pathDeps))],
@@ -138,8 +134,8 @@ export async function updateCache(
   const updatesStateHash = updates?.deps?.state?.props || []
   const stateDepsHash =
     JSON.stringify(statePropsList) === JSON.stringify(updatesStateHash)
-    ? updates?.deps?.state?.hash
-    : hashObject(deps.state)
+      ? updates?.deps?.state?.hash
+      : hashObject(deps.state)
 
   const updatesPathsCache =
     updates?.deps?.paths?.reduce(
@@ -178,7 +174,7 @@ async function writeCache(cacheDir, name, cache) {
     return false
   }
   return fs.writeFile(
-    path.join(cacheDir, `${name}.cache`),
+    path.join(cacheDir, `${name}.json`),
     JSON.stringify(cache),
     "utf8",
   )
@@ -190,7 +186,7 @@ async function readCache(cacheDir, name) {
   }
   try {
     const content = await fs.readFile(
-      path.join(cacheDir, `${name}.cache`),
+      path.join(cacheDir, `${name}.json`),
       "utf8",
     )
     return JSON.parse(content)
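
checkCache and updateCache now take the raw cacheKey and slugify it internally, and entries are stored as <slug>.json rather than .cache files. A minimal usage sketch, assuming illustrative key, deps and paths:

  import { checkCache, updateCache } from "./cache.js"

  const opts = { cacheDir: ".cache", outDir: "dist/", clean: true, ignoreExisting: false }
  // raw key; cache.js slugifies it to "styles-styles-main-scss" and
  // reads/writes ".cache/styles-styles-main-scss.json"
  const cacheKey = "styles @ styles/main.scss"
  const state = { meta: { opts }, config: { name: "styles" } }

  const cache = await checkCache(cacheKey, state, opts)
  if (!cache.hit) {
    // ...run the processor, then persist under the same raw key
    const taskResult = { paths: ["static/styles/main.css"], ref: "static-styles-main-css" }
    await updateCache(opts.cacheDir, cacheKey, ["styles/main.scss"], [], taskResult, cache.updates)
  }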

+ 48 - 44
src/defaults.js

@@ -8,53 +8,58 @@ import {
 } from "./processors.js"
 
 export const tasks = [
-  {
-    name: "styles",
-    inputFiles: [{ pattern: "styles/**/*.scss", ignore: "**/_*.scss" }],
-    stripPaths: ["styles/"],
-    outputDir: "static/styles/",
-    outputFileExtension: ".css",
-    processor: compileSass,
-  },
-  {
-    name: "icons",
-    inputFiles: [{ pattern: "images/icons/*.svg" }],
-    stripPaths: ["images/"],
-    outputDir: "static/",
-    outputFileExtension: ".svg",
-    processor: optimiseSvg,
-  },
-  {
-    name: "images",
-    inputFiles: [{ pattern: "images/content/*.jpg" }],
-    stripPaths: ["images/content/"],
-    outputDir: "images/",
-    outputFileExtension: ".webp",
-    imageSizes: [
-      "640w", "768w", "1024w", "1366w", "1600w", "1920w", "2560w",
-    ],
-    quality: 80,
-    processor: imageToWebP,
-  },
-  {
-    name: "favicons",
-    inputFiles: [{ pattern: "images/favicon/*" }],
-    stripPaths: ["images/favicon/"],
-    outputDir: "static/meta/",
-    processor: generateFavicons,
-  },
+  [
+    {
+      name: "styles",
+      inputFiles: [{ pattern: "styles/**/*.scss", ignore: "**/_*.scss" }],
+      stripPaths: ["styles/"],
+      outputDir: "static/styles/",
+      outputFileExtension: ".css",
+      processor: compileSass,
+    },
+    {
+      name: "icons",
+      inputFiles: [{ pattern: "images/icons/*.svg" }],
+      stripPaths: ["images/"],
+      outputDir: "static/",
+      outputFileExtension: ".svg",
+      processor: optimiseSvg,
+    },
+    {
+      name: "images",
+      inputFiles: [{ pattern: "images/content/*.jpg" }],
+      stripPaths: ["images/content/"],
+      outputDir: "images/",
+      outputFileExtension: ".webp",
+      imageSizes: [
+        "640w", "768w", "1024w", "1366w", "1600w", "1920w", "2560w",
+      ],
+      quality: 80,
+      processor: imageToWebP,
+    },
+    {
+      name: "static files",
+      inputFiles: [{ pattern: "static/*" }],
+      stripPaths: ["static/"],
+      processor: copy,
+    },
+    {
+      name: "favicons",
+      inputFiles: [{ pattern: "images/favicon/*" }],
+      stripPaths: ["images/favicon/"],
+      outputDir: "static/meta/",
+      processor: generateFavicons,
+    },
+  ],
   {
     name: "pages",
     inputFiles: [{ pattern: "markdown/*.md" }],
     stripPaths: ["markdown/"],
     outputFileExtension: ".html",
     processor: renderMarkdownWithTemplate,
-  },
-  {
-    name: "static files",
-    inputFiles: [{ pattern: "static/*" }],
-    stripPaths: ["static/"],
-    processor: copy,
+    logLevel: "debug",
+    defaultTemplate: "default",
+    templateDirs: ["templates/", "~/.rhedyn/templates/"],
   },
 ]
 
@@ -62,13 +67,12 @@ export const opts = {
   outDir: "dist/",
   runDir: process.cwd(),
   cacheDir: ".cache",
-  logLevel: "debug",
-  defaultTemplate: "default",
   include: {
     styles: [{ pattern: "~/.rhedyn/styles/*.scss" }],
   },
-  templateDirs: ["templates/", "~/.rhedyn/templates/"],
   clean: true,
+  ignoreExisting: false,
+  logLevel: "debug",
   site: {
     name: "Website generated by Rhedyn",
     shortName: "Rhedyn test site",
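
The tasks export is now a list of steps: an array entry runs its tasks in parallel, a bare object runs on its own, and task-specific options such as defaultTemplate and templateDirs move off the global opts onto the task itself. A user config following the new shape might look like this (patterns and directories illustrative):

  import { compileSass, optimiseSvg, renderMarkdownWithTemplate } from "./processors.js"

  export const tasks = [
    [
      // step 1: independent asset tasks, run concurrently
      {
        name: "styles",
        inputFiles: [{ pattern: "styles/**/*.scss", ignore: "**/_*.scss" }],
        outputDir: "static/styles/",
        outputFileExtension: ".css",
        processor: compileSass,
      },
      {
        name: "icons",
        inputFiles: [{ pattern: "images/icons/*.svg" }],
        outputDir: "static/",
        outputFileExtension: ".svg",
        processor: optimiseSvg,
      },
    ],
    {
      // step 2: runs after step 1, so templates can read the finished
      // assets out of meta.resources
      name: "pages",
      inputFiles: [{ pattern: "markdown/*.md" }],
      outputFileExtension: ".html",
      defaultTemplate: "default",
      templateDirs: ["templates/"],
      processor: renderMarkdownWithTemplate,
    },
  ]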

+ 41 - 32
src/index.js

@@ -1,13 +1,13 @@
 #!/usr/bin/env node
 
 import * as defaultConfig from "./defaults.js"
-import { getConfig, processFiles } from "./lib.js"
+import { getConfig, processTask } from "./lib.js"
 import { performance } from "node:perf_hooks"
 import { getLogger } from "./logging.js"
 const startTime = performance.now()
 const { opts, tasks } = await getConfig() || { ...defaultConfig }
-const log = getLogger(opts.logLevel)
-log.info(`Processing ${tasks.length} tasks`)
+const log = getLogger(opts.logLevel, "main")
+log.info(`Processing ${tasks.length} steps`)
 log.debug(`Running directory: ${opts.runDir}`)
 log.debug(`Output directory: ${opts.outDir}`)
 if (opts.cacheDir) {
@@ -16,40 +16,49 @@ if (opts.cacheDir) {
   log.warn("Cache disabled")
 }
 async function runTask(meta, task) {
-  const allResults = await processFiles(task, meta)
-  const cachedResults = allResults.filter(taskResult => taskResult.fromCache)
-  const processedResults = allResults.filter(taskResult => !taskResult.fromCache)
-  const taskResult = {
-    ...meta,
-    resources: {
-      ...meta.resources,
-      [task.name]: allResults.reduce((obj, taskResult) => ({ ...obj, [taskResult.ref]: taskResult }), {}),
-    },
-  }
-  return { taskResult, cachedResults, processedResults }
+  const allResults = await processTask(task, meta)
+  return allResults
 }
 
 const taskRunner = tasks.reduce(
-  async (metaPromise, task) => {
-    const meta = await metaPromise
-    const startTime = performance.now()
-    log.group(`[Task] ${task.name}`)
-    log.trace(`patterns: ${JSON.stringify(task.inputFiles)}`)
-    log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
-    log.trace(`task: ${JSON.stringify(task, null, 2)}`)
-    const { taskResult, cachedResults, processedResults } = await runTask(meta, task)
-    const endTime = performance.now()
-    const timeTaken = (endTime - startTime)
-    const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-    const filesWritten = processedResults.reduce((acc, cur) => acc + cur.paths.length, 0)
-    log.info(`written: ${filesWritten} | processed: ${processedResults.length} | from cache: ${cachedResults.length} | ${hrTime}`)
-    log.groupEnd()
-    return taskResult
+  async (metaPromise, step) => {
+    const tasks = Array.isArray(step) ? step : [step]
+    const { meta, filesWritten } = await metaPromise
+    const stepTasks = tasks.map(task => task.name)
+    log.info(`Starting tasks: ${stepTasks.join(", ")}`)
+    const stepResults = await Promise.all(tasks.map(async task => {
+      const log = getLogger(opts.logLevel, task.name)
+      const startTime = performance.now()
+      const taskResult = await runTask(meta, task)
+      const cached = taskResult.filter(taskResult => taskResult.fromCache)
+      const processed = taskResult.filter(taskResult => !taskResult.fromCache)
+      const resources = taskResult.reduce((obj, tResult) => tResult.ref ? ({ ...obj, [tResult.ref]: tResult }) : obj, {})
+      const endTime = performance.now()
+      const timeTaken = (endTime - startTime)
+      const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
+      const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
+      log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
+      return { name: task.name, taskResult, cached, processed, resources, filesWritten }
+    }))
+    const newState = stepResults.reduce((newState, taskResult) => {
+      const resources = Object.keys(taskResult.resources).length > 0 ? {
+        ...newState.meta.resources,
+        [taskResult.name]: taskResult.resources,
+      } : { ...newState.meta.resources }
+      return {
+        meta: {
+          ...newState.meta,
+          resources,
+        },
+        filesWritten: newState.filesWritten + taskResult.filesWritten,
+      }
+    }, { meta, filesWritten })
+    return newState
   },
-  Promise.resolve({ opts }),
+  Promise.resolve({ meta: { opts }, filesWritten: 0 }),
 )
-await taskRunner
+const finalState = await taskRunner
 const endTime = performance.now()
 const timeTaken = (endTime - startTime)
 const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-log.info(`Completed ${tasks.length} tasks in ${hrTime}`)
+log.info(`Completed ${tasks.length} steps in ${hrTime}, wrote ${finalState.filesWritten} files.`)
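
Each step now folds its task results back into the shared meta, keyed by task name and then by each result's ref, so a later step such as pages can look up earlier outputs. After the styles task, for example, the accumulated meta would look roughly like this (shape per the reduce above, values illustrative):

  const meta = {
    opts,
    resources: {
      styles: {
        "dist-static-styles-main-css": {
          paths: ["static/styles/main.css"],
          ref: "dist-static-styles-main-css",
          detail: { href: "static/styles/main.css" },
        },
      },
    },
  }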

+ 79 - 65
src/lib.js

@@ -9,7 +9,6 @@ import {
   removeBasePaths,
   removeCwd,
   replaceFileExtension,
-  slugifyString,
 } from "./util.js"
 import path from "path"
 import process from "node:process"
@@ -30,16 +29,74 @@ export async function getConfig() {
     return
   }
 }
+async function runTask({ meta, config, jobId }) {
+  const log = getLogger(meta.opts.logLevel, jobId)
+  log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
+  log.trace(`config: ${JSON.stringify(config, null, 2)}`)
 
-export async function processFiles(config, meta) {
-  const includes = meta.opts?.include?.[config.name] || []
-  const patternsToInclude = [...config.inputFiles, ...includes]
+  const stateObject = {
+    meta,
+    config,
+  }
+  const cache = (meta.opts.cacheDir && !config.skipCache) ?
+    await checkCache(
+      jobId,
+      stateObject,
+      meta.opts,
+    )
+    :
+    { disabled: true, reason: "Cache disabled" }
+
+  if (cache && cache.hit) {
+    log.debug(`Loaded cache for ${jobId}`)
+    return { ...cache.taskResult, fromCache: true }
+  }
+  log.debug(`Cache miss for ${jobId} (${cache.reason})`)
+
+  const state = meta.opts.cacheDir
+    ? createTrackedObject(stateObject)
+    : { proxy: stateObject }
+
+  const {
+    detail,
+    paths = [],
+    deps: processorDeps,
+    ref,
+  } = await config.processor(state.proxy)
+
+  const taskResult = {
+    detail,
+    paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
+    ref,
+  }
+  log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`)
+  if (cache && !cache.disabled) {
+    log.debug(`Updating cache for ${jobId}`)
+    const processorPathDeps = processorDeps?.paths || []
+    const processorStateDeps = processorDeps?.state || []
+    const configPathDeps = config.deps?.paths || []
+    const configStateDeps = config.deps?.state || []
+    await updateCache(
+      meta.opts.cacheDir,
+      jobId,
+      removeCwd([
+        ...configPathDeps, ...processorPathDeps, config?.filePath,
+      ].filter(item => !!item)),
+      [
+        ...configStateDeps, ...processorStateDeps, ...(state?.accessed || []),
+      ].filter(item => !!item),
+      taskResult,
+      cache.updates,
+    )
+  }
+
+  return taskResult
+}
+
+async function processFileTask(patternsToInclude, config, meta) {
   const filesToProcess = await readFilesByGlob(patternsToInclude)
   const pathsToStrip = config.stripPaths || []
   const outputDir = config.outputDir || ""
-  const configPathDeps = config.deps?.paths || []
-  const configStateDeps = config.deps?.state || []
-  const log = getLogger(meta.opts.logLevel)
   return await Promise.all(
     filesToProcess.map(async filePath => {
       const fileOutputPath = path.join(
@@ -52,66 +109,23 @@ export async function processFiles(config, meta) {
       )
 
       const fileOutputDir = path.dirname(fileOutputPath)
-
-      const stateObject = {
-        filePath,
-        meta,
-        fileOutputDir,
-        fileOutputPath,
-        config,
-      }
-
-      let cache = {}
-      if (meta.opts.cacheDir) {
-        cache = await checkCache(
-          slugifyString(filePath),
-          stateObject,
-          meta.opts,
-        )
-
-        if (cache && cache.hit) {
-          log.debug(`Loaded cache for ${filePath}`)
-          return { ...cache.taskResult, fromCache: true }
-        }
-        log.debug(`Cache miss for ${filePath} (${cache.reason})`)
+      const jobConfig = {
+        ...config,
+        filePath, fileOutputDir, fileOutputPath,
       }
+      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
+    }),
+  )}
 
-      const state = meta.opts.cacheDir
-        ? createTrackedObject(stateObject)
-        : { proxy: stateObject }
-
-      const {
-        detail,
-        paths,
-        deps: processorDeps,
-        ref,
-      } = await config.processor(state.proxy)
+export async function processTask(config, meta) {
+  const includes = meta.opts?.include?.[config.name] || []
+  const patternsToInclude = [...(config?.inputFiles || []), ...includes]
 
-      const taskResult = {
-        detail,
-        paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
-        ref,
-      }
-      log.debug(`Wrote ${taskResult.paths.length} files for ${filePath}`)
-      if (meta.opts.cacheDir) {
-        log.debug(`Updating cache for ${filePath}`)
-        const processorPathDeps = processorDeps?.paths || []
-        const processorStateDeps = processorDeps?.state || []
-        await updateCache(
-          meta.opts.cacheDir,
-          slugifyString(filePath),
-          removeCwd([
-            ...configPathDeps, ...processorPathDeps, filePath,
-          ]),
-          [
-            ...configStateDeps, ...processorStateDeps, ...state.accessed,
-          ],
-          taskResult,
-          cache.updates,
-        )
-      }
+  if (patternsToInclude.length) {
+    return processFileTask(patternsToInclude, config, meta)
+  }
 
-      return taskResult
-    }),
-  )
+  const jobId = config.jobId || config.name
+  const taskResult = await runTask({ meta, config, jobId })
+  return [taskResult]
 }
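
processTask now also covers tasks without input files: those run the processor once, with jobId falling back to config.jobId || config.name, while per-file tasks get an expanded job config carrying filePath, fileOutputDir and fileOutputPath plus a jobId of "name @ file". A sketch of a file-less task under the new API (the sitemap task and its output are hypothetical):

  import fs from "node:fs/promises"
  import path from "node:path"
  import { processTask } from "./lib.js"

  const sitemapTask = {
    name: "sitemap",
    skipCache: true, // new flag: runTask skips the cache when set
    processor: async ({ meta }) => {
      // hypothetical processor: emits a single file, needs no inputFiles
      const outPath = path.join(meta.opts.outDir, "sitemap.xml")
      await fs.mkdir(meta.opts.outDir, { recursive: true })
      await fs.writeFile(outPath, "<urlset></urlset>", "utf8")
      return { paths: [outPath] }
    },
  }

  // runs exactly once, with jobId "sitemap"
  const [result] = await processTask(sitemapTask, {
    opts: { outDir: "dist/", cacheDir: null, logLevel: "info" },
  })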

+ 60 - 17
src/logging.js

@@ -1,13 +1,56 @@
-function logMessage(label) {
+const formatMap = {
+  labels: {
+    main: {
+      start: "\x1b[1m\x1b[32m",
+      end: "\x1b[0m",
+    },
+  },
+  logTypes: {
+    error: {
+      start: "\x1b[31m",
+      end: "\x1b[0m",
+    },
+    warn: {
+      start: "\x1b[33m",
+      end: "\x1b[0m",
+    },
+    info: {
+      start: "\x1b[32m",
+      end: "\x1b[0m",
+    },
+    debug: {
+      start: "\x1b[34m",
+      end: "\x1b[0m",
+    },
+  },
+}
+
+function messageStart(label, logType) {
+  const labelFormatting = formatMap.labels?.[label]?.start || `[${label}] `
+  const logTypeFormatting = `${formatMap.logTypes?.[logType]?.start || ""}[${logType}] `
+  return `${logTypeFormatting}${labelFormatting}`
+}
+
+function messageEnd(label, logType) {
+  const labelFormatting = formatMap.labels?.[label]?.end || ""
+  const logTypeFormatting = formatMap.logTypes?.[logType]?.end || ""
+  return `${logTypeFormatting}${labelFormatting}`
+}
+
+function logMessage(logType, label) {
+  const log = console[logType]
+  const startStr = messageStart(label, logType)
+  const endStr = messageEnd(label, logType)
   return (message) => {
-    console[label](`[${label}] ${message}`)
+    log(`${startStr}${message}${endStr}`)
   }
 }
 
 function noop() {
 }
 
-export function getLogger(logLevel = "info") {
+export function getLogger(logLevel = "info", name) {
+  const label = name || "genid"
   switch(logLevel) {
     case 6:
     case "trace":
@@ -15,20 +58,20 @@ export function getLogger(logLevel = "info") {
         group: console.group,
         groupEnd: console.groupEnd,
         trace: console.trace,
-        debug: logMessage("debug"),
-        info: logMessage("info"),
-        warn: logMessage("warn"),
-        error: logMessage("error"),
+        debug: logMessage("debug", label),
+        info: logMessage("info", label),
+        warn: logMessage("warn", label),
+        error: logMessage("error", label),
       }
     case "debug":
       return {
         group: console.group,
         groupEnd: console.groupEnd,
         trace: noop,
-        debug: logMessage("debug"),
-        info: logMessage("info"),
-        warn: logMessage("warn"),
-        error: logMessage("error"),
+        debug: logMessage("debug", label),
+        info: logMessage("info", label),
+        warn: logMessage("warn", label),
+        error: logMessage("error", label),
       }
     case 4:
     case "info":
@@ -37,9 +80,9 @@ export function getLogger(logLevel = "info") {
         groupEnd: console.groupEnd,
         trace: noop,
         debug: noop,
-        info: logMessage("info"),
-        warn: logMessage("warn"),
-        error: logMessage("error"),
+        info: logMessage("info", label),
+        warn: logMessage("warn", label),
+        error: logMessage("error", label),
       }
     case 3:
     case "warn":
@@ -49,8 +92,8 @@ export function getLogger(logLevel = "info") {
         trace: noop,
         debug: noop,
         info: noop,
-        warn: logMessage("warn"),
-        error: logMessage("error"),
+        warn: logMessage("warn", label),
+        error: logMessage("error", label),
       }
     case 2:
     case "error":
@@ -61,7 +104,7 @@ export function getLogger(logLevel = "info") {
         debug: noop,
         info: noop,
         warn: noop,
-        error: logMessage("error"),
+        error: logMessage("error", label),
       }
     case 1:
     case "silent":

+ 25 - 14
src/processors.js

@@ -57,18 +57,19 @@ function createMarkdownRenderer(meta) {
 }
 
 export async function renderMarkdownWithTemplate({
-  filePath,
+  config,
   meta,
-  fileOutputPath,
 }) {
+  const filePath = config.filePath
+  const fileOutputPath = config.fileOutputPath
   const content = await fs.readFile(filePath, "utf8")
   const { data, content: markdown } = matter(content)
-  const templateName = data.template || meta.opts.defaultTemplate
+  const templateName = data.template || config.defaultTemplate
   const href = getHref(fileOutputPath, meta)
 
   if (!templateCache.has(templateName)) {
     const templatePath = await firstFound(
-      meta.opts.templateDirs,
+      config.templateDirs,
       `${templateName}.hbs`,
     )
     if (!templatePath) throw new Error(`Template not found: ${templateName}`)
@@ -107,14 +108,16 @@ export async function renderMarkdownWithTemplate({
   }
 }
 
-export async function compileSass({ filePath, fileOutputPath, meta }) {
+export async function compileSass({ config, meta }) {
+  const filePath = config.filePath
+  const fileOutputPath = config.fileOutputPath
   const result = await sass.compileAsync(filePath, { style: "compressed" })
   await writeFile(fileOutputPath, result.css)
   return {
     paths: [fileOutputPath],
     ref: slugifyString(fileOutputPath),
     detail: {
-      href: fileOutputPath.replace(meta.opts.outDir, "")
+      href: fileOutputPath.replace(meta.opts.outDir, ""),
     },
     deps: {
       paths: [...result.loadedUrls.map(item => item.pathname)],
@@ -122,7 +125,9 @@ export async function compileSass({ filePath, fileOutputPath, meta }) {
   }
 }
 
-export async function optimiseSvg({ filePath, fileOutputPath }) {
+export async function optimiseSvg({ config }) {
+  const filePath = config.filePath
+  const fileOutputPath = config.fileOutputPath
   const svgString = await fs.readFile(filePath, "utf8")
   const result = optimize(svgString, {
     plugins: ["preset-default"],
@@ -134,16 +139,20 @@ export async function optimiseSvg({ filePath, fileOutputPath }) {
   }
 }
 
-export async function copy({ filePath, fileOutputPath }) {
-  const fileContent = await fs.readFile(filePath, "utf8")
-  await writeFile(fileOutputPath, fileContent)
+export async function copy({ config }) {
+  const filePath = config.filePath
+  const fileOutputPath = config.fileOutputPath
+  await fs.mkdir(config.fileOutputDir, { recursive: true })
+  await fs.copyFile(filePath, fileOutputPath)
   return {
     paths: [fileOutputPath],
     ref: slugifyString(fileOutputPath),
   }
 }
 
-export async function imageToWebP({ filePath, meta, fileOutputDir, config }) {
+export async function imageToWebP({ meta, config }) {
+  const filePath = config.filePath
+  const fileOutputDir = config.fileOutputDir
   const sourceExtension = path.extname(filePath)
   const outputExtension = config.outputFileExtension
   const base = path.basename(filePath, sourceExtension)
@@ -177,16 +186,18 @@ export async function imageToWebP({ filePath, meta, fileOutputDir, config }) {
     }),
   )
 
-  const imageRef = getCleanPath(path.join(filePath), meta)
+  const imageRef = slugifyString(getCleanPath(path.join(filePath), meta))
 
   return {
     paths: srcSet.map(src => src[0]),
-    detail: { imageRef, srcSet, aspectRatio },
+    detail: { srcSet, aspectRatio },
     ref: imageRef,
   }
 }
 
-export async function generateFavicons({ filePath, meta, fileOutputDir }) {
+export async function generateFavicons({ meta, config }) {
+  const filePath = config.filePath
+  const fileOutputDir = config.fileOutputDir
   // Configuration for favicons package
   const configuration = {
     path: getCleanPath(fileOutputDir, meta), // Path for overriding default icons path
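
All built-in processors now destructure { meta, config } and read their file paths from config, so a third-party processor migrates the same way. A hedged sketch (minifyJs and its toy whitespace "minification" are hypothetical):

  import fs from "node:fs/promises"
  import { slugifyString } from "./util.js"

  // before: export async function minifyJs({ filePath, fileOutputPath }) { ... }
  export async function minifyJs({ config }) {
    const { filePath, fileOutputDir, fileOutputPath } = config
    const source = await fs.readFile(filePath, "utf8")
    await fs.mkdir(fileOutputDir, { recursive: true })
    await fs.writeFile(fileOutputPath, source.replace(/\s+/g, " "), "utf8")
    return {
      paths: [fileOutputPath],
      ref: slugifyString(fileOutputPath),
    }
  }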

+ 2 - 2
src/util.js

@@ -117,7 +117,7 @@ function trackPropertyAccessDeep(obj, path = [], accessed = new Set()) {
       const fullPath = [...path, prop].map(stringifyPathPart).join(".")
       const value = Reflect.get(target, prop, receiver)
 
-      if (typeof target === "object" && target.hasOwnProperty(prop)) {
+      if (typeof target === "object" && Object.prototype.hasOwnProperty.call(target, prop)) {
         accessed.add({ path: fullPath, value })
       }
 
@@ -170,7 +170,7 @@ export function slugifyString(str) {
   return str
     .toLowerCase()
     .trim()
-    .replace(/[/\\?%*:|"<>]/g, "-") // Replace invalid filename characters
+    .replace(/[/\\?%@*:|"<>]/g, "-") // Replace invalid filename characters
     .replace(/\s+/g, "-") // Replace whitespace with dashes
     .replace(/-+/g, "-") // Collapse multiple dashes
     .replace(/\./g, "-") // Replace dots with dashes
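
The added @ matters because jobIds such as "styles @ styles/main.scss" now feed straight into cache file names; per the chain above:

  slugifyString("styles @ styles/main.scss")
  // → "styles-styles-main-scss" (@, /, whitespace and dots all become dashes)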