Add config override switch

Craig Fletcher, 2 days ago
commit 557b05b05b
4 changed files with 122 additions and 62 deletions
  1. README.md (+5 -1)
  2. src/lib.js (+94 -36)
  3. src/processors.js (+8 -21)
  4. src/util/file-system.js (+15 -4)

README.md (+5 -1)

@@ -6,7 +6,8 @@ Run this script from a directory containing markdown, templates and styles and a
 containing the rendered-out static assets ready for deployment.
 
 While the defaults should work for most (of my) use cases, you can configure behaviour using a `rhedyn.config.js` in the
-directory where you run the tool.
+directory where you run the tool, or pass a path to a config using `-c /path/to/config.js` or `--config
+path/to/config.js`.
 
 "Rhedyn" is Welsh for "fern", and is pronounced a bit like "read in". 
 
@@ -54,6 +55,9 @@ considerably larger cache files.
 
 ## Configuration
 
+By default, rhedyn will look for `rhedyn.config.js` in the current directory and fall back to the default if not found.
+You can override this behaviour using the `-c /path/to/config.js` or `--config path/to/config.js` CLI switches.
+
 A working example of how to configure can be found in `src/defaults.js`, with the processors separated out to
 `src/processors.js` for tidiness. 
 

src/lib.js (+94 -36)

@@ -1,7 +1,4 @@
-import {
-  checkCache,
-  updateCache,
-} from "./cache.js"
+import { checkCache, updateCache } from "./cache.js"
 import {
   createTrackedObject,
   fileExists,
@@ -10,28 +7,50 @@ import {
   removeCwd,
   replaceFileExtension,
   getValueAtPath,
+  expandTilde,
 } from "./util/index.js"
 import path from "path"
 import process from "node:process"
 import { getLogger } from "./logging.js"
 
 export async function getConfig() {
-  const configPath = path.join(process.cwd(), "rhedyn.config.js")
+  const args = process.argv.slice(2)
+  const defaultPath = path.join(process.cwd(), "rhedyn.config.js")
+
+  const findConfigPath = (args, index = 0) => {
+    if (index >= args.length) {
+      return defaultPath
+    }
+    if (
+      (args[index] === "-c" || args[index] === "--config") &&
+      index + 1 < args.length
+    ) {
+      return path.resolve(args[index + 1])
+    }
+    return findConfigPath(args, index + 1)
+  }
+
+  const configPath = findConfigPath(args)
   const configFileExists = await fileExists(configPath)
+
   if (configFileExists) {
     try {
       const config = await import(configPath)
       return config.default || config
     } catch (err) {
-      console.error("Error reading rhedyn.config.js:", err)
+      console.error(`Error reading config file at ${configPath}:`, err)
       throw new Error("Failed reading config file")
     }
   } else {
     return
   }
 }
+
 async function runTask({ meta, config, jobId }) {
-  const log = getLogger(config.logLevel ? config.logLevel : meta.opts.logLevel, jobId)
+  const log = getLogger(
+    config.logLevel ? config.logLevel : meta.opts.logLevel,
+    jobId,
+  )
   log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
   log.trace(`config: ${JSON.stringify(config, null, 2)}`)
 
@@ -39,14 +58,10 @@ async function runTask({ meta, config, jobId }) {
     meta,
     config,
   }
-  const cache = (meta.opts.cacheDir && !config.skipCache) ?
-    await checkCache(
-      jobId,
-      stateObject,
-      meta.opts,
-    )
-    :
-    { disabled: true, reason: "Cache disabled" }
+  const cache =
+    meta.opts.cacheDir && !config.skipCache
+      ? await checkCache(jobId, stateObject, meta.opts)
+      : { disabled: true, reason: "Cache disabled" }
 
   if (cache && cache.hit) {
     log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`)
@@ -67,7 +82,9 @@ async function runTask({ meta, config, jobId }) {
 
   const taskResult = {
     detail,
-    paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
+    paths: paths.map(fileOutputPath =>
+      fileOutputPath.replace(meta.opts.outDir, ""),
+    ),
     ref,
   }
   log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`)
@@ -81,11 +98,18 @@ async function runTask({ meta, config, jobId }) {
     await updateCache(
       meta.opts.cacheDir,
       jobId,
-      removeCwd([
-        ...configPathDeps, ...processorPathDeps, config?.filePath,
-      ].filter(item => !!item)),
+      removeCwd(
+        [
+          ...configPathDeps, ...processorPathDeps, config?.filePath,
+        ].filter(
+          item => !!item,
+        ),
+      ),
       [
-        ...configStateDeps, ...stateSelectors, ...processorStateDeps, ...(state?.accessed || []),
+        ...configStateDeps,
+        ...stateSelectors,
+        ...processorStateDeps,
+        ...(state?.accessed || []),
       ].filter(item => !!item),
       taskResult,
       cache.updates,
@@ -98,7 +122,7 @@ async function runTask({ meta, config, jobId }) {
 
 async function expandFileTask(patternsToInclude, config, meta) {
   const filesToProcess = await readFilesByGlob(patternsToInclude)
-  const pathsToStrip = config.stripPaths || []
+  const pathsToStrip = (config.stripPaths || []).map(path => expandTilde(path))
   const outputDir = config.outputDir || ""
   return await Promise.all(
     filesToProcess.map(async filePath => {
@@ -114,27 +138,41 @@ async function expandFileTask(patternsToInclude, config, meta) {
       const fileOutputDir = path.dirname(fileOutputPath)
       const jobConfig = {
         ...config,
-        filePath, fileOutputDir, fileOutputPath,
+        filePath,
+        fileOutputDir,
+        fileOutputPath,
       }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${filePath}`,
+      })
     }),
   )
 }
 
 async function expandStateTask(stateToExpand, config, meta) {
-  const stateToProcess = stateToExpand.map(property => {
-    const values = getValueAtPath(meta, property)
-    const expandedValues = Array.isArray(values) ? values : Object.values(values)
-    return expandedValues.map((value, index) => ({ property, index, value }))
-  }).flat()
+  const stateToProcess = stateToExpand
+    .map(property => {
+      const values = getValueAtPath(meta, property)
+      const expandedValues = Array.isArray(values)
+        ? values
+        : Object.values(values)
+      return expandedValues.map((value, index) => ({ property, index, value }))
+    })
+    .flat()
 
   return await Promise.all(
-    stateToProcess.map(async (stateJob) => {
+    stateToProcess.map(async stateJob => {
       const jobConfig = {
         ...config,
         ...stateJob.value.detail,
       }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}` })
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}`,
+      })
     }),
   )
 }
@@ -162,11 +200,31 @@ export async function processTask(meta, task) {
   const taskResult = await expandAndRunTask(meta, task)
   const cached = taskResult.filter(taskResult => taskResult.fromCache)
   const processed = taskResult.filter(taskResult => !taskResult.fromCache)
-  const resources = taskResult.reduce((obj, tResult) => tResult.ref ? ({ ...obj, [tResult.ref]: tResult }) : obj, {})
+  const resources = taskResult.reduce(
+    (obj, tResult) => (tResult.ref ? { ...obj, [tResult.ref]: tResult } : obj),
+    {},
+  )
   const endTime = performance.now()
-  const timeTaken = (endTime - startTime)
-  const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-  const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
-  log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
-  return { name: task.name, taskResult, cached, processed, resources, filesWritten }
+  const timeTaken = endTime - startTime
+  const hrTime =
+    timeTaken > 1000
+      ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s`
+      : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
+  const filesWritten = processed.reduce(
+    (acc, cur) => acc + cur.paths.length,
+    0,
+  )
+  log.info(
+    `written: ${filesWritten} | processed: ${processed.length} | from cache: ${
+      cached.length
+    } | ${hrTime}`,
+  )
+  return {
+    name: task.name,
+    taskResult,
+    cached,
+    processed,
+    resources,
+    filesWritten,
+  }
 }
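The new argument scan in `getConfig()` is easy to exercise in isolation. Below is a minimal sketch that copies the `findConfigPath` helper from the hunk above; the sample argument arrays are illustrative, not real CLI input:

```js
// Standalone copy of the helper added to getConfig() above, for illustration only.
import path from "path"
import process from "node:process"

const defaultPath = path.join(process.cwd(), "rhedyn.config.js")

const findConfigPath = (args, index = 0) => {
  if (index >= args.length) return defaultPath
  if (
    (args[index] === "-c" || args[index] === "--config") &&
    index + 1 < args.length
  ) {
    return path.resolve(args[index + 1])
  }
  return findConfigPath(args, index + 1)
}

console.log(findConfigPath([]))                              // <cwd>/rhedyn.config.js
console.log(findConfigPath(["--config", "site/custom.js"]))  // <cwd>/site/custom.js
console.log(findConfigPath(["-c", "/etc/rhedyn/config.js"])) // /etc/rhedyn/config.js
console.log(findConfigPath(["-c"]))                          // default: the flag has no value
```

Only the first `-c`/`--config` pair is honoured; later occurrences are ignored, and a flag with no value falls through to the default path.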

src/processors.js (+8 -21)

@@ -29,7 +29,7 @@ function createMarkdownRenderer(meta) {
         image({ href, title, text }) {
           const attrs = [`alt="${text}"`]
 
-          const foundSrcSet = meta.resources.images[slugifyString(href)]
+          const foundSrcSet = meta.resources.images?.[slugifyString(href)]
 
           if (foundSrcSet) {
             const srcSetString = foundSrcSet.detail.srcSet
@@ -56,10 +56,7 @@ function createMarkdownRenderer(meta) {
     })
 }
 async function findTemplatePath(templateDirs, templateName) {
-  const templatePath = await firstFound(
-    templateDirs,
-    `${templateName}.hbs`,
-  )
+  const templatePath = await firstFound(templateDirs, `${templateName}.hbs`)
   if (!templatePath) throw new Error(`Template not found: ${templateName}`)
   return templatePath
 }
@@ -75,14 +72,10 @@ async function getTemplate(templatePath) {
   return templateCache.get(templatePath)
 }
 
-export async function renderTemplate({
-  config,
-  meta,
-}) {
-  const templatePath = config.filePath || await findTemplatePath(
-    config.templateDirs,
-    config.template,
-  )
+export async function renderTemplate({ config, meta }) {
+  const templatePath =
+    config.filePath ||
+    (await findTemplatePath(config.templateDirs, config.template))
   const fileOutputPath = config.fileOutputPath
   const href = getHref(fileOutputPath, meta)
 
@@ -121,10 +114,7 @@ export async function renderTemplate({
     ref: slugifyString(href),
   }
 }
-export async function renderMarkdownToHtml({
-  config,
-  meta,
-}) {
+export async function renderMarkdownToHtml({ config, meta }) {
   const filePath = config.filePath
   const fileOutputPath = config.fileOutputPath
   const content = await fs.readFile(filePath, "utf8")
@@ -139,10 +129,7 @@ export async function renderMarkdownToHtml({
     ref: slugifyString(filePath),
   }
 }
-export async function renderMarkdownWithTemplate({
-  config,
-  meta,
-}) {
+export async function renderMarkdownWithTemplate({ config, meta }) {
   const filePath = config.filePath
   const fileOutputPath = config.fileOutputPath
   const content = await fs.readFile(filePath, "utf8")
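The only behavioural change in this file is the optional chain on `meta.resources.images`. A small sketch of what it guards against; the sample objects are made up for illustration, not real task output:

```js
// Illustrative only: shapes loosely modelled on meta.resources, not real rhedyn data.
const withImages = {
  resources: { images: { "hero-png": { detail: { srcSet: "hero-480.png 480w" } } } },
}
const withoutImages = { resources: {} } // e.g. no image task has populated resources yet

const lookup = (meta, slug) => meta.resources.images?.[slug]

console.log(lookup(withImages, "hero-png"))    // the matching resource object
console.log(lookup(withoutImages, "hero-png")) // undefined, where the old code threw a TypeError
```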

src/util/file-system.js (+15 -4)

@@ -1,5 +1,6 @@
 import fs from "node:fs/promises"
 import path from "path"
+import os from "os"
 import { glob } from "glob"
 
 export async function fileExists(filePath) {
@@ -44,7 +45,7 @@ export async function readFilesByGlob(globConfigs) {
         ignore: [],
         ...globConfig,
       }
-      const matches = await glob(pattern, {
+      const matches = await glob(expandTilde(pattern), {
         ignore,
         dot,
       })
@@ -56,6 +57,11 @@ export async function readFilesByGlob(globConfigs) {
   return [...new Set(files)]
 }
 
+export function expandTilde(path) {
+  if (!path.startsWith("~")) return path
+  return path.replace(/^~(?=$|\/|\\)/, os.homedir())
+}
+
 export async function checkFilesExist(files, baseDir) {
   const filesToCheck = Array.isArray(files) ? files : [files]
   const fileCheckResults = await Promise.all(
@@ -65,9 +71,14 @@ export async function checkFilesExist(files, baseDir) {
       return { filePath, exists }
     }),
   )
-  return fileCheckResults.reduce((sorted, { filePath, exists }) => {
-    return exists ? { ...sorted, present: [...sorted.present, filePath] } : { ...sorted, absent: [...sorted.absent, filePath] }
-  }, { present: [], absent: [] })
+  return fileCheckResults.reduce(
+    (sorted, { filePath, exists }) => {
+      return exists
+        ? { ...sorted, present: [...sorted.present, filePath] }
+        : { ...sorted, absent: [...sorted.absent, filePath] }
+    },
+    { present: [], absent: [] },
+  )
 }
 
 export async function writeFile(filePath, content) {
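The new `expandTilde` helper only rewrites a bare leading `~`. A short sketch of its behaviour; the local copy and the sample paths are for illustration:

```js
// Local copy of the helper added above, for illustration only.
import os from "os"

function expandTilde(p) {
  if (!p.startsWith("~")) return p
  return p.replace(/^~(?=$|\/|\\)/, os.homedir())
}

console.log(expandTilde("~/sites/blog"))  // e.g. /home/you/sites/blog
console.log(expandTilde("~"))             // the home directory itself
console.log(expandTilde("~other/sites"))  // unchanged: "~user" forms are not expanded
console.log(expandTilde("content/~tmp"))  // unchanged: "~" must be the first character
```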