Browse Source

Add config override switch

Craig Fletcher 2 days ago
parent
commit
698d0a2707
2 changed files with 154 additions and 95 deletions
  1. 5 1
      README.md
  2. 149 94
      src/lib.js

+ 5 - 1
README.md

@@ -6,7 +6,8 @@ Run this script from a directory containing markdown, templates and styles and a
 containing the rendered-out static assets ready for deployment.
 
 While the defaults should work for most (of my) use cases, you can configure behaviour using a `rhedyn.config.js` in the
-directory where you run the tool.
+directory where you run the tool, or pass a path to a config using `-c path/to/config.js` or
+`--config path/to/config.js`.
 
 "Rhedyn" is Welsh for "fern", and is pronounced a bit like "read in". 
 
@@ -54,6 +55,9 @@ considerably larger cache files.
 
 ## Configuration
 
+By default, rhedyn will look for `rhedyn.config.js` in the current directory and fall back to the built-in default
+configuration if it is not found. You can override this behaviour using the `-c path/to/config.js` or
+`--config path/to/config.js` CLI switches.
+
 A working example of how to configure can be found in `src/defaults.js`, with the processors separated out to
 `src/processors.js` for tidiness. 
 

+ 149 - 94
src/lib.js

@@ -1,7 +1,4 @@
-import {
-  checkCache,
-  updateCache,
-} from "./cache.js"
+import { checkCache, updateCache } from "./cache.js";
 import {
   createTrackedObject,
   fileExists,
@@ -9,97 +6,121 @@ import {
   removeBasePaths,
   removeCwd,
   replaceFileExtension,
-  getValueAtPath,
-} from "./util/index.js"
-import path from "path"
-import process from "node:process"
-import { getLogger } from "./logging.js"
+  getValueAtPath
+} from "./util/index.js";
+import path from "path";
+import process from "node:process";
+import { getLogger } from "./logging.js";
 
 export async function getConfig() {
-  const configPath = path.join(process.cwd(), "rhedyn.config.js")
-  const configFileExists = await fileExists(configPath)
+  const args = process.argv.slice(2);
+  const defaultPath = path.join(process.cwd(), "rhedyn.config.js");
+
+  const findConfigPath = (args, index = 0) => {
+    if (index >= args.length) {
+      return defaultPath;
+    }
+    if (
+      (args[index] === "-c" || args[index] === "--config") &&
+      index + 1 < args.length
+    ) {
+      return path.resolve(args[index + 1]);
+    }
+    return findConfigPath(args, index + 1);
+  };
+
+  const configPath = findConfigPath(args);
+  const configFileExists = await fileExists(configPath);
+
   if (configFileExists) {
     try {
-      const config = await import(configPath)
-      return config.default || config
+      const config = await import(configPath);
+      return config.default || config;
     } catch (err) {
-      console.error("Error reading rhedyn.config.js:", err)
-      throw new Error("Failed reading config file")
+      console.error(`Error reading config file at ${configPath}:`, err);
+      throw new Error("Failed reading config file");
     }
   } else {
-    return
+    return;
   }
 }
+
/**
 * Run a single job: invoke the configured processor and (optionally) cache
 * its result.
 *
 * @param {object} args
 * @param {object} args.meta   - shared run metadata; `meta.opts` carries run
 *   options read here: `cacheDir`, `outDir`, `logLevel`, `includeStateValues`.
 * @param {object} args.config - per-job config; must provide `processor`, and
 *   may provide `logLevel`, `skipCache`, `deps`, `stateSelectors`, `filePath`.
 * @param {string} args.jobId  - identifier used for logging and cache lookup.
 * @returns {Promise<object>} task result `{ detail, paths, ref }`, with
 *   `fromCache: true` added when the result was served from cache.
 */
async function runTask({ meta, config, jobId }) {
  // A per-job log level overrides the global one.
  const log = getLogger(
    config.logLevel ? config.logLevel : meta.opts.logLevel,
    jobId
  );
  log.trace(`meta: ${JSON.stringify(meta, null, 2)}`);
  log.trace(`config: ${JSON.stringify(config, null, 2)}`);

  const stateObject = {
    meta,
    config
  };
  // Only consult the cache when a cache dir is configured and the job has
  // not opted out; otherwise synthesise a disabled-cache marker so the
  // logging below has a reason to report.
  const cache =
    meta.opts.cacheDir && !config.skipCache
      ? await checkCache(jobId, stateObject, meta.opts)
      : { disabled: true, reason: "Cache disabled" };

  if (cache && cache.hit) {
    // Cache hit: short-circuit without calling the processor at all.
    log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`);
    return { ...cache.taskResult, fromCache: true };
  }
  log.debug(`Cache miss for ${jobId} (${cache.reason})`);

  // createTrackedObject presumably wraps stateObject in a proxy that records
  // which state paths the processor reads (surfaced below as
  // `state.accessed`) — TODO confirm against ./util/index.js.
  const state = meta.opts.cacheDir
    ? createTrackedObject(stateObject)
    : { proxy: stateObject };

  const {
    detail = {},
    paths = [],
    deps: processorDeps,
    ref
  } = await config.processor(state.proxy);

  const taskResult = {
    detail,
    // NOTE(review): String.replace removes only the FIRST occurrence of
    // outDir; fine as long as outDir appears once in each output path.
    paths: paths.map(fileOutputPath =>
      fileOutputPath.replace(meta.opts.outDir, "")
    ),
    ref
  };
  log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`);
  if (cache && !cache.disabled) {
    log.debug(`Updating cache for ${jobId}: ${cache.filePath}`);
    // Dependency lists come from both the processor's return value and the
    // static job config; each piece defaults to an empty list when absent.
    const processorPathDeps = processorDeps?.paths || [];
    const processorStateDeps = processorDeps?.state || [];
    const configPathDeps = config.deps?.paths || [];
    const configStateDeps = config.deps?.state || [];
    const stateSelectors = config.stateSelectors || [];
    await updateCache(
      meta.opts.cacheDir,
      jobId,
      // File dependencies, made cwd-relative; falsy entries dropped.
      removeCwd(
        [...configPathDeps, ...processorPathDeps, config?.filePath].filter(
          item => !!item
        )
      ),
      // State dependencies: declared deps + selectors + whatever the
      // processor actually accessed during this run.
      [
        ...configStateDeps,
        ...stateSelectors,
        ...processorStateDeps,
        ...(state?.accessed || [])
      ].filter(item => !!item),
      taskResult,
      cache.updates,
      meta.opts.includeStateValues
    );
  }

  return taskResult;
}
 
 async function expandFileTask(patternsToInclude, config, meta) {
-  const filesToProcess = await readFilesByGlob(patternsToInclude)
-  const pathsToStrip = config.stripPaths || []
-  const outputDir = config.outputDir || ""
+  const filesToProcess = await readFilesByGlob(patternsToInclude);
+  const pathsToStrip = config.stripPaths || [];
+  const outputDir = config.outputDir || "";
   return await Promise.all(
     filesToProcess.map(async filePath => {
       const fileOutputPath = path.join(
@@ -107,66 +128,100 @@ async function expandFileTask(patternsToInclude, config, meta) {
         outputDir,
         replaceFileExtension(
           removeBasePaths(pathsToStrip, filePath),
-          config.outputFileExtension,
-        ),
-      )
+          config.outputFileExtension
+        )
+      );
 
-      const fileOutputDir = path.dirname(fileOutputPath)
+      const fileOutputDir = path.dirname(fileOutputPath);
       const jobConfig = {
         ...config,
-        filePath, fileOutputDir, fileOutputPath,
-      }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
-    }),
-  )
+        filePath,
+        fileOutputDir,
+        fileOutputPath
+      };
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${filePath}`
+      });
+    })
+  );
 }
 
 async function expandStateTask(stateToExpand, config, meta) {
-  const stateToProcess = stateToExpand.map(property => {
-    const values = getValueAtPath(meta, property)
-    const expandedValues = Array.isArray(values) ? values : Object.values(values)
-    return expandedValues.map((value, index) => ({ property, index, value }))
-  }).flat()
+  const stateToProcess = stateToExpand
+    .map(property => {
+      const values = getValueAtPath(meta, property);
+      const expandedValues = Array.isArray(values)
+        ? values
+        : Object.values(values);
+      return expandedValues.map((value, index) => ({ property, index, value }));
+    })
+    .flat();
 
   return await Promise.all(
-    stateToProcess.map(async (stateJob) => {
+    stateToProcess.map(async stateJob => {
       const jobConfig = {
         ...config,
-        ...stateJob.value.detail,
-      }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}` })
-    }),
-  )
+        ...stateJob.value.detail
+      };
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}`
+      });
+    })
+  );
 }
 
 export async function expandAndRunTask(meta, config) {
-  const includes = meta.opts?.include?.[config.name] || []
-  const patternsToInclude = [...(config?.inputFiles || []), ...includes]
+  const includes = meta.opts?.include?.[config.name] || [];
+  const patternsToInclude = [...(config?.inputFiles || []), ...includes];
 
   if (patternsToInclude.length) {
-    return expandFileTask(patternsToInclude, config, meta)
+    return expandFileTask(patternsToInclude, config, meta);
   }
 
   if (config.stateSelectors) {
-    return expandStateTask(config.stateSelectors, config, meta)
+    return expandStateTask(config.stateSelectors, config, meta);
   }
 
-  const jobId = config.jobId || config.name
-  const taskResult = await runTask({ meta, config, jobId })
-  return [taskResult]
+  const jobId = config.jobId || config.name;
+  const taskResult = await runTask({ meta, config, jobId });
+  return [taskResult];
 }
 
 export async function processTask(meta, task) {
-  const log = getLogger(meta.opts.logLevel, task.name)
-  const startTime = performance.now()
-  const taskResult = await expandAndRunTask(meta, task)
-  const cached = taskResult.filter(taskResult => taskResult.fromCache)
-  const processed = taskResult.filter(taskResult => !taskResult.fromCache)
-  const resources = taskResult.reduce((obj, tResult) => tResult.ref ? ({ ...obj, [tResult.ref]: tResult }) : obj, {})
-  const endTime = performance.now()
-  const timeTaken = (endTime - startTime)
-  const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-  const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
-  log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
-  return { name: task.name, taskResult, cached, processed, resources, filesWritten }
+  const log = getLogger(meta.opts.logLevel, task.name);
+  const startTime = performance.now();
+  const taskResult = await expandAndRunTask(meta, task);
+  const cached = taskResult.filter(taskResult => taskResult.fromCache);
+  const processed = taskResult.filter(taskResult => !taskResult.fromCache);
+  const resources = taskResult.reduce(
+    (obj, tResult) => (tResult.ref ? { ...obj, [tResult.ref]: tResult } : obj),
+    {}
+  );
+  const endTime = performance.now();
+  const timeTaken = endTime - startTime;
+  const hrTime =
+    timeTaken > 1000
+      ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s`
+      : `${Number.parseFloat(timeTaken).toFixed(2)}ms`;
+  const filesWritten = processed.reduce(
+    (acc, cur) => acc + cur.paths.length,
+    0
+  );
+  log.info(
+    `written: ${filesWritten} | processed: ${processed.length} | from cache: ${
+      cached.length
+    } | ${hrTime}`
+  );
+  return {
+    name: task.name,
+    taskResult,
+    cached,
+    processed,
+    resources,
+    filesWritten
+  };
 }