@@ -1,7 +1,4 @@
-import {
-  checkCache,
-  updateCache,
-} from "./cache.js"
+import { checkCache, updateCache } from "./cache.js"
 import {
   createTrackedObject,
   fileExists,
@@ -10,28 +7,50 @@ import {
   removeCwd,
   replaceFileExtension,
   getValueAtPath,
+  expandTilde,
 } from "./util/index.js"
 import path from "path"
 import process from "node:process"
 import { getLogger } from "./logging.js"
 
 export async function getConfig() {
-  const configPath = path.join(process.cwd(), "rhedyn.config.js")
+  const args = process.argv.slice(2)
+  const defaultPath = path.join(process.cwd(), "rhedyn.config.js")
+
+  const findConfigPath = (args, index = 0) => {
+    if (index >= args.length) {
+      return defaultPath
+    }
+    if (
+      (args[index] === "-c" || args[index] === "--config") &&
+      index + 1 < args.length
+    ) {
+      return path.resolve(args[index + 1])
+    }
+    return findConfigPath(args, index + 1)
+  }
+
+  const configPath = findConfigPath(args)
   const configFileExists = await fileExists(configPath)
+
   if (configFileExists) {
     try {
       const config = await import(configPath)
       return config.default || config
     } catch (err) {
-      console.error("Error reading rhedyn.config.js:", err)
+      console.error(`Error reading config file at ${configPath}:`, err)
       throw new Error("Failed reading config file")
     }
   } else {
     return
   }
 }
+
 async function runTask({ meta, config, jobId }) {
-  const log = getLogger(config.logLevel ? config.logLevel : meta.opts.logLevel, jobId)
+  const log = getLogger(
+    config.logLevel ? config.logLevel : meta.opts.logLevel,
+    jobId,
+  )
   log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
   log.trace(`config: ${JSON.stringify(config, null, 2)}`)
 
@@ -39,14 +58,10 @@ async function runTask({ meta, config, jobId }) {
     meta,
     config,
   }
-  const cache = (meta.opts.cacheDir && !config.skipCache) ?
-    await checkCache(
-      jobId,
-      stateObject,
-      meta.opts,
-    )
-    :
-    { disabled: true, reason: "Cache disabled" }
+  const cache =
+    meta.opts.cacheDir && !config.skipCache
+      ? await checkCache(jobId, stateObject, meta.opts)
+      : { disabled: true, reason: "Cache disabled" }
 
   if (cache && cache.hit) {
     log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`)
@@ -67,7 +82,9 @@ async function runTask({ meta, config, jobId }) {
 
   const taskResult = {
     detail,
-    paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
+    paths: paths.map(fileOutputPath =>
+      fileOutputPath.replace(meta.opts.outDir, ""),
+    ),
     ref,
   }
   log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`)
@@ -81,11 +98,18 @@ async function runTask({ meta, config, jobId }) {
   await updateCache(
     meta.opts.cacheDir,
     jobId,
-    removeCwd([
-      ...configPathDeps, ...processorPathDeps, config?.filePath,
-    ].filter(item => !!item)),
+    removeCwd(
+      [
+        ...configPathDeps, ...processorPathDeps, config?.filePath,
+      ].filter(
+        item => !!item,
+      ),
+    ),
     [
-      ...configStateDeps, ...stateSelectors, ...processorStateDeps, ...(state?.accessed || []),
+      ...configStateDeps,
+      ...stateSelectors,
+      ...processorStateDeps,
+      ...(state?.accessed || []),
     ].filter(item => !!item),
     taskResult,
     cache.updates,
@@ -98,7 +122,7 @@ async function runTask({ meta, config, jobId }) {
 
 async function expandFileTask(patternsToInclude, config, meta) {
   const filesToProcess = await readFilesByGlob(patternsToInclude)
-  const pathsToStrip = config.stripPaths || []
+  const pathsToStrip = (config.stripPaths || []).map(path => expandTilde(path))
   const outputDir = config.outputDir || ""
   return await Promise.all(
     filesToProcess.map(async filePath => {
@@ -114,27 +138,41 @@ async function expandFileTask(patternsToInclude, config, meta) {
       const fileOutputDir = path.dirname(fileOutputPath)
       const jobConfig = {
         ...config,
-        filePath, fileOutputDir, fileOutputPath,
+        filePath,
+        fileOutputDir,
+        fileOutputPath,
       }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${filePath}`,
+      })
     }),
   )
 }
 
 async function expandStateTask(stateToExpand, config, meta) {
-  const stateToProcess = stateToExpand.map(property => {
-    const values = getValueAtPath(meta, property)
-    const expandedValues = Array.isArray(values) ? values : Object.values(values)
-    return expandedValues.map((value, index) => ({ property, index, value }))
-  }).flat()
+  const stateToProcess = stateToExpand
+    .map(property => {
+      const values = getValueAtPath(meta, property)
+      const expandedValues = Array.isArray(values)
+        ? values
+        : Object.values(values)
+      return expandedValues.map((value, index) => ({ property, index, value }))
+    })
+    .flat()
 
   return await Promise.all(
-    stateToProcess.map(async (stateJob) => {
+    stateToProcess.map(async stateJob => {
       const jobConfig = {
         ...config,
         ...stateJob.value.detail,
       }
-      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}` })
+      return runTask({
+        meta,
+        config: jobConfig,
+        jobId: `${config.name} @ ${stateJob.property}:${stateJob.index}`,
+      })
     }),
   )
 }
@@ -162,11 +200,31 @@ export async function processTask(meta, task) {
   const taskResult = await expandAndRunTask(meta, task)
   const cached = taskResult.filter(taskResult => taskResult.fromCache)
   const processed = taskResult.filter(taskResult => !taskResult.fromCache)
-  const resources = taskResult.reduce((obj, tResult) => tResult.ref ? ({ ...obj, [tResult.ref]: tResult }) : obj, {})
+  const resources = taskResult.reduce(
+    (obj, tResult) => (tResult.ref ? { ...obj, [tResult.ref]: tResult } : obj),
+    {},
+  )
   const endTime = performance.now()
-  const timeTaken = (endTime - startTime)
-  const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-  const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
-  log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
-  return { name: task.name, taskResult, cached, processed, resources, filesWritten }
+  const timeTaken = endTime - startTime
+  const hrTime =
+    timeTaken > 1000
+      ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s`
+      : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
+  const filesWritten = processed.reduce(
+    (acc, cur) => acc + cur.paths.length,
+    0,
+  )
+  log.info(
+    `written: ${filesWritten} | processed: ${processed.length} | from cache: ${
+      cached.length
+    } | ${hrTime}`,
+  )
+  return {
+    name: task.name,
+    taskResult,
+    cached,
+    processed,
+    resources,
+    filesWritten,
+  }
 }