import {
  checkCache,
  updateCache,
} from "./cache.js"
import {
  createTrackedObject,
  fileExists,
  readFilesByGlob,
  removeBasePaths,
  removeCwd,
  replaceFileExtension,
} from "./util.js"
import path from "path"
import process from "node:process"
import { getLogger } from "./logging.js"

// Load rhedyn.config.js from the current working directory, if present.
export async function getConfig() {
  const configPath = path.join(process.cwd(), "rhedyn.config.js")
  const configFileExists = await fileExists(configPath)
  if (configFileExists) {
    try {
      const config = await import(configPath)
      return config.default || config
    } catch (err) {
      console.error("Error reading rhedyn.config.js:", err)
      throw new Error("Failed reading config file")
    }
  } else {
    return
  }
}

// Run a single job: check the cache, invoke the task's processor and,
// on a cache miss, record the dependencies it touched.
async function runTask({ meta, config, jobId }) {
  const log = getLogger(config.logLevel ? config.logLevel : meta.opts.logLevel, jobId)
  log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
  log.trace(`config: ${JSON.stringify(config, null, 2)}`)

  const stateObject = {
    meta,
    config,
  }

  const cache = (meta.opts.cacheDir && !config.skipCache)
    ? await checkCache(
      jobId,
      stateObject,
      meta.opts,
    )
    : { disabled: true, reason: "Cache disabled" }

  if (cache && cache.hit) {
    log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`)
    return { ...cache.taskResult, fromCache: true }
  }
  log.debug(`Cache miss for ${jobId} (${cache.reason})`)

  // Track which state properties the processor reads so they can be recorded as cache dependencies.
  const state = meta.opts.cacheDir ? createTrackedObject(stateObject) : { proxy: stateObject }
  const {
    detail,
    paths = [],
    deps: processorDeps,
    ref,
  } = await config.processor(state.proxy)

  const taskResult = {
    detail,
    // Store output paths relative to the output directory.
    paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
    ref,
  }
  log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`)

  if (cache && !cache.disabled) {
    log.debug(`Updating cache for ${jobId}: ${cache.filePath}`)
    const processorPathDeps = processorDeps?.paths || []
    const processorStateDeps = processorDeps?.state || []
    const configPathDeps = config.deps?.paths || []
    const configStateDeps = config.deps?.state || []
    await updateCache(
      meta.opts.cacheDir,
      jobId,
      removeCwd([
        ...configPathDeps,
        ...processorPathDeps,
        config?.filePath,
      ].filter(item => !!item)),
      [
        ...configStateDeps,
        ...processorStateDeps,
        ...(state?.accessed || []),
      ].filter(item => !!item),
      taskResult,
      cache.updates,
      meta.opts.includeStateValues,
    )
  }
  return taskResult
}

// Expand a file-based task into one job per matching input file.
async function expandFileTask(patternsToInclude, config, meta) {
  const filesToProcess = await readFilesByGlob(patternsToInclude)
  const pathsToStrip = config.stripPaths || []
  const outputDir = config.outputDir || ""
  return await Promise.all(
    filesToProcess.map(async filePath => {
      const fileOutputPath = path.join(
        meta.opts.outDir,
        outputDir,
        replaceFileExtension(
          removeBasePaths(pathsToStrip, filePath),
          config.outputFileExtension,
        ),
      )
      const fileOutputDir = path.dirname(fileOutputPath)
      const jobConfig = {
        ...config,
        filePath,
        fileOutputDir,
        fileOutputPath,
      }
      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
    }),
  )
}

// Run a task either once, or once per input file when input patterns are configured.
export async function expandAndRunTask(meta, config) {
  const includes = meta.opts?.include?.[config.name] || []
  const patternsToInclude = [...(config?.inputFiles || []), ...includes]
  if (patternsToInclude.length) {
    return expandFileTask(patternsToInclude, config, meta)
  }
  const jobId = config.jobId || config.name
  const taskResult = await runTask({ meta, config, jobId })
  return [taskResult]
}

// Run a task, collect its results, and log a summary of the work done.
export async function processTask(meta, task) {
  const log = getLogger(meta.opts.logLevel, task.name)
  const startTime = performance.now()
  const taskResult = await expandAndRunTask(meta, task)
  const cached = taskResult.filter(result => result.fromCache)
  const processed = taskResult.filter(result => !result.fromCache)
  // Index any results that declare a ref by that ref.
  const resources = taskResult.reduce((obj, tResult) => tResult.ref ? ({ ...obj, [tResult.ref]: tResult }) : obj, {})
  const endTime = performance.now()
  const timeTaken = endTime - startTime
  const hrTime = timeTaken > 1000
    ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s`
    : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
  const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
  log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
  return { name: task.name, taskResult, cached, processed, resources, filesWritten }
}
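
// Usage sketch (illustrative only): how a task from rhedyn.config.js might be
// wired into processTask. The exact shape of `meta` and of the task object
// below is an assumption inferred from the fields this file reads
// (meta.opts.outDir, meta.opts.logLevel, config.name, config.processor,
// config.inputFiles, config.fileOutputPath, ...), not a documented API.
//
//   const meta = { opts: { outDir: "dist", logLevel: "info" } }
//   const task = {
//     name: "pages",
//     inputFiles: ["pages/**/*.md"],      // one job per matching file
//     outputFileExtension: ".html",
//     processor: async ({ config }) => {
//       // ...render config.filePath and write it to config.fileOutputPath...
//       return { detail: "rendered", paths: [config.fileOutputPath] }
//     },
//   }
//   const summary = await processTask(meta, task)
//   // summary: { name, taskResult, cached, processed, resources, filesWritten }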