lib.js

import {
  checkCache,
  updateCache,
} from "./cache.js"
import {
  createTrackedObject,
  fileExists,
  readFilesByGlob,
  removeBasePaths,
  removeCwd,
  replaceFileExtension,
} from "./util.js"
import path from "path"
import process from "node:process"
import { getLogger } from "./logging.js"

// Load rhedyn.config.js from the current working directory, if present.
export async function getConfig() {
  const configPath = path.join(process.cwd(), "rhedyn.config.js")
  const configFileExists = await fileExists(configPath)
  if (!configFileExists) {
    return
  }
  try {
    const config = await import(configPath)
    return config.default || config
  } catch (err) {
    console.error("Error reading rhedyn.config.js:", err)
    throw new Error("Failed reading config file")
  }
}
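
// Note: getConfig only assumes that a rhedyn.config.js sits in the working
// directory and that it provides a default export (it falls back to the module
// namespace object when there is none). A hypothetical minimal config file
// could therefore look like the sketch below; the keys are placeholders, not
// an API defined in this module.
//
//   // rhedyn.config.js — illustrative sketch only
//   export default {
//     logLevel: "debug",
//   }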

// Run a single job: check the cache, invoke the task's processor, then record
// the result and its dependencies back into the cache.
async function runTask({ meta, config, jobId }) {
  const log = getLogger(config.logLevel ? config.logLevel : meta.opts.logLevel, jobId)
  log.trace(`meta: ${JSON.stringify(meta, null, 2)}`)
  log.trace(`config: ${JSON.stringify(config, null, 2)}`)

  const stateObject = {
    meta,
    config,
  }

  const cache = (meta.opts.cacheDir && !config.skipCache)
    ? await checkCache(jobId, stateObject, meta.opts)
    : { disabled: true, reason: "Cache disabled" }

  if (cache && cache.hit) {
    log.debug(`Loaded cache for ${jobId}: ${cache.filePath}`)
    return { ...cache.taskResult, fromCache: true }
  }
  log.debug(`Cache miss for ${jobId} (${cache.reason})`)

  // When caching is on, wrap the state in a tracking proxy so the keys the
  // processor reads can be recorded as state dependencies.
  const state = meta.opts.cacheDir
    ? createTrackedObject(stateObject)
    : { proxy: stateObject }

  const {
    detail,
    paths = [],
    deps: processorDeps,
    ref,
  } = await config.processor(state.proxy)

  const taskResult = {
    detail,
    paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
    ref,
  }
  log.debug(`Wrote ${taskResult.paths.length} files for ${jobId}`)

  if (cache && !cache.disabled) {
    log.debug(`Updating cache for ${jobId}: ${cache.filePath}`)
    const processorPathDeps = processorDeps?.paths || []
    const processorStateDeps = processorDeps?.state || []
    const configPathDeps = config.deps?.paths || []
    const configStateDeps = config.deps?.state || []
    await updateCache(
      meta.opts.cacheDir,
      jobId,
      // File dependencies, relative to the current working directory.
      removeCwd([
        ...configPathDeps, ...processorPathDeps, config?.filePath,
      ].filter(item => !!item)),
      // State dependencies, including keys the processor actually accessed.
      [
        ...configStateDeps, ...processorStateDeps, ...(state?.accessed || []),
      ].filter(item => !!item),
      taskResult,
      cache.updates,
      meta.opts.includeStateValues,
    )
  }
  return taskResult
}

// Expand a file-based task: run one job per file matched by the include globs.
async function expandFileTask(patternsToInclude, config, meta) {
  const filesToProcess = await readFilesByGlob(patternsToInclude)
  const pathsToStrip = config.stripPaths || []
  const outputDir = config.outputDir || ""
  return await Promise.all(
    filesToProcess.map(async filePath => {
      // Map the input path to its output path under outDir, swapping the extension.
      const fileOutputPath = path.join(
        meta.opts.outDir,
        outputDir,
        replaceFileExtension(
          removeBasePaths(pathsToStrip, filePath),
          config.outputFileExtension,
        ),
      )
      const fileOutputDir = path.dirname(fileOutputPath)
      const jobConfig = {
        ...config,
        filePath, fileOutputDir, fileOutputPath,
      }
      return runTask({ meta, config: jobConfig, jobId: `${config.name} @ ${filePath}` })
    }),
  )
}

// Run a task, expanding it into one job per input file when include patterns exist.
export async function expandAndRunTask(meta, config) {
  const includes = meta.opts?.include?.[config.name] || []
  const patternsToInclude = [...(config?.inputFiles || []), ...includes]
  if (patternsToInclude.length) {
    return expandFileTask(patternsToInclude, config, meta)
  }
  const jobId = config.jobId || config.name
  const taskResult = await runTask({ meta, config, jobId })
  return [taskResult]
}

// Run a task end to end and report files written, cache hits and timing.
export async function processTask(meta, task) {
  const log = getLogger(meta.opts.logLevel, task.name)
  const startTime = performance.now()
  const taskResult = await expandAndRunTask(meta, task)
  const cached = taskResult.filter(result => result.fromCache)
  const processed = taskResult.filter(result => !result.fromCache)
  // Index results that declare a ref by that ref.
  const resources = taskResult.reduce(
    (obj, result) => result.ref ? { ...obj, [result.ref]: result } : obj,
    {},
  )
  const timeTaken = performance.now() - startTime
  const hrTime = timeTaken > 1000
    ? `${(timeTaken / 1000).toFixed(2)}s`
    : `${timeTaken.toFixed(2)}ms`
  const filesWritten = processed.reduce((acc, cur) => acc + cur.paths.length, 0)
  log.info(`written: ${filesWritten} | processed: ${processed.length} | from cache: ${cached.length} | ${hrTime}`)
  return { name: task.name, taskResult, cached, processed, resources, filesWritten }
}
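
For illustration, a minimal sketch of how a caller might drive processTask. The shapes below mirror the fields this module reads from meta.opts and from a task config; the task name, globs, paths and the processor body are placeholder values, not an API defined in this file.

// Illustrative only — not part of lib.js.
import { processTask } from "./lib.js"

const buildMeta = {
  opts: {
    outDir: "dist",            // output root; stripped from result paths
    cacheDir: ".cache",        // presence of cacheDir enables checkCache/updateCache
    logLevel: "info",
    include: {},               // extra glob patterns keyed by task name
    includeStateValues: false,
  },
}

// A file task: inputFiles triggers per-file expansion in expandAndRunTask.
// The processor receives the (possibly proxied) { meta, config } state object
// and returns { detail, paths, deps, ref } as destructured in runTask.
const pagesTask = {
  name: "pages",
  inputFiles: ["src/**/*.md"],
  stripPaths: ["src"],
  outputDir: "pages",
  outputFileExtension: ".html",
  processor: async state => ({
    detail: `rendered ${state.config.filePath}`,
    paths: [state.config.fileOutputPath],
    deps: { paths: [state.config.filePath], state: [] },
  }),
}

const summary = await processTask(buildMeta, pagesTask)
console.log(`${summary.filesWritten} files written, ${summary.cached.length} from cache`)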