Browse Source

Refactoring processor API

* changed: path -> paths; the value is now always an array
* removed: written; all processors now handle writing their own output
* removed: result; any data to be added to state must now be returned
  as detail
Craig Fletcher 5 months ago
parent
commit
d3f708a3f1
5 changed files with 41 additions and 32 deletions
  1. 1 1
      src/defaults.js
  2. 2 2
      src/index.js
  3. 3 14
      src/lib.js
  4. 23 13
      src/processors.js
  5. 12 2
      src/util.js

+ 1 - 1
src/defaults.js

@@ -62,7 +62,7 @@ export const opts = {
   outDir: "dist/",
   runDir: process.cwd(),
   cacheDir: ".cache",
-  logLevel: "info",
+  logLevel: "debug",
   defaultTemplate: "default",
   include: {
     styles: [{ pattern: "~/.rhedyn/styles/*.scss" }],

+ 2 - 2
src/index.js

@@ -23,7 +23,7 @@ async function runTask(meta, task) {
     ...meta,
     resources: {
       ...meta.resources,
-      [task.name]: allResults.reduce((obj, path) => ({ ...obj, [path.ref]: path }), {}),
+      [task.name]: allResults.reduce((obj, taskResult) => ({ ...obj, [taskResult.ref]: taskResult }), {}),
     },
   }
   return { taskResult, cachedResults, processedResults }
@@ -41,7 +41,7 @@ const taskRunner = tasks.reduce(
     const endTime = performance.now()
     const timeTaken = (endTime - startTime)
     const hrTime = timeTaken > 1000 ? `${Number.parseFloat(timeTaken / 1000).toFixed(2)}s` : `${Number.parseFloat(timeTaken).toFixed(2)}ms`
-    const filesWritten = processedResults.reduce((acc, cur) => acc + cur.path.length, 0)
+    const filesWritten = processedResults.reduce((acc, cur) => acc + cur.paths.length, 0)
     log.info(`written: ${filesWritten} | processed: ${processedResults.length} | from cache: ${cachedResults.length} | ${hrTime}`)
     log.groupEnd()
     return taskResult

+ 3 - 14
src/lib.js

@@ -51,10 +51,6 @@ export async function processFiles(config, meta) {
       )
 
       const fileOutputDir = path.dirname(fileOutputPath)
-      const exists = await fileExists(fileOutputDir)
-      if (!exists) {
-        await fs.mkdir(fileOutputDir, { recursive: true })
-      }
 
       const stateObject = {
         filePath,
@@ -84,26 +80,19 @@ export async function processFiles(config, meta) {
         : { proxy: stateObject }
 
       const {
-        result,
         detail,
-        written,
+        paths,
         deps: processorDeps,
         ref,
       } = await config.processor(state.proxy)
 
-      if (!written) {
-        await fs.writeFile(fileOutputPath, result, {
-          encoding: "utf8",
-        })
-      }
-
       const taskRef = ref ? slugifyString(ref) : slugifyString(fileOutputPath)
       const taskResult = {
         detail,
-        path: written ? result : [fileOutputPath.replace(meta.opts.outDir, "")],
+        paths: paths.map(fileOutputPath => fileOutputPath.replace(meta.opts.outDir, "")),
         ref: taskRef,
       }
-      log.debug(`Wrote ${taskResult.path.length} files for ${filePath}`)
+      log.debug(`Wrote ${taskResult.paths.length} files for ${filePath}`)
       if (meta.opts.cacheDir) {
         log.debug(`Updating cache for ${filePath}`)
         const processorPathDeps = processorDeps?.paths || []

+ 23 - 13
src/processors.js

@@ -5,6 +5,7 @@ import {
   getCleanPath,
   getHref,
   slugifyString,
+  writeFile,
 } from "./util.js"
 import fs from "fs/promises"
 import handlebars from "handlebars"
@@ -94,44 +95,55 @@ export async function renderMarkdownWithTemplate({
     minifyJS: true,
   })
 
+  await writeFile(fileOutputPath, minifiedHtml)
+
   return {
     detail: { ...data, href },
-    result: minifiedHtml,
+    paths: [fileOutputPath],
     deps: {
       paths: [template.path],
     },
   }
 }
 
-export async function compileSass({ filePath }) {
+export async function compileSass({ filePath, fileOutputPath, meta }) {
   const result = await sass.compileAsync(filePath, { style: "compressed" })
+  await writeFile(fileOutputPath, result.css)
   return {
-    result: result.css,
+    paths: [fileOutputPath],
+    detail: {
+      href: fileOutputPath.replace(meta.opts.outDir, "")
+    },
     deps: {
       paths: [...result.loadedUrls.map(item => item.pathname)],
     },
   }
 }
 
-export async function optimiseSvg({ filePath }) {
+export async function optimiseSvg({ filePath, fileOutputPath }) {
   const svgString = await fs.readFile(filePath, "utf8")
   const result = optimize(svgString, {
     plugins: ["preset-default"],
   })
+  await writeFile(fileOutputPath, result.data)
   return {
-    result: result.data,
+    paths: [fileOutputPath],
   }
 }
 
-export async function copy({ filePath }) {
+export async function copy({ filePath, fileOutputPath }) {
   const fileContent = await fs.readFile(filePath, "utf8")
-  return { result: fileContent }
+  await writeFile(fileOutputPath, fileContent)
+  return {
+    paths: [fileOutputPath],
+  }
 }
 
 export async function imageToWebP({ filePath, meta, fileOutputDir, config }) {
   const sourceExtension = path.extname(filePath)
   const outputExtension = config.outputFileExtension
   const base = path.basename(filePath, sourceExtension)
+  await fs.mkdir(fileOutputDir, { recursive: true })
 
   const original = sharp(filePath)
   const metadata = await original.metadata()
@@ -164,9 +176,8 @@ export async function imageToWebP({ filePath, meta, fileOutputDir, config }) {
   const imageRef = getCleanPath(path.join(filePath), meta)
 
   return {
-    result: srcSet.map(src => src[0]),
+    paths: srcSet.map(src => src[0]),
     detail: { imageRef, srcSet, aspectRatio },
-    written: true,
     ref: imageRef,
   }
 }
@@ -210,7 +221,7 @@ export async function generateFavicons({ filePath, meta, fileOutputDir }) {
     await Promise.all(
       response.images.map(async image => {
         const outputPath = path.join(fileOutputDir, image.name)
-        await fs.writeFile(outputPath, image.contents)
+        await writeFile(outputPath, image.contents)
       }),
     )
 
@@ -218,7 +229,7 @@ export async function generateFavicons({ filePath, meta, fileOutputDir }) {
     await Promise.all(
       response.files.map(async file => {
         const outputPath = path.join(fileOutputDir, file.name)
-        await fs.writeFile(outputPath, file.contents)
+        await writeFile(outputPath, file.contents)
       }),
     )
 
@@ -228,7 +239,7 @@ export async function generateFavicons({ filePath, meta, fileOutputDir }) {
       detail: {
         htmlMeta,
       },
-      result: [
+      paths: [
         ...response.images.map(img =>
           getCleanPath(path.join(fileOutputDir, img.name), meta),
         ),
@@ -236,7 +247,6 @@ export async function generateFavicons({ filePath, meta, fileOutputDir }) {
           getCleanPath(path.join(fileOutputDir, file.name), meta),
         ),
       ],
-      written: true,
       ref: "metatags",
     }
   } catch (error) {

+ 12 - 2
src/util.js

@@ -211,7 +211,9 @@ export async function getFileHash(filePath, algorithm = "md5") {
 export async function checkPathExists(files, baseDir) {
   if (Array.isArray(files)) {
     return (await Promise.all(
-      files.map(file => fileExists(path.join(baseDir, file))),
+      files.map(file => {
+        return fileExists(path.join(baseDir, file))
+      }),
     )).every(item => !!item)
   }
   return fileExists(path.join(baseDir, files))
@@ -250,7 +252,7 @@ function getStatePropsHash(state, props) {
 export async function checkCache(name, currentState, opts) {
   const existingCacheObject = await readCache(opts.cacheDir, name)
   if (existingCacheObject) {
-    const outFiles = existingCacheObject.taskResult.path
+    const outFiles = existingCacheObject.taskResult.paths
     const outFilesExist = await checkPathExists(outFiles, opts.outDir)
     if (outFilesExist) {
       const stateHash = getStatePropsHash(
@@ -377,3 +379,11 @@ async function readCache(cacheDir, name) {
     return false
   }
 }
+
+export async function writeFile(filePath, content) {
+  const fileDir = path.dirname(filePath)
+  await fs.mkdir(fileDir, { recursive: true })
+  return await fs.writeFile(filePath, content, {
+    encoding: "utf8",
+  })
+}