@@ -1,37 +1,37 @@
-import fs from "node:fs/promises";
-import os from "node:os";
-import path from "path";
-import { glob } from "glob";
-import { createReadStream } from "fs";
-import { createHash } from "crypto";
-import stableStringify from "json-stable-stringify";
+import fs from "node:fs/promises"
+import os from "node:os"
+import path from "path"
+import { glob } from "glob"
+import { createHash } from "crypto"
+import { createReadStream } from "fs"
+import stableStringify from "json-stable-stringify"
 
 export async function fileExists(filePath) {
   try {
-    const stats = await fs.stat(filePath);
-    return true;
+    await fs.stat(filePath)
+    return true
   } catch (err) {
     if (err.code === "ENOENT") {
-      return false;
+      return false
    }
-    throw err; // re-throw other errors
+    throw err // re-throw other errors
   }
 }
 export async function readDirectoryRecursively(dir, files = []) {
-  const exists = await fileExists(dir);
+  const exists = await fileExists(dir)
   if (!exists) {
-    return files;
+    return files
   }
-  const contents = await fs.readdir(dir, { withFileTypes: true });
+  const contents = await fs.readdir(dir, { withFileTypes: true })
   for (const item of contents) {
-    const itemPath = path.join(dir, item.name);
+    const itemPath = path.join(dir, item.name)
     if (item.isDirectory()) {
-      readDirectoryRecursively(itemPath, files);
+      readDirectoryRecursively(itemPath, files)
     } else {
-      files.push(itemPath);
+      files.push(itemPath)
     }
   }
-  return files;
+  return files
 }
 // type InputConfig
 // {
@@ -44,129 +44,129 @@ export async function readFilesByGlob(globConfigs) {
       const { pattern, ignore, dot } = {
         dot: false,
         ignore: [],
-        ...globConfig
-      };
+        ...globConfig,
+      }
       const matches = await glob(pattern, {
         ignore,
-        dot
-      });
-      return [...(await existingMatches), ...matches];
+        dot,
+      })
+      return [...(await existingMatches), ...matches]
     },
-    []
-  );
-  const files = await matchPromises;
-  return [...new Set(files)];
+    [],
+  )
+  const files = await matchPromises
+  return [...new Set(files)]
 }
 
 export function resolvePath(unresolvedPath) {
-  return path.resolve(unresolvedPath.replace(/^~/, os.homedir()));
+  return path.resolve(unresolvedPath.replace(/^~/, os.homedir()))
 }
 
 export async function firstFound(dirs, fileName) {
   for (const dir of dirs) {
-    const filePath = resolvePath(path.join(dir, fileName));
-    const exists = await fileExists(filePath);
+    const filePath = resolvePath(path.join(dir, fileName))
+    const exists = await fileExists(filePath)
     if (exists) {
-      return filePath;
+      return filePath
     }
   }
-  return null;
+  return null
 }
 
 export function removeCwd(paths) {
-  const cwd = `${process.cwd()}/`;
-  return paths.map(path => path.replace(cwd, ""));
+  const cwd = `${process.cwd()}/`
+  return paths.map(path => path.replace(cwd, ""))
 }
 
 export function removeBasePaths(baseDirs, fullPath) {
   return baseDirs.reduce((cleanedPath, dir) => {
-    return cleanedPath.replace(dir, "");
-  }, fullPath);
+    return cleanedPath.replace(dir, "")
+  }, fullPath)
 }
 
 export function replaceFileExtension(filePath, newExtension) {
   if (!newExtension) {
-    return filePath;
+    return filePath
   }
-  return `${stripFileExtension(filePath)}${newExtension}`;
+  return `${stripFileExtension(filePath)}${newExtension}`
 }
 
 export function stripFileExtension(filePath) {
   return path.join(
     path.dirname(filePath),
-    path.basename(filePath, path.extname(filePath))
-  );
+    path.basename(filePath, path.extname(filePath)),
+  )
 }
 
 export function getCleanPath(filePath, meta) {
-  return filePath.replace(meta.opts.runDir, "").replace(meta.opts.outDir, "/");
+  return filePath.replace(meta.opts.runDir, "").replace(meta.opts.outDir, "/")
 }
 
 export function getHref(filePath, meta) {
-  const route = getCleanPath(filePath, meta);
+  const route = getCleanPath(filePath, meta)
   if (route.includes("index.html")) {
-    return route.replace("index.html", "");
+    return route.replace("index.html", "")
   }
-  return route.replace(".html", "");
+  return route.replace(".html", "")
 }
 
 function stringifyPathPart(part) {
-  return typeof part === "symbol" ? part.toString() : String(part);
+  return typeof part === "symbol" ? part.toString() : String(part)
 }
 
 function trackPropertyAccessDeep(obj, path = [], accessed = new Set()) {
   return new Proxy(obj, {
     get(target, prop, receiver) {
-      const fullPath = [...path, prop].map(stringifyPathPart).join(".");
-      const value = Reflect.get(target, prop, receiver);
+      const fullPath = [...path, prop].map(stringifyPathPart).join(".")
+      const value = Reflect.get(target, prop, receiver)
 
       if (typeof target === "object" && target.hasOwnProperty(prop)) {
-        accessed.add({ path: fullPath, value });
+        accessed.add({ path: fullPath, value })
       }
 
       // Recursively wrap if value is an object and not null
       if (value && typeof value === "object") {
-        return trackPropertyAccessDeep(value, [...path, prop], accessed);
+        return trackPropertyAccessDeep(value, [...path, prop], accessed)
       }
 
-      return value;
-    }
-  });
+      return value
+    },
+  })
 }
 
 export function createTrackedObject(obj) {
-  const accessed = new Set();
-  const proxy = trackPropertyAccessDeep(obj, [], accessed);
-  return { proxy, accessed };
+  const accessed = new Set()
+  const proxy = trackPropertyAccessDeep(obj, [], accessed)
+  return { proxy, accessed }
 }
 
 export function getDeepestPropertiesForKey(paths, key) {
   // Sort paths to make prefix comparison easier
   const sorted = paths.slice().sort((a, b) => {
     if (a[key] < b[key]) {
-      return -1;
+      return -1
     }
     if (a[key] > b[key]) {
-      return 1;
+      return 1
     }
-    return 0;
-  });
-  const result = [];
+    return 0
+  })
+  const result = []
 
   for (let i = 0; i < sorted.length; i++) {
-    const current = sorted[i];
-    const next = sorted[i + 1];
+    const current = sorted[i]
+    const next = sorted[i + 1]
     // If the next path doesn't start with the current + a dot, it's a leaf node
-    const nextKey = next?.[key];
-    const currentKey = current[key];
+    const nextKey = next?.[key]
+    const currentKey = current[key]
     if (nextKey !== currentKey) {
       if (!next || !next[key].startsWith(current[key] + ".")) {
-        result.push(current);
+        result.push(current)
       }
     }
   }
 
-  return result;
+  return result
 }
 
 export function slugifyString(str) {
@@ -177,120 +177,120 @@ export function slugifyString(str) {
     .replace(/\s+/g, "-") // Replace whitespace with dashes
     .replace(/-+/g, "-") // Collapse multiple dashes
     .replace(/\./g, "-") // Replace dots with dashes
-    .replace(/^-+|-+$/g, ""); // Trim leading/trailing dashes
+    .replace(/^-+|-+$/g, "") // Trim leading/trailing dashes
 }
 
 export function getValueAtPath(obj, path) {
-  const parts = path.split(".");
-  let val = obj;
+  const parts = path.split(".")
+  let val = obj
   for (const part of parts) {
-    val = val?.[part];
-    if (val === undefined) break;
+    val = val?.[part]
+    if (val === undefined) break
   }
-  return val;
+  return val
 }
 
 export function hashObject(obj) {
-  const str = stableStringify(obj);
+  const str = stableStringify(obj)
   return createHash("md5")
     .update(str)
-    .digest("hex");
+    .digest("hex")
 }
 
 export async function getFileHash(filePath, algorithm = "md5") {
   return new Promise((resolve, reject) => {
-    const hash = createHash(algorithm);
-    const stream = createReadStream(filePath);
+    const hash = createHash(algorithm)
+    const stream = createReadStream(filePath)
 
-    stream.on("error", reject);
-    stream.on("data", chunk => hash.update(chunk));
-    stream.on("end", () => resolve(hash.digest("hex")));
-  });
+    stream.on("error", reject)
+    stream.on("data", chunk => hash.update(chunk))
+    stream.on("end", () => resolve(hash.digest("hex")))
+  })
 }
 
 export async function checkPathExists(files, baseDir) {
   if (Array.isArray(files)) {
     return (await Promise.all(
-      files.map(file => fileExists(path.join(baseDir, file)))
-    )).every(item => !!item);
+      files.map(file => fileExists(path.join(baseDir, file))),
+    )).every(item => !!item)
   }
-  return fileExists(path.join(baseDir, files));
+  return fileExists(path.join(baseDir, files))
 }
 
 export function generateRandomId(length = 8) {
-  const chars = "abcdefghijklmnopqrstuvwxyz0123456789";
-  let result = "";
+  const chars = "abcdefghijklmnopqrstuvwxyz0123456789"
+  let result = ""
   for (let i = 0; i < length; i++) {
-    result += chars.charAt(Math.floor(Math.random() * chars.length));
+    result += chars.charAt(Math.floor(Math.random() * chars.length))
   }
-  return result;
+  return result
 }
 
 async function getFileHashes(pathDeps) {
   return Promise.all(
     Object.keys(pathDeps).map(async filePath => {
-      const hash = await getFileHash(filePath);
+      const hash = await getFileHash(filePath)
       if (hash !== pathDeps[filePath]) {
-        return Promise.reject({ filePath, hash });
+        return Promise.reject({ filePath, hash })
       }
 
-      return Promise.resolve(pathDeps[filePath]);
-    })
-  );
+      return Promise.resolve(pathDeps[filePath])
+    }),
+  )
 }
 
 function getStatePropsHash(state, props) {
   const stateValues = props.reduce((depmap, dep) => {
-    const value = getValueAtPath(state, dep);
-    return { ...depmap, [dep]: value };
-  }, {});
-  return hashObject(stateValues);
+    const value = getValueAtPath(state, dep)
+    return { ...depmap, [dep]: value }
+  }, {})
+  return hashObject(stateValues)
 }
 
 export async function checkCache(name, currentState, opts) {
-  const existingCacheObject = await readCache(opts.cacheDir, name);
+  const existingCacheObject = await readCache(opts.cacheDir, name)
   if (existingCacheObject) {
-    const outFiles = existingCacheObject.taskResult.path;
-    const outFilesExist = await checkPathExists(outFiles, opts.outDir);
+    const outFiles = existingCacheObject.taskResult.path
+    const outFilesExist = await checkPathExists(outFiles, opts.outDir)
     if (outFilesExist) {
       const stateHash = getStatePropsHash(
         currentState,
-        existingCacheObject.deps.state.props
-      );
+        existingCacheObject.deps.state.props,
+      )
       if (stateHash === existingCacheObject.deps.state.hash) {
         try {
-          await getFileHashes(existingCacheObject.deps.paths);
-          return { hit: true, taskResult: existingCacheObject.taskResult };
+          await getFileHashes(existingCacheObject.deps.paths)
+          return { hit: true, taskResult: existingCacheObject.taskResult }
         } catch (e) {
           const updates = {
             deps: {
-              paths: [e]
-            }
-          };
-          return { hit: false, reason: "File hash mismatch", updates };
+              paths: [e],
+            },
+          }
+          return { hit: false, reason: "File hash mismatch", updates }
         }
       }
       const updates = {
         deps: {
           state: {
             ...existingCacheObject.deps.state,
-            hash: stateHash
-          }
-        }
-      };
-      return { hit: false, reason: "State hash mismatch", updates };
+            hash: stateHash,
+          },
+        },
+      }
+      return { hit: false, reason: "State hash mismatch", updates }
     }
     if (opts.clean) {
       await Promise.all(
         outFiles.map(
           async outFile =>
-            await fs.rm(path.join(opts.outDir, outFile), { force: true })
-        )
-      );
+            await fs.rm(path.join(opts.outDir, outFile), { force: true }),
+        ),
+      )
     }
-    return { hit: false, reason: "Missing output file(s)" };
+    return { hit: false, reason: "Missing output file(s)" }
   }
-  return { hit: false, reason: "Missing cache file" };
+  return { hit: false, reason: "Missing cache file" }
 }
 
 export async function updateCache(
@@ -299,81 +299,81 @@ export async function updateCache(
   pathDeps,
   stateDeps,
   taskResult,
-  updates
+  updates,
 ) {
-  const cacheDirExists = await fileExists(cacheDir);
+  const cacheDirExists = await fileExists(cacheDir)
   if (!cacheDirExists) {
-    await fs.mkdir(cacheDir, { recursive: true });
+    await fs.mkdir(cacheDir, { recursive: true })
   }
-  const accessedState = getDeepestPropertiesForKey([...stateDeps], "path");
+  const accessedState = getDeepestPropertiesForKey([...stateDeps], "path")
   const deps = {
     paths: [...new Set(removeCwd(pathDeps))],
     state: accessedState.reduce(
       (as, { path, value }) => ({ ...as, [path]: value }),
-      {}
-    )
-  };
-  const statePropsList = Object.keys(deps.state);
-  const updatesStateHash = updates?.deps?.state?.props || [];
+      {},
+    ),
+  }
+  const statePropsList = Object.keys(deps.state)
+  const updatesStateHash = updates?.deps?.state?.props || []
   const stateDepsHash =
     JSON.stringify(statePropsList) === JSON.stringify(updatesStateHash)
       ? updates?.deps?.state?.hash
-      : hashObject(deps.state);
+      : hashObject(deps.state)
 
   const updatesPathsCache =
     updates?.deps?.paths?.reduce(
       (pc, { filePath, hash }) => ({
         ...pc,
-        [filePath]: hash
+        [filePath]: hash,
       }),
-      {}
-    ) || {};
+      {},
+    ) || {}
   const pathsCache = (await Promise.all(
     deps.paths.map(async filePath => {
       const hash = updatesPathsCache[filePath]
         ? updatesPathsCache[filePath]
-        : await getFileHash(filePath);
+        : await getFileHash(filePath)
       return {
         hash,
-        filePath
-      };
-    })
-  )).reduce((pc, { filePath, hash }) => ({ ...pc, [filePath]: hash }), {});
+        filePath,
+      }
+    }),
+  )).reduce((pc, { filePath, hash }) => ({ ...pc, [filePath]: hash }), {})
   const cacheObject = {
     deps: {
       state: {
         hash: stateDepsHash,
-        props: Object.keys(deps.state)
+        props: Object.keys(deps.state),
       },
-      paths: pathsCache
+      paths: pathsCache,
     },
-    taskResult
-  };
-  return await writeCache(cacheDir, name, cacheObject);
+    taskResult,
+  }
+  return await writeCache(cacheDir, name, cacheObject)
 }
 
 async function writeCache(cacheDir, name, cache) {
   if (!cacheDir) {
-    return false;
+    return false
   }
   return fs.writeFile(
     path.join(cacheDir, `${name}.cache`),
     JSON.stringify(cache),
-    "utf8"
-  );
+    "utf8",
+  )
 }
 
 async function readCache(cacheDir, name) {
   if (!cacheDir) {
-    return false;
+    return false
   }
   try {
     const content = await fs.readFile(
       path.join(cacheDir, `${name}.cache`),
-      "utf8"
-    );
-    return JSON.parse(content);
-  } catch (e) {
-    return false;
+      "utf8",
+    )
+    return JSON.parse(content)
+  } catch {
+    return false
   }
 }