From ccb58ccc99e7a034cdac90b0f857cf6eb2293cab Mon Sep 17 00:00:00 2001
From: Trent Piepho
Date: Sat, 7 Sep 2024 17:32:49 -0700
Subject: [PATCH] Add ability to pack objects rather than just files

This allows the compilePack() src argument to be not only a directory of
files to pack, but also an Iterable that yields the objects to pack. It
can then be used to create packs whose contents are generated dynamically.

PF2e Workbench can use this for a compendium of macros it creates, where
the macros do not exist as a directory of JSON files; the macro Items are
created dynamically. Currently it uses an old NeDB-format compendium for
these and builds the database "manually" by concatenating lines of JSON
text.
---
 lib/package.mjs | 43 +++++++++++++++++++++++++++----------------
 1 file changed, 27 insertions(+), 16 deletions(-)

diff --git a/lib/package.mjs b/lib/package.mjs
index 260a324..418c232 100644
--- a/lib/package.mjs
+++ b/lib/package.mjs
@@ -37,6 +37,8 @@ import { ClassicLevel } from "classic-level";
  * @typedef {PackageOptions} CompileOptions
  * @property {boolean} [recursive=false]  Whether to recurse into child directories to locate source files, otherwise
  *                                        only source files located in the root directory will be used.
+ * @property {boolean} [filesAreObjects=false]  Rather than a list of file names, src is an iterable whose items are
+ *                                              the objects to be packed.
  */

 /**
@@ -175,7 +177,7 @@ export const TYPE_COLLECTION_MAP = {

 /**
  * Compile source files into a compendium pack.
- * @param {string} src   The directory containing the source files.
+ * @param {string|Iterable<object>} src  The directory containing the source files, or an iterable of existing objects.
  * @param {string} dest  The target compendium pack. This should be a directory for LevelDB packs, or a .db file for
  *                       NeDB packs.
  * @param {CompileOptions} [options]
@@ -187,9 +189,10 @@ export async function compilePack(src, dest, {
   if ( nedb && (path.extname(dest) !== ".db") ) {
     throw new Error("The nedb option was passed to compilePacks, but the target pack does not have a .db extension.");
   }
-  const files = findSourceFiles(src, { yaml, recursive });
-  if ( nedb ) return compileNedb(dest, files, { log, transformEntry });
-  return compileClassicLevel(dest, files, { log, transformEntry });
+  const isDirectory = typeof src === "string" || src instanceof String;
+  const files = isDirectory ? findSourceFiles(src, { yaml, recursive }) : src;
+  if ( nedb ) return compileNedb(dest, files, { log, transformEntry, filesAreObjects: !isDirectory });
+  return compileClassicLevel(dest, files, { log, transformEntry, filesAreObjects: !isDirectory });
 }

 /* -------------------------------------------- */
@@ -198,10 +201,10 @@
  * Compile a set of files into a NeDB compendium pack.
  * @param {string} pack     The target compendium pack.
  * @param {string[]} files  The source files.
- * @param {Partial<PackageOptions>} [options]
+ * @param {Partial<CompileOptions>} [options]
  * @returns {Promise<void>}
  */
-async function compileNedb(pack, files, { log, transformEntry }={}) {
+async function compileNedb(pack, files, { log, transformEntry, filesAreObjects=false }={}) {
   // Delete the existing NeDB file if it exists.
   try {
     fs.unlinkSync(pack);
@@ -223,10 +226,7 @@ async function compileNedb(pack, files, { log, transformEntry, filesAreObjects=false }={}) {
   // Iterate over all source files, writing them to the DB.
   for ( const file of files ) {
     try {
-      const contents = fs.readFileSync(file, "utf8");
-      const ext = path.extname(file);
-      const isYaml = ext === ".yml" || ext === ".yaml";
-      const doc = isYaml ? YAML.load(contents) : JSON.parse(contents);
+      const doc = filesAreObjects ? file : loadFile(file);
       const key = doc._key;
       const [, collection] = key.split("!");
       // If the key starts with !folders, we should skip packing it as NeDB doesn't support folders.
@@ -252,10 +252,10 @@
  * Compile a set of files into a LevelDB compendium pack.
  * @param {string} pack     The target compendium pack.
  * @param {string[]} files  The source files.
- * @param {Partial<PackageOptions>} [options]
+ * @param {Partial<CompileOptions>} [options]
  * @returns {Promise<void>}
  */
-async function compileClassicLevel(pack, files, { log, transformEntry }={}) {
+async function compileClassicLevel(pack, files, { log, transformEntry, filesAreObjects=false }={}) {
   // Create the classic level directory if it doesn't already exist.
   fs.mkdirSync(pack, { recursive: true });

@@ -279,10 +279,7 @@ async function compileClassicLevel(pack, files, { log, transformEntry, filesAreObjects=false }={}) {
   // Iterate over all files in the input directory, writing them to the DB.
   for ( const file of files ) {
     try {
-      const contents = fs.readFileSync(file, "utf8");
-      const ext = path.extname(file);
-      const isYaml = ext === ".yml" || ext === ".yaml";
-      const doc = isYaml ? YAML.load(contents) : JSON.parse(contents);
+      const doc = filesAreObjects ? file : loadFile(file);
       const [, collection] = doc._key.split("!");
       if ( await transformEntry?.(doc) === false ) continue;
       await packDoc(doc, collection);
@@ -550,3 +547,17 @@ function keyJoin(...args) {
 function getSafeFilename(filename) {
   return filename.replace(/[^a-zA-Z0-9А-я]/g, '_');
 }
+
+/* -------------------------------------------- */
+
+/**
+ * Load a JSON or YAML file, using the file extension to determine the type.
+ * @param {string} filename  The file to load.
+ * @returns {object}         The file contents as an object.
+ */
+function loadFile(filename) {
+  const contents = fs.readFileSync(filename, "utf8");
+  const ext = path.extname(filename);
+  const isYaml = ext === ".yml" || ext === ".yaml";
+  return isYaml ? YAML.load(contents) : JSON.parse(contents);
+}
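
For illustration, a minimal sketch of how a consumer such as PF2e Workbench
might call the new iterable form of compilePack(). This assumes the published
foundryvtt-cli entry point re-exports compilePack; the macro ids, names, and
document fields below are illustrative assumptions, not part of this patch.

import { compilePack } from "@foundryvtt/foundryvtt-cli";

// Build macro documents in memory instead of reading a directory of JSON
// files. Only _key is consumed by the packer; its format is
// "!<collection>!<id>", as seen in the key.split("!") calls in the diff.
function* buildMacros() {
  let n = 0;
  for ( const name of ["Toggle Torch", "Rest for the Night"] ) {
    const id = `wbmacro${n++}`.padEnd(16, "0");  // hypothetical 16-char id
    yield {
      _id: id,
      _key: `!macros!${id}`,
      name,
      type: "script",
      command: "// macro body generated at build time"
    };
  }
}

// Generator objects are iterable, so one can be passed directly as src;
// no source directory is involved.
await compilePack(buildMacros(), "packs/workbench-macros", { log: true });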