Skip to content

Commit

Permalink
feat: add dateLastModified auto-update mechanism
Browse files Browse the repository at this point in the history
  • Loading branch information
TheGiddyLimit committed May 17, 2022
1 parent b8351fb commit 69f1322
Show file tree
Hide file tree
Showing 720 changed files with 1,683 additions and 791 deletions.
108 changes: 108 additions & 0 deletions _node/add-timestamps.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
import {execFile} from "node:child_process";
import {Um, Uf} from "5etools-utils";
import * as Ub from "./util-brew.js";
import fs from "fs";
import hasha from "hasha";

// Tag prefixed to every log line emitted by this script.
const _LOG_TAG = `TIMESTAMPS`;

// Per-file update outcome "enum"; frozen so a typo'd assignment elsewhere
// fails loudly instead of silently adding a new state.
const _UPDATE_TYPES = Object.freeze({
	NONE: 0, // file is up-to-date; nothing is written
	HASH: 1, // file was missing "_dateLastModifiedHash"; one is added
	TIMESTAMP: 2, // content hash changed; "dateLastModified" is bumped
});


/**
 * Update the `_meta.dateLastModified` / `_meta._dateLastModifiedHash` pair for
 * every JSON file in a directory, writing back any file that changed.
 *
 * @param {string} dir Directory to scan for JSON files.
 * @returns {Promise<void>} Resolves when every file in the dir has been processed.
 * @throws {Error} If a file that should have metadata lacks `_meta`, or if the
 *   git log timestamp for a changed file cannot be read.
 */
async function pUpdateDir (dir) {
	const promises = Uf.listJsonFiles(dir)
		.map(async file => {
			const fileData = Uf.readJSON(file, {isIncludeRaw: true});

			const hasMeta = !Ub.FILES_NO_META[file];
			if (!fileData.json._meta && hasMeta) {
				throw new Error(`File "${file}" did not have metadata!`);
			}
			if (!hasMeta) return;

			let updateType = _UPDATE_TYPES.NONE;

			// We hash the file *without* `_meta`, as `_meta` includes:
			// - "dateAdded" and "dateLastModified", which we want to ignore for hashing for timestamp updates
			// - the "_dateLastModifiedHash", which we want to ignore for hashing
			// Unfortunately this means that updating the `_meta` does not trigger a hash change, but this is an
			// acceptable sacrifice for the rest of the system being simpler.
			const toHashObj = {...fileData.json};
			delete toHashObj._meta;
			const expectedHash = (await hasha.async(JSON.stringify(toHashObj))).slice(0, 10);

			if (!fileData.json._meta._dateLastModifiedHash) {
				updateType = _UPDATE_TYPES.HASH;
				fileData.json._meta._dateLastModifiedHash = expectedHash;
			} else if (expectedHash !== fileData.json._meta._dateLastModifiedHash) {
				// Grab the last commit timestamp from the log.
				// This is often a "junk" commit generated by cleaning (or indeed, timestamping) the file, but this is
				// good enough.
				// FIX: the previous `--format="%ad"` passed literal quote characters to git
				// (execFile does not go through a shell), so git emitted a quoted date that
				// `new Date(...)` parsed as NaN and timestamps were never updated. Use `%at`
				// (author date as unix seconds) to avoid date-string parsing entirely; `--`
				// guards against filenames that look like revisions/options.
				const dateLastModified = await new Promise((resolve, reject) => {
					execFile(
						"git",
						["log", "-1", "--format=%at", "--", file],
						{
							windowsHide: true,
						},
						(err, stdout) => {
							if (err) return reject(err);
							const timestamp = Number.parseInt(stdout.trim(), 10);
							if (Number.isNaN(timestamp)) return reject(new Error(`Could not read commit timestamp for "${file}"!`));
							resolve(timestamp);
						},
					);
				});

				if (fileData.json._meta.dateLastModified < dateLastModified) {
					updateType = _UPDATE_TYPES.TIMESTAMP;
					fileData.json._meta.dateLastModified = dateLastModified;
					fileData.json._meta._dateLastModifiedHash = expectedHash;
				}
				// NOTE(review): when the hash differs but git's timestamp is not newer, the
				// stale hash is intentionally left as-is, so this branch re-runs every
				// invocation until a newer commit lands — confirm this is the desired behavior.
			}

			if (updateType === _UPDATE_TYPES.NONE) return;

			const strContents = Ub.getCleanJson(fileData.json);

			await fs.promises.writeFile(file, strContents, "utf8");

			Um.info(
				_LOG_TAG,
				updateType === _UPDATE_TYPES.HASH
					? `\t- Updated "_dateLastModifiedHash" for "${file}"...`
					: `\t- Updated "dateLastModified" for "${file}"...`,
			);
		});

	await Promise.all(promises);
}


/**
 * Entry point: walk each homebrew directory in series, refreshing the
 * "dateLastModified" metadata of every JSON file inside it.
 *
 * @returns {Promise<void>} Resolves once all directories have been processed.
 */
async function main () {
	const pHandleDir = async (dir) => {
		Um.info(_LOG_TAG, `Updating dateLastModified timestamps in dir "${dir}"...`);
		await pUpdateDir(dir);
	};

	await Uf.pRunOnDirs(pHandleDir, {isSerial: true});
}

main().then(() => Um.info(_LOG_TAG, "Done!"));


103 changes: 34 additions & 69 deletions _node/clean-and-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,35 +6,8 @@ import {Um, Uf} from "5etools-utils";
import * as Ub from "./util-brew.js";
import {VANILLA_SOURCES} from "./util-sources.js";

const REPLACEMENTS = {
"—": "\\u2014",
"–": "\\u2013",
"−": "\\u2212",
"“": `\\"`,
"”": `\\"`,
"’": "'",
"…": "...",
" ": " ", // non-breaking space
"ff": "ff",
"ffi": "ffi",
"ffl": "ffl",
"fi": "fi",
"fl": "fl",
"IJ": "IJ",
"ij": "ij",
"LJ": "LJ",
"Lj": "Lj",
"lj": "lj",
"NJ": "NJ",
"Nj": "Nj",
"nj": "nj",
"ſt": "ft",
};

const _VANILLA_SOURCES = new Set(VANILLA_SOURCES);

const replacementRegex = new RegExp(Object.keys(REPLACEMENTS).join("|"), 'g');

const RUN_TIMESTAMP = Math.floor(Date.now() / 1000);
const MAX_TIMESTAMP = 9999999999;

Expand All @@ -47,76 +20,68 @@ function cleanFolder (folder) {

const files = Uf.listJsonFiles(folder);
files
.map(file => ({
name: file,
contents: Uf.readJSON(file)
}))
.map(file => {
if (RE_INVALID_WINDOWS_CHARS.test(file.name.split("/").slice(1).join("/"))) ALL_ERRORS.push(`${file.name} contained invalid characters!`);
if (!file.name.endsWith(".json")) ALL_ERRORS.push(`${file.name} had invalid extension! Should be ".json" (case-sensitive).`);
let contents = Uf.readJSON(file);

if (RE_INVALID_WINDOWS_CHARS.test(file.split("/").slice(1).join("/"))) ALL_ERRORS.push(`${file} contained invalid characters!`);
if (!file.endsWith(".json")) ALL_ERRORS.push(`${file} had invalid extension! Should be ".json" (case-sensitive).`);

if (!Ub.FILES_NO_META[file.name]) {
if (!Ub.FILES_NO_META[file]) {
// region clean
// Ensure _meta is at the top of the file
const tmp = {$schema: file.contents.$schema, _meta: file.contents._meta};
delete file.contents.$schema;
delete file.contents._meta;
Object.assign(tmp, file.contents);
file.contents = tmp;

if (file.contents._meta.dateAdded == null) {
Um.warn(`TIMESTAMPS`, `\tFile "${file.name}" did not have "dateAdded"! Adding one...`);
file.contents._meta.dateAdded = RUN_TIMESTAMP;
} else if (file.contents._meta.dateAdded > MAX_TIMESTAMP) {
Um.warn(`TIMESTAMPS`, `\tFile "${file.name}" had a "dateAdded" in milliseconds! Converting to seconds...`);
file.contents._meta.dateAdded = Math.round(file.contents._meta.dateAdded / 1000);
const tmp = {$schema: contents.$schema, _meta: contents._meta};
delete contents.$schema;
delete contents._meta;
Object.assign(tmp, contents);
contents = tmp;

if (contents._meta.dateAdded == null) {
Um.warn(`TIMESTAMPS`, `\tFile "${file}" did not have "dateAdded"! Adding one...`);
contents._meta.dateAdded = RUN_TIMESTAMP;
} else if (contents._meta.dateAdded > MAX_TIMESTAMP) {
Um.warn(`TIMESTAMPS`, `\tFile "${file}" had a "dateAdded" in milliseconds! Converting to seconds...`);
contents._meta.dateAdded = Math.round(contents._meta.dateAdded / 1000);
}

if (file.contents._meta.dateLastModified == null) {
Um.warn(`TIMESTAMPS`, `\tFile "${file.name}" did not have "dateLastModified"! Adding one...`);
file.contents._meta.dateLastModified = RUN_TIMESTAMP;
} else if (file.contents._meta.dateLastModified > MAX_TIMESTAMP) {
Um.warn(`TIMESTAMPS`, `\tFile "${file.name}" had a "dateLastModified" in milliseconds! Converting to seconds...`);
file.contents._meta.dateLastModified = Math.round(file.contents._meta.dateLastModified / 1000);
if (contents._meta.dateLastModified == null) {
Um.warn(`TIMESTAMPS`, `\tFile "${file}" did not have "dateLastModified"! Adding one...`);
contents._meta.dateLastModified = RUN_TIMESTAMP;
} else if (contents._meta.dateLastModified > MAX_TIMESTAMP) {
Um.warn(`TIMESTAMPS`, `\tFile "${file}" had a "dateLastModified" in milliseconds! Converting to seconds...`);
contents._meta.dateLastModified = Math.round(contents._meta.dateLastModified / 1000);
}

(file.contents._meta.sources || []).forEach(source => {
(contents._meta.sources || []).forEach(source => {
if (source.version != null) return;
Um.warn(`VERSION`, `\tFile "${file.name}" source "${source.json}" did not have "version"! Adding one...`);
Um.warn(`VERSION`, `\tFile "${file}" source "${source.json}" did not have "version"! Adding one...`);
source.version = "unknown";
});
// endregion

// region test
const validSources = new Set(file.contents._meta.sources.map(src => src.json));
const validSources = new Set(contents._meta.sources.map(src => src.json));
validSources.add("UAClassFeatureVariants"); // Allow CFV UA sources

Object.keys(file.contents)
Object.keys(contents)
.filter(k => !CONTENT_KEY_BLACKLIST.has(k))
.forEach(k => {
const data = file.contents[k];
const data = contents[k];

if (!(data instanceof Array) || !data.forEach) throw new Error(`File "${k}" data was not an array!`);

data.forEach(it => {
const source = it.source || (it.inherits ? it.inherits.source : null);
if (!source) return ALL_ERRORS.push(`${file.name} :: ${k} :: "${it.name || it.id}" had no source!`);
if (!validSources.has(source) && !_VANILLA_SOURCES.has(source)) return ALL_ERRORS.push(`${file.name} :: ${k} :: "${it.name || it.id}" source "${source}" was not in _meta`);
if (!source) return ALL_ERRORS.push(`${file} :: ${k} :: "${it.name || it.id}" had no source!`);
if (!validSources.has(source) && !_VANILLA_SOURCES.has(source)) return ALL_ERRORS.push(`${file} :: ${k} :: "${it.name || it.id}" source "${source}" was not in _meta`);
});
});
// endregion
}

file.contents = JSON.stringify(file.contents, null, "\t") + "\n";
return file;
})
.map(file => {
Um.info(`CLEANER`, `\t- "${file.name}"...`);
file.contents = file.contents.replace(replacementRegex, (match) => REPLACEMENTS[match]);
return file;
})
.forEach(file => {
fs.writeFileSync(file.name, file.contents);
Um.info(`CLEANER`, `\t- "${file}"...`);
contents = Ub.getCleanJson(contents);

fs.writeFileSync(file, contents);
});

if (ALL_ERRORS.length) {
Expand Down
35 changes: 34 additions & 1 deletion _node/util-brew.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,39 @@ const FILES_NO_META = {
"collection/index.json": 1
};

// Typographic characters to normalize when emitting cleaned JSON.
// Keys are written as explicit unicode escapes: the original literals were
// ligature/typographic characters which copy/paste and editor normalization
// silently decompose into their ASCII equivalents (turning the replacement
// into a no-op identity pair), so escapes keep the intent unambiguous.
const _CLEAN_JSON_REPLACEMENTS = {
	"\u2014": "\\u2014", // em dash — kept, but as a JSON escape sequence
	"\u2013": "\\u2013", // en dash — kept, but as a JSON escape sequence
	"\u2212": "\\u2212", // minus sign — kept, but as a JSON escape sequence
	"\u201C": `\\"`, // left curly quote -> escaped straight quote
	"\u201D": `\\"`, // right curly quote -> escaped straight quote
	"\u2019": "'", // curly apostrophe -> straight apostrophe
	"\u2026": "...", // horizontal ellipsis -> three dots
	"\u00A0": " ", // non-breaking space -> regular space
	"\uFB00": "ff", // "ff" ligature
	"\uFB03": "ffi", // "ffi" ligature
	"\uFB04": "ffl", // "ffl" ligature
	"\uFB01": "fi", // "fi" ligature
	"\uFB02": "fl", // "fl" ligature
	"\u0132": "IJ", // "IJ" ligature
	"\u0133": "ij", // "ij" ligature
	"\u01C7": "LJ", // "LJ" ligature
	"\u01C8": "Lj", // "Lj" ligature
	"\u01C9": "lj", // "lj" ligature
	"\u01CA": "NJ", // "NJ" ligature
	"\u01CB": "Nj", // "Nj" ligature
	"\u01CC": "nj", // "nj" ligature
	"\uFB05": "ft", // long-s-t ligature
};
// One alternation over every key; none of the keys are regex metacharacters,
// so no escaping is required when building the pattern.
const _CLEAN_JSON_REPLACEMENT_REGEX = new RegExp(Object.keys(_CLEAN_JSON_REPLACEMENTS).join("|"), "g");

/**
 * Serialize a homebrew JSON object to pretty-printed text, normalizing
 * typographic characters (ligatures, curly quotes, dashes, etc.) as it goes.
 *
 * @param {object} obj Object to serialize.
 * @returns {string} Tab-indented JSON string terminated by a newline.
 */
const getCleanJson = (obj) => {
	const str = JSON.stringify(obj, null, "\t") + "\n";
	return str.replace(_CLEAN_JSON_REPLACEMENT_REGEX, (match) => _CLEAN_JSON_REPLACEMENTS[match]);
};

export {
FILES_NO_META,
}
getCleanJson,
};
6 changes: 6 additions & 0 deletions _schema-fast/homebrew.json
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,12 @@
"type": "integer",
"description": "The epoch timestamp (in seconds) when the homebrew was last modified. Not guaranteed to be anywhere near accurate."
},
"_dateLastModifiedHash": {
"type": "string",
"description": "A file hash used to automatically update the value of \"dateLastModified\". Should not be manually created/edited.",
"minLength": 10,
"maxLength": 10
},
"currencyConversions": {
"type": "object",
"description": "Keys should be uniquely-named value conversion objects, and can be referenced from item entities as \"currencyConversion\": \"<MyKey>\". The currency conversion info will then be used when rendering the value of the item.",
Expand Down
6 changes: 6 additions & 0 deletions _schema/homebrew.json
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,12 @@
"type": "integer",
"description": "The epoch timestamp (in seconds) when the homebrew was last modified. Not guaranteed to be anywhere near accurate."
},
"_dateLastModifiedHash": {
"type": "string",
"description": "A file hash used to automatically update the value of \"dateLastModified\". Should not be manually created/edited.",
"minLength": 10,
"maxLength": 10
},
"currencyConversions": {
"type": "object",
"description": "Keys should be uniquely-named value conversion objects, and can be referenced from item entities as \"currencyConversion\": \"<MyKey>\". The currency conversion info will then be used when rendering the value of the item.",
Expand Down
3 changes: 2 additions & 1 deletion action/Sample - Giddy; Extinguish.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@
}
],
"dateAdded": 0,
"dateLastModified": 0
"dateLastModified": 0,
"_dateLastModifiedHash": "caca90c5e3"
},
"action": [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
}
],
"dateAdded": 1558867830,
"dateLastModified": 1558867830
"dateLastModified": 1558867830,
"_dateLastModifiedHash": "b4ba9c3d27"
},
"adventure": [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
}
],
"dateAdded": 1558867830,
"dateLastModified": 1558867830
"dateLastModified": 1558867830,
"_dateLastModifiedHash": "f7d4203072"
},
"adventure": [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
}
],
"dateAdded": 1558867830,
"dateLastModified": 1558867830
"dateLastModified": 1558867830,
"_dateLastModifiedHash": "bc0e383fd8"
},
"adventure": [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
}
],
"dateAdded": 1600276023,
"dateLastModified": 1600276023
"dateLastModified": 1600276023,
"_dateLastModifiedHash": "7175f66f70"
},
"adventure": [
{
Expand Down
3 changes: 2 additions & 1 deletion adventure/Adventurers League; DDIA-MORD Rrakkma.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
}
],
"dateAdded": 1552599066,
"dateLastModified": 1552599066
"dateLastModified": 1552599066,
"_dateLastModifiedHash": "5c8eb89a86"
},
"adventure": [
{
Expand Down
3 changes: 2 additions & 1 deletion adventure/Adventures Await; Shipwreck of the Sea Mephit.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@
}
],
"dateAdded": 1621775534,
"dateLastModified": 1621775534
"dateLastModified": 1621775534,
"_dateLastModifiedHash": "11efb350fd"
},
"adventure": [
{
Expand Down
Loading

0 comments on commit 69f1322

Please sign in to comment.