From c8991bf03d9f6e0117c6126a7fd311997b4aa9d0 Mon Sep 17 00:00:00 2001 From: Anton Vikulov Date: Thu, 19 Oct 2023 19:14:02 +0500 Subject: [PATCH 1/2] feat(cache): add cache --- src/cmd/build/index.ts | 19 +- src/constants.ts | 2 + src/models.ts | 23 ++- src/resolvers/lintPage.ts | 32 +++- src/resolvers/md2html.ts | 43 ++++- src/resolvers/md2md.ts | 137 +++++++++----- src/services/cache/cache.ts | 176 ++++++++++++++++++ src/services/cache/cacheFile.ts | 298 +++++++++++++++++++++++++++++++ src/services/cache/index.ts | 1 + src/services/cache/types.ts | 22 +++ src/services/preset.ts | 46 ++++- src/services/utils.ts | 13 ++ src/steps/processServiceFiles.ts | 4 +- src/utils/file.ts | 16 +- src/utils/path.ts | 6 +- src/utils/pluginEnvApi.ts | 176 ++++++++++++++++++ src/utils/presets.ts | 16 ++ src/workers/linter/index.ts | 3 + 18 files changed, 959 insertions(+), 74 deletions(-) create mode 100644 src/services/cache/cache.ts create mode 100644 src/services/cache/cacheFile.ts create mode 100644 src/services/cache/index.ts create mode 100644 src/services/cache/types.ts create mode 100644 src/utils/pluginEnvApi.ts diff --git a/src/cmd/build/index.ts b/src/cmd/build/index.ts index 6c82e9a2..a64acc17 100644 --- a/src/cmd/build/index.ts +++ b/src/cmd/build/index.ts @@ -27,6 +27,7 @@ import {Resources} from '../../models'; import {copyFiles, logger} from '../../utils'; import {upload as publishFilesToS3} from '../publish/upload'; import glob from 'glob'; +import {cacheServiceBuildMd, cacheServiceLint, cacheServiceMdToHtml} from '../../services/cache'; export const build = { command: ['build', '$0'], @@ -166,6 +167,18 @@ function builder(argv: Argv) { type: 'boolean', group: 'Build options:', }) + .option('cache-dir', { + default: resolve('cache'), + describe: 'Path to cache folder', + type: 'string', + group: 'Build options:', + }) + .option('cache', { + default: false, + describe: 'Enable cache', + type: 'boolean', + group: 'Build options:', + }) .check(argvValidator) .example('yfm -i ./input -o ./output', '') .demandOption( @@ -179,6 +192,10 @@ async function handler(args: Arguments) { const tmpInputFolder = resolve(args.output, TMP_INPUT_FOLDER); const tmpOutputFolder = resolve(args.output, TMP_OUTPUT_FOLDER); + cacheServiceLint.init(args.cache, args.cacheDir); + cacheServiceBuildMd.init(args.cache, args.cacheDir); + cacheServiceMdToHtml.init(args.cache, args.cacheDir); + try { ArgvService.init({ ...args, @@ -287,7 +304,7 @@ async function handler(args: Arguments) { } } } catch (err) { - logger.error('', err.message); + logger.error('', (err as Error).message); } finally { processLogs(tmpInputFolder); diff --git a/src/constants.ts b/src/constants.ts index 97dce0c1..3cbb15ce 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -97,6 +97,8 @@ export const GETTING_ALL_CONTRIBUTORS = 'Getting all contributors.'; export const ALL_CONTRIBUTORS_RECEIVED = 'All contributors received.'; export const getMsgСonfigurationMustBeProvided = (repo: string) => `Сonfiguration must be provided for ${repo} like env variables or in .yfm file`; +export const CACHE_HIT = 'Cache hit:'; +export const LINT_CACHE_HIT = 'Lint cache hit:'; export const FIRST_COMMIT_FROM_ROBOT_IN_GITHUB = '2dce14271359cd20d7e874956d604de087560cf4'; diff --git a/src/models.ts b/src/models.ts index 669da947..7d09e209 100644 --- a/src/models.ts +++ b/src/models.ts @@ -4,8 +4,9 @@ import {LintConfig} from '@diplodoc/transform/lib/yfmlint'; import {FileContributors, VCSConnector, VCSConnectorConfig} from './vcs-connector/connector-models'; import 
{Lang, Stage, IncludeMode, ResourceType} from './constants'; import {ChangelogItem} from '@diplodoc/transform/lib/plugins/changelog/types'; +import PluginEnvApi from './utils/pluginEnvApi'; -export type VarsPreset = 'internal' | 'external'; +export type VarsPreset = 'internal'|'external'; export type YfmPreset = Record; // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -13,10 +14,7 @@ export type Metadata = Record; export type ExternalAuthorByPathFunction = (path: string) => Contributor | null; export type ContributorsByPathFunction = (path: string) => Promise; -export type NestedContributorsForPathFunction = ( - path: string, - nestedContributors: Contributors, -) => void; +export type NestedContributorsForPathFunction = (path: string, nestedContributors: Contributors) => void; export type UserByLoginFunction = (login: string) => Promise; export type CollectionOfPluginsFunction = (output: string, options: PluginOptions) => string; @@ -48,6 +46,8 @@ export interface YfmArgv extends YfmConfig { rootInput: string; input: string; output: string; + cache: boolean; + cacheDir: string; quiet: string; publish: boolean; storageEndpoint: string; @@ -99,7 +99,7 @@ export type YfmTocIncluder = { export const includersNames = ['sourcedocs', 'openapi', 'generic', 'unarchive'] as const; -export type YfmTocIncluderName = (typeof includersNames)[number]; +export type YfmTocIncluderName = typeof includersNames[number]; // eslint-disable-next-line @typescript-eslint/no-explicit-any export type Includer = { @@ -107,9 +107,7 @@ export type Includer = { includerFunction: IncluderFunction; }; -export type IncluderFunction = ( - args: IncluderFunctionParams, -) => Promise; +export type IncluderFunction = (args: IncluderFunctionParams) => Promise; export type IncluderFunctionParams = { // item that contains include that uses includer @@ -153,7 +151,7 @@ export interface LeadingPageLinks extends Filter { } export interface Filter { - when?: boolean | string; + when?: boolean|string; [key: string]: unknown; } @@ -203,6 +201,7 @@ export interface PluginOptions { collectOfPlugins?: (input: string, options: PluginOptions) => string; changelogs?: ChangelogItem[]; extractChangelogs?: boolean; + envApi?: PluginEnvApi; } export interface Plugin { @@ -244,8 +243,8 @@ export type Resources = { }; export type YandexCloudTranslateGlossaryPair = { - sourceText: string; - translatedText: string; + sourceText: string; + translatedText: string; }; export type CommitInfo = { diff --git a/src/resolvers/lintPage.ts b/src/resolvers/lintPage.ts index 287c891e..e223f716 100644 --- a/src/resolvers/lintPage.ts +++ b/src/resolvers/lintPage.ts @@ -9,9 +9,13 @@ import {readFileSync} from 'fs'; import {bold} from 'chalk'; import {ArgvService, PluginService} from '../services'; -import {getVarsPerFile, getVarsPerRelativeFile} from '../utils'; +import {getVarsPerFileWithHash, getVarsPerRelativeFile, logger} from '../utils'; import {liquidMd2Html} from './md2html'; import {liquidMd2Md} from './md2md'; +import {cacheServiceLint} from '../services/cache'; +import PluginEnvApi from '../utils/pluginEnvApi'; +import {checkLogWithoutProblems, getLogState} from '../services/utils'; +import {LINT_CACHE_HIT} from '../constants'; interface FileTransformOptions { path: string; @@ -58,11 +62,28 @@ function MdFileLinter(content: string, lintOptions: FileTransformOptions): void const {path: filePath} = lintOptions; const plugins = outputFormat === 'md' ? 
[] : PluginService.getPlugins(); - const vars = getVarsPerFile(filePath); + const {vars, varsHashList} = getVarsPerFileWithHash(filePath); const root = resolve(input); const path: string = resolve(input, filePath); let preparedContent = content; + const cacheKey = cacheServiceLint.getHashKey({filename: filePath, content, varsHashList}); + + const cachedFile = cacheServiceLint.checkFile(cacheKey); + if (cachedFile) { + logger.info(filePath, LINT_CACHE_HIT); + return; + } + + const cacheFile = cacheServiceLint.createFile(cacheKey); + + const envApi = PluginEnvApi.create({ + root, + distRoot: '', + cacheFile, + }); + const logState = getLogState(log); + /* Relative path from folder of .md file to root of user' output folder */ const assetsPublicPath = relative(dirname(path), root); @@ -72,6 +93,7 @@ function MdFileLinter(content: string, lintOptions: FileTransformOptions): void const pluginOptions: PluginOptions = { ...options, vars, + varsHashList, root, path: localPath, lintMarkdown, // Should pass the function for linting included files @@ -79,6 +101,7 @@ function MdFileLinter(content: string, lintOptions: FileTransformOptions): void disableLiquid, log, getVarsPerFile: getVarsPerRelativeFile, + envApi, }; yfmlint({ @@ -110,4 +133,9 @@ function MdFileLinter(content: string, lintOptions: FileTransformOptions): void path, sourceMap, }); + + const logIsOk = checkLogWithoutProblems(log, logState); + if (logIsOk) { + cacheServiceLint.addFile(cacheFile); + } } diff --git a/src/resolvers/md2html.ts b/src/resolvers/md2html.ts index 5716ecc4..0e68d60d 100644 --- a/src/resolvers/md2html.ts +++ b/src/resolvers/md2html.ts @@ -12,12 +12,15 @@ import { generateStaticMarkup, logger, transformToc, - getVarsPerFile, getVarsPerRelativeFile, + getVarsPerFileWithHash, } from '../utils'; -import {PROCESSING_FINISHED, Lang} from '../constants'; +import {PROCESSING_FINISHED, Lang, CACHE_HIT} from '../constants'; import {getAssetsPublicPath, getUpdatedMetadata} from '../services/metadata'; import {MarkdownItPluginCb} from '@diplodoc/transform/lib/plugins/typings'; +import PluginEnvApi from '../utils/pluginEnvApi'; +import {cacheServiceMdToHtml} from '../services/cache'; +import {checkLogWithoutProblems, getLogState} from '../services/utils'; export interface FileTransformOptions { path: string; @@ -48,7 +51,7 @@ export async function resolveMd2HTML(options: ResolverOptions): Promise, path }); } -function MdFileTransformer(content: string, transformOptions: FileTransformOptions): Output { +async function MdFileTransformer(content: string, transformOptions: FileTransformOptions): Promise { const {input, ...options} = ArgvService.getConfig(); const {path: filePath} = transformOptions; const plugins = PluginService.getPlugins(); - const vars = getVarsPerFile(filePath); + const {vars, varsHashList} = getVarsPerFileWithHash(filePath); const root = resolve(input); const path: string = resolve(input, filePath); - return transform(content, { + const cacheKey = cacheServiceMdToHtml.getHashKey({filename: filePath, content, varsHashList}); + + const cachedFile = await cacheServiceMdToHtml.checkFileAsync(cacheKey); + if (cachedFile) { + logger.info(filePath, CACHE_HIT); + await cachedFile.extractCacheAsync(); + return cachedFile.getResult(); + } + + const cacheFile = cacheServiceMdToHtml.createFile(cacheKey); + const envApi = PluginEnvApi.create({ + root: resolve(input), + distRoot: resolve(options.output), + cacheFile, + }); + const logState = getLogState(log); + + const result = transform(content, { ...options, plugins: 
plugins as MarkdownItPluginCb[], vars, @@ -146,5 +166,16 @@ function MdFileTransformer(content: string, transformOptions: FileTransformOptio assetsPublicPath: getAssetsPublicPath(filePath), getVarsPerFile: getVarsPerRelativeFile, extractTitle: true, + envApi, }); + + envApi.executeActions(); + + const logIsOk = checkLogWithoutProblems(log, logState); + if (logIsOk) { + cacheFile.setResult(result); + await cacheServiceMdToHtml.addFileAsync(cacheFile); + } + + return result; } diff --git a/src/resolvers/md2md.ts b/src/resolvers/md2md.ts index e684a775..827adad1 100644 --- a/src/resolvers/md2md.ts +++ b/src/resolvers/md2md.ts @@ -1,40 +1,80 @@ -import {existsSync, readFileSync, writeFileSync} from 'fs'; -import {dirname, resolve, join, basename, extname} from 'path'; +import * as fs from 'fs'; +import {dirname, resolve, join, basename, extname, relative} from 'path'; import shell from 'shelljs'; -import log from '@diplodoc/transform/lib/log'; +import log, {LogLevels} from '@diplodoc/transform/lib/log'; import liquid from '@diplodoc/transform/lib/liquid'; import {ArgvService, PluginService} from '../services'; -import {logger, getVarsPerFile} from '../utils'; +import {logger, getVarsPerFileWithHash} from '../utils'; import {PluginOptions, ResolveMd2MdOptions} from '../models'; -import {PROCESSING_FINISHED} from '../constants'; +import {CACHE_HIT, PROCESSING_FINISHED} from '../constants'; import {getContentWithUpdatedMetadata} from '../services/metadata'; import {ChangelogItem} from '@diplodoc/transform/lib/plugins/changelog/types'; +import {cacheServiceBuildMd} from '../services/cache'; +import PluginEnvApi from '../utils/pluginEnvApi'; +import {checkLogWithoutProblems, getLogState} from '../services/utils'; export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise { const {inputPath, outputPath, metadata} = options; const {input, output} = ArgvService.getConfig(); const resolvedInputPath = resolve(input, inputPath); - const vars = getVarsPerFile(inputPath); - - const content = await getContentWithUpdatedMetadata( - readFileSync(resolvedInputPath, 'utf8'), - metadata, - vars.__system, - ); - - const {result, changelogs} = transformMd2Md(content, { - path: resolvedInputPath, - destPath: outputPath, - root: resolve(input), - destRoot: resolve(output), - collectOfPlugins: PluginService.getCollectOfPlugins(), - vars, - log, - copyFile, - }); + const {vars, varsHashList} = getVarsPerFileWithHash(inputPath); + + const rawContent = fs.readFileSync(resolvedInputPath, 'utf8'); + + const cacheKey = cacheServiceBuildMd.getHashKey({filename: inputPath, content: rawContent, varsHashList}); + + let result: string; + let changelogs: ChangelogItem[]; + + const cachedFile = await cacheServiceBuildMd.checkFileAsync(cacheKey); + if (cachedFile) { + logger.info(inputPath, CACHE_HIT); + await cachedFile.extractCacheAsync(); + const results = cachedFile.getResult<{result: string; changelogs: ChangelogItem[]; logs: Record}>(); + result = results.result; + changelogs = results.changelogs; + } else { + const content = await getContentWithUpdatedMetadata( + rawContent, + metadata, + vars.__system, + ); + + const cacheFile = cacheServiceBuildMd.createFile(cacheKey); + const envApi = PluginEnvApi.create({ + root: resolve(input), + distRoot: resolve(output), + cacheFile, + }); + const logState = getLogState(log); + + const transformResult = transformMd2Md(content, { + path: resolvedInputPath, + destPath: outputPath, + root: resolve(input), + destRoot: resolve(output), + collectOfPlugins: 
PluginService.getCollectOfPlugins(), + vars, + log, + copyFile, + envApi, + }); - writeFileSync(outputPath, result); + result = transformResult.result; + changelogs = transformResult.changelogs; + + envApi.executeActions(); + + const logIsOk = checkLogWithoutProblems(log, logState); + if (logIsOk) { + cacheFile.setResult(transformResult); + // not async cause race condition + cacheServiceBuildMd.addFile(cacheFile); + } + } + + fs.writeFileSync(outputPath, result); if (changelogs?.length) { const mdFilename = basename(outputPath, extname(outputPath)); @@ -50,25 +90,19 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise changesName = Math.trunc(new Date(changesDate).getTime() / 1000); } if (!changesName) { - changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart( - 3, - '0', - )}`; + changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart(3, '0')}`; } const changesPath = join(outputDir, `changes-${changesName}.json`); - if (existsSync(changesPath)) { + if (fs.existsSync(changesPath)) { throw new Error(`Changelog ${changesPath} already exists!`); } - writeFileSync( - changesPath, - JSON.stringify({ - ...changes, - source: mdFilename, - }), - ); + fs.writeFileSync(changesPath, JSON.stringify({ + ...changes, + source: mdFilename, + })); }); } @@ -78,20 +112,35 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise } function copyFile(targetPath: string, targetDestPath: string, options?: PluginOptions) { - shell.mkdir('-p', dirname(targetDestPath)); - if (options) { - const sourceIncludeContent = readFileSync(targetPath, 'utf8'); + const {envApi} = options; + let sourceIncludeContent: string; + if (envApi) { + sourceIncludeContent = envApi.readFile(relative(envApi.root, targetPath), 'utf-8') as string; + } else { + sourceIncludeContent = fs.readFileSync(targetPath, 'utf8'); + } + const {result} = transformMd2Md(sourceIncludeContent, options); - writeFileSync(targetDestPath, result); + if (envApi) { + envApi.writeFileAsync(relative(envApi.distRoot, targetDestPath), result); + } else { + fs.mkdirSync(dirname(targetDestPath), {recursive: true}); + fs.writeFileSync(targetDestPath, result); + } } else { + fs.mkdirSync(dirname(targetDestPath), {recursive: true}); shell.cp(targetPath, targetDestPath); } } export function liquidMd2Md(input: string, vars: Record, path: string) { - const {applyPresets, resolveConditions, conditionsInCode} = ArgvService.getConfig(); + const { + applyPresets, + resolveConditions, + conditionsInCode, + } = ArgvService.getConfig(); return liquid(input, vars, path, { conditions: resolveConditions, @@ -103,7 +152,9 @@ export function liquidMd2Md(input: string, vars: Record, path: } function transformMd2Md(input: string, options: PluginOptions) { - const {disableLiquid} = ArgvService.getConfig(); + const { + disableLiquid, + } = ArgvService.getConfig(); const { vars = {}, path, @@ -113,6 +164,7 @@ function transformMd2Md(input: string, options: PluginOptions) { collectOfPlugins, log: pluginLog, copyFile: pluginCopyFile, + envApi, } = options; let output = input; @@ -136,6 +188,7 @@ function transformMd2Md(input: string, options: PluginOptions) { collectOfPlugins, changelogs, extractChangelogs: true, + envApi, }); } diff --git a/src/services/cache/cache.ts b/src/services/cache/cache.ts new file mode 100644 index 00000000..35a5c6a8 --- /dev/null +++ b/src/services/cache/cache.ts @@ -0,0 +1,176 @@ +import * as fs from 'fs'; +import * as crypto from 'crypto'; +import CacheFile from 
'./cacheFile'; +import {ArgvService} from '../index'; +import {pick} from 'lodash'; +import path from 'path'; +import {fileExists} from '../../utils'; +import {HashKey} from './types'; + +const objHash = new WeakMap(); +const fileHash = new Map(); +const existsDir = new Set(); + +type GetHashKeyProps = Omit & {content: string}; + +let argsHash = ''; + +export class CacheService { + static getObjHash(obj: Record) { + let hash = objHash.get(obj); + if (!hash) { + hash = this.getHash(JSON.stringify(obj)); + objHash.set(obj, hash); + } + return hash; + } + + static getHash(data: crypto.BinaryLike) { + return crypto.createHash('sha1').update(data).digest('hex'); + } + + static getFileHash(filename: string) { + let hash = fileHash.get(filename); + if (!hash) { + hash = this.getHash(fs.readFileSync(filename)); + fileHash.set(filename, hash); + } + return hash; + } + + static async getFileHashAsync(filename: string) { + let hash = fileHash.get(filename); + if (!hash) { + hash = this.getHash(await fs.promises.readFile(filename)); + fileHash.set(filename, hash); + } + return hash; + } + + static getHashKey({filename, content, varsHashList}: GetHashKeyProps): HashKey { + if (!argsHash) { + const args = ArgvService.getConfig(); + const staticArgs = pick(args, [ + 'varsPreset', 'ignore', 'outputFormat', 'allowHTML', 'vars', 'applyPresets', + 'resolveConditions', 'conditionsInCode', 'disableLiquid', 'strict', 'ignoreStage', 'singlePage', + 'removeHiddenTocItems', 'connector', 'lang', 'lintConfig', 'resources', 'addSystemMeta', + 'contributors', 'ignoreAuthorPatterns', 'allowCustomResources', + ]); + argsHash = CacheService.getHash(JSON.stringify(staticArgs)); + } + const contentHash = CacheService.getHash(content); + return { + key: this.getHash(JSON.stringify({filename, contentHash, varsHashList, argsHash})), + filename, contentHash, varsHashList, + }; + } + + private readonly storeName; + private cacheDir = ''; + private disabled = false; + + constructor(storeName = 'main') { + this.storeName = storeName; + } + + init(enabled: boolean, cacheDir: string) { + this.disabled = !enabled; + this.cacheDir = path.resolve(cacheDir); + } + + checkFile({key}: HashKey) { + if (this.disabled) { return; } + + const filepath = this.getCacheFilepath(key); + if (!fs.existsSync(filepath)) { + return; + } + let file: CacheFile; + try { + const dataJson = fs.readFileSync(filepath, 'utf-8'); + const data = JSON.parse(dataJson); + file = CacheFile.from(data, this.disabled, this.getAssetsDir()); + } catch (err) { + return; + } + return file?.check() ? file : undefined; + } + + async checkFileAsync({key}: HashKey) { + if (this.disabled) { return; } + + const filepath = this.getCacheFilepath(key); + const exists = await fileExists(filepath); + if (!exists) { + return; + } + let file: CacheFile; + try { + const dataJson = await fs.promises.readFile(filepath, 'utf-8'); + const data = JSON.parse(dataJson); + file = CacheFile.from(data, this.disabled, this.getAssetsDir()); + } catch (err) { + return; + } + const isCorrect = await file?.checkAsync(); + return isCorrect ? 
file : undefined; + } + + createFile(key: HashKey) { + return new CacheFile(key, this.disabled, this.getAssetsDir()); + } + + addFile(file: CacheFile) { + if (this.disabled) { return; } + + const filepath = this.getCacheFilepath(file.getKey()); + const place = path.dirname(filepath); + if (!existsDir.has(place)) { + fs.mkdirSync(place, {recursive: true}); + existsDir.add(place); + } + file.writeAssets(); + fs.writeFileSync(filepath, JSON.stringify(file.toJSON())); + } + + async addFileAsync(file: CacheFile) { + if (this.disabled) { return; } + + const filepath = this.getCacheFilepath(file.getKey()); + const place = path.dirname(filepath); + if (!existsDir.has(place)) { + await fs.promises.mkdir(place, {recursive: true}); + existsDir.add(place); + } + await Promise.all([ + file.writeAssetsAsync(), + fs.promises.writeFile(filepath, JSON.stringify(file.toJSON())), + ]); + } + + getHashKey(props: GetHashKeyProps) { + if (this.disabled) { + const {filename, varsHashList} = props; + return { + key: '', + contentHash: '', + filename, + varsHashList, + }; + } + + return CacheService.getHashKey(props); + } + + private getCacheFilepath(key: string) { + return path.join(this.cacheDir, this.storeName, key.slice(0, 2), key); + } + + private getAssetsDir() { + return path.join(this.cacheDir, 'assets'); + } +} + +export const cacheServiceLint = new CacheService('lint'); +export const cacheServiceBuildMd = new CacheService('build-md'); +export const cacheServiceMdToHtml = new CacheService('md-to-html'); diff --git a/src/services/cache/cacheFile.ts b/src/services/cache/cacheFile.ts new file mode 100644 index 00000000..909e7dbb --- /dev/null +++ b/src/services/cache/cacheFile.ts @@ -0,0 +1,298 @@ +import {ArgvService} from '../index'; +import {CacheService} from './cache'; +import {fileExists, getVarsPerFileWithHash} from '../../utils'; +import isEqual from 'lodash/isEqual'; +import * as fs from 'fs'; +import path from 'path'; +import {asyncify, mapLimit, parallelLimit} from 'async'; +import {CacheFileData, CacheFileDataWithDeps, Deps} from './types'; + +const CUNCURRENCY = 1000; +const existsDir = new Set(); + +type CacheFileProps = CacheFileData & Partial; + +class CacheFile { + static from(data: CacheFileDataWithDeps, disabled: boolean, assetsDir: string) { + return new CacheFile(data, disabled, assetsDir); + } + + disabled = false; + private assetsDir: string; + private data: CacheFileDataWithDeps; + private wroteFileData: Record = {}; + + constructor(data: CacheFileProps, disabled: boolean, assetsDir: string) { + this.assetsDir = assetsDir; + this.disabled = disabled; + this.data = { + ...data, + fileDeps: data.fileDeps || {}, + wroteFiles: data.wroteFiles || {}, + copiedFiles: data.copiedFiles || {}, + existsFiles: data.existsFiles || {}, + fileVarsDeps: data.fileVarsDeps || {}, + }; + } + + use() { + if (this.disabled) { return undefined; } + return this; + } + + getKey() { + return this.data.key; + } + + toJSON(): CacheFileDataWithDeps { + return this.data; + } + + check() { + const args = ArgvService.getConfig(); + const {input} = args; + const root = path.resolve(input); + + const {fileDeps, copiedFiles, existsFiles, fileVarsDeps} = this.data; + + for (const filename in fileVarsDeps) { + if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { continue; } + + const reqVarsHashList = fileVarsDeps[filename]; + const {varsHashList} = getVarsPerFileWithHash(filename); + if (!isEqual(varsHashList, reqVarsHashList)) { return; } + } + + for (const to in copiedFiles) { + if 
(!Object.hasOwnProperty.call(copiedFiles, to)) { continue; } + + const from = copiedFiles[to]; + const filepath = path.join(root, from); + if (!fs.existsSync(filepath)) { return; } + } + + for (const filename in existsFiles) { + if (!Object.hasOwnProperty.call(existsFiles, filename)) { continue; } + + const reqState = existsFiles[filename]; + const filepath = path.join(root, filename); + if (fs.existsSync(filepath) !== reqState) { return; } + } + + for (const filename in fileDeps) { + if (!Object.hasOwnProperty.call(fileDeps, filename)) { continue; } + + const reqContentHash = fileDeps[filename]; + const filepath = path.join(root, filename); + if (!fs.existsSync(filepath)) { return; } + const contentHash = CacheService.getFileHash(filepath); + if (contentHash !== reqContentHash) { + return; + } + } + + return true; + } + + async checkAsync() { + const args = ArgvService.getConfig(); + const {input} = args; + const root = path.resolve(input); + + const {fileDeps, copiedFiles, existsFiles, fileVarsDeps} = this.data; + + for (const filename in fileVarsDeps) { + if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { continue; } + + const reqVarsHashList = fileVarsDeps[filename]; + const {varsHashList} = getVarsPerFileWithHash(filename); + if (!isEqual(varsHashList, reqVarsHashList)) { return; } + } + + const tasks: (() => Promise)[] = []; + + Object.entries(copiedFiles).forEach(([, from]) => tasks.push(asyncify(async () => { + const filepath = path.join(root, from); + const isExists = await fileExists(filepath); + if (!isExists) { + throw new Error('Aborted'); + } + }))); + + Object.entries(existsFiles).forEach(([filename, reqState]) => tasks.push(asyncify(async () => { + const filepath = path.join(root, filename as string); + const isExists = await fileExists(filepath); + if (isExists !== reqState as boolean) { + throw new Error('Aborted'); + } + }))); + + Object.entries(fileDeps).forEach(([filename, reqContentHash]) => tasks.push(asyncify(async () => { + const filepath = path.join(root, filename); + const isExists = await fileExists(filepath); + if (!isExists) { + throw new Error('Aborted'); + } + const contentHash = await CacheService.getFileHashAsync(filepath); + if (contentHash !== reqContentHash) { + throw new Error('Aborted'); + } + }))); + + try { + await parallelLimit(tasks, CUNCURRENCY); + } catch (err) { + if ((err as Error).message === 'Aborted') { + return false; + } + throw err; + } + + return true; + } + + addFileDep({filename, content}: {filename: string; content: string | Uint8Array}) { + if (this.data.fileDeps[filename]) return; + + this.data.fileDeps[filename] = CacheService.getHash(content); + } + + addFileExists({filename, state}: {filename: string; state: boolean}) { + this.data.existsFiles[filename] = state; + } + + addCopyFile({from, to}: {from: string; to: string}) { + this.data.copiedFiles[to] = from; + } + + addFileVarsDep(filename: string, varsHashList: string[]) { + this.data.fileVarsDeps[filename] = varsHashList; + } + + addWriteFile(to: string, content: string | Uint8Array) { + const contentHash = CacheService.getHash(content); + + this.wroteFileData[contentHash] = content; + this.data.wroteFiles[to] = contentHash; + } + + getResult() { + return this.data.result as T; + } + + setResult(result: unknown) { + this.data.result = result; + } + + async extractCacheAsync() { + await Promise.all([ + this.writeDataAsync(), + this.copyFilesAsync(), + ]); + } + + extractCache() { + this.writeData(); + this.copyFiles(); + } + + writeAssets() { + const {wroteFileData} = 
this; + for (const filename in wroteFileData) { + if (!Object.hasOwnProperty.call(wroteFileData, filename)) { + continue; + } + + const data = wroteFileData[filename]; + const fullFilename = this.getAssetFilepath(filename); + const place = path.dirname(fullFilename); + if (!existsDir.has(place)) { + fs.mkdirSync(place, {recursive: true}); + } + fs.writeFileSync(fullFilename, data); + } + } + + async writeAssetsAsync() { + const {wroteFileData} = this; + + const tasks = Object.entries(wroteFileData).map(([filename, data]) => asyncify(async () => { + const fullFilename = this.getAssetFilepath(filename); + const place = path.dirname(fullFilename); + if (!existsDir.has(place)) { + await fs.promises.mkdir(place, {recursive: true}); + } + await fs.promises.writeFile(fullFilename, data); + })); + + await parallelLimit(tasks, CUNCURRENCY); + } + + private writeData() { + const {output} = ArgvService.getConfig(); + const distRoot = path.resolve(output); + + const {wroteFiles} = this.data; + + Object.entries(wroteFiles).forEach(([to, assetName]) => { + const fullFrom = this.getAssetFilepath(assetName); + const fullTo = path.join(distRoot, to); + + fs.mkdirSync(path.dirname(fullTo), {recursive: true}); + fs.copyFileSync(fullFrom, fullTo); + }); + } + + private async writeDataAsync() { + const {output} = ArgvService.getConfig(); + const distRoot = path.resolve(output); + + const {wroteFiles} = this.data; + + await mapLimit(Object.entries(wroteFiles), CUNCURRENCY, asyncify(async ([to, assetName]: string[]) => { + const fullFrom = this.getAssetFilepath(assetName); + const fullTo = path.join(distRoot, to); + + await fs.promises.mkdir(path.dirname(fullTo), {recursive: true}); + await fs.promises.copyFile(fullFrom, fullTo); + })); + } + + private copyFiles() { + const {input, output} = ArgvService.getConfig(); + const root = path.resolve(input); + const distRoot = path.resolve(output); + + const {copiedFiles} = this.data; + + Object.entries(copiedFiles).forEach(([to, from]) => { + const fullFrom = path.join(root, from); + const fullTo = path.join(distRoot, to); + + fs.mkdirSync(path.dirname(fullTo), {recursive: true}); + fs.copyFileSync(fullFrom, fullTo); + }); + } + + private async copyFilesAsync() { + const {input, output} = ArgvService.getConfig(); + const root = path.resolve(input); + const distRoot = path.resolve(output); + + const {copiedFiles} = this.data; + + await mapLimit(Object.entries(copiedFiles), CUNCURRENCY, asyncify(async ([to, from]: string[]) => { + const fullFrom = path.join(root, from); + const fullTo = path.join(distRoot, to); + + await fs.promises.mkdir(path.dirname(fullTo), {recursive: true}); + await fs.promises.copyFile(fullFrom, fullTo); + })); + } + + private getAssetFilepath(key: string) { + return path.join(this.assetsDir, key.slice(0, 2), key); + } +} + +export default CacheFile; diff --git a/src/services/cache/index.ts b/src/services/cache/index.ts new file mode 100644 index 00000000..77e55d4e --- /dev/null +++ b/src/services/cache/index.ts @@ -0,0 +1 @@ +export * from './cache'; diff --git a/src/services/cache/types.ts b/src/services/cache/types.ts new file mode 100644 index 00000000..02ec2549 --- /dev/null +++ b/src/services/cache/types.ts @@ -0,0 +1,22 @@ +export interface HashKey { + filename: string; + contentHash: string; + varsHashList: string[]; + key: string; +} + +export type CacheFileDataWithDeps = CacheFileData & Deps; +type TargetLocation = string; +type SourceLocation = string; + +export interface CacheFileData extends HashKey { + result?: unknown; +} + +export 
interface Deps { + fileDeps: Record; + wroteFiles: Record; + copiedFiles: Record; + existsFiles: Record; + fileVarsDeps: Record; +} diff --git a/src/services/preset.ts b/src/services/preset.ts index 78d126ca..021de90b 100644 --- a/src/services/preset.ts +++ b/src/services/preset.ts @@ -2,11 +2,12 @@ import {dirname, normalize} from 'path'; import {DocPreset, YfmPreset} from '../models'; -export type PresetStorage = Map; +export type PresetStorage = {store: Map; hashMap: Map}; -let presetStorage: PresetStorage = new Map(); +let presetStorage: PresetStorage['store'] = new Map(); +let presetStorageHash: PresetStorage['hashMap'] = new Map(); -function add(parsedPreset: DocPreset, path: string, varsPreset: string) { +function add(parsedPreset: DocPreset, path: string, varsPreset: string, hash: string) { const combinedValues: YfmPreset = { ...(parsedPreset.default || {}), ...(parsedPreset[varsPreset] || {}), @@ -14,6 +15,7 @@ function add(parsedPreset: DocPreset, path: string, varsPreset: string) { const key = dirname(normalize(path)); presetStorage.set(key, combinedValues); + presetStorageHash.set(key, hash); } function get(path: string): YfmPreset { @@ -39,17 +41,47 @@ function get(path: string): YfmPreset { return combinedValues; } -function getPresetStorage(): Map { - return presetStorage; +function getWithHash(path: string): {vars: YfmPreset; varsHashList: string[]} { + const values: YfmPreset[] = []; + const varsHashList: string[] = []; + let localPath = normalize(path); + + const next = (place: string) => { + const presetValues = presetStorage.get(place); + const hash = presetStorageHash.get(place); + if (presetValues && hash) { + varsHashList.unshift(hash); + values.unshift(presetValues); + } + }; + + while (localPath !== '.') { + next(localPath); + localPath = dirname(localPath); + } + next(localPath); + + const combinedValues = Object.assign({}, ...values); + + return {vars: combinedValues, varsHashList}; +} + +function getPresetStorage(): PresetStorage { + return { + store: presetStorage, + hashMap: presetStorageHash, + }; } -function setPresetStorage(preset: Map): void { - presetStorage = preset; +function setPresetStorage(preset: PresetStorage): void { + presetStorage = preset.store; + presetStorageHash = preset.hashMap; } export default { add, get, + getWithHash, getPresetStorage, setPresetStorage, }; diff --git a/src/services/utils.ts b/src/services/utils.ts index 88430454..7dfa1f7d 100644 --- a/src/services/utils.ts +++ b/src/services/utils.ts @@ -2,6 +2,7 @@ import evalExp from '@diplodoc/transform/lib/liquid/evaluation'; import {Filter, TextItems} from '../models'; import liquid from '@diplodoc/transform/lib/liquid'; import {ArgvService} from './index'; +import {Logger} from '@doc-tools/transform/src/transform/log'; export interface FilterFilesOptions { resolveConditions?: boolean; @@ -159,3 +160,15 @@ export function liquidField(input: string, vars: Record, path: export function isObject(o: unknown): o is object { return typeof o === 'object' && o !== null; } + +export function getLogState(log: Logger) { + const {LogLevels} = log; + const problems = log.get(); + const warnCount = problems[LogLevels.WARN].length; + const errCount = problems[LogLevels.ERROR].length; + return warnCount + errCount; +} + +export function checkLogWithoutProblems(log: Logger, logState: number) { + return getLogState(log) === logState; +} diff --git a/src/steps/processServiceFiles.ts b/src/steps/processServiceFiles.ts index 7ae63f13..5d653394 100644 --- a/src/steps/processServiceFiles.ts +++ 
b/src/steps/processServiceFiles.ts @@ -8,6 +8,7 @@ import {ArgvService, PresetService, TocService} from '../services'; import {logger} from '../utils'; import {DocPreset} from '../models'; import shell from 'shelljs'; +import {CacheService} from '../services/cache'; type GetFilePathsByGlobalsFunction = (globs: string[]) => string[]; @@ -44,9 +45,10 @@ function preparingPresetFiles(getFilePathsByGlobals: GetFilePathsByGlobalsFuncti const pathToPresetFile = resolve(inputFolderPath, path); const content = readFileSync(pathToPresetFile, 'utf8'); + const contentHash = CacheService.getHash(content); const parsedPreset = load(content) as DocPreset; - PresetService.add(parsedPreset, path, varsPreset); + PresetService.add(parsedPreset, path, varsPreset, contentHash); if (outputFormat === 'md' && (!applyPresets || !resolveConditions)) { // Should save filtered presets.yaml only when --apply-presets=false or --resolve-conditions=false diff --git a/src/utils/file.ts b/src/utils/file.ts index 021f1dad..662f8d9f 100644 --- a/src/utils/file.ts +++ b/src/utils/file.ts @@ -1,6 +1,6 @@ import {dirname, resolve} from 'path'; import shell from 'shelljs'; -import {copyFileSync} from 'fs'; +import * as fs from 'fs'; import {logger} from './logger'; export function copyFiles( @@ -14,8 +14,20 @@ export function copyFiles( const to = resolve(outputFolderPath, pathToAsset); shell.mkdir('-p', outputDir); - copyFileSync(from, to); + fs.copyFileSync(from, to); logger.copy(pathToAsset); } } + +export async function fileExists(path: string) { + try { + await fs.promises.stat(path); + return true; + } catch (err) { + if ((err as Error & {code?: string}).code === 'ENOENT') { + return false; + } + throw err; + } +} diff --git a/src/utils/path.ts b/src/utils/path.ts index 9bf79da1..9bb021ff 100644 --- a/src/utils/path.ts +++ b/src/utils/path.ts @@ -1,4 +1,4 @@ -import {sep} from 'path'; +import {sep, normalize} from 'path'; import {Platforms} from '../constants'; export function addSlashPrefix(path: string): string { @@ -22,3 +22,7 @@ export function convertSlashToWindowsBackSlashes(path: string): string { return path; } + +export function safeRelativePath(filename: string) { + return normalize(`/${filename}`).slice(1); +} diff --git a/src/utils/pluginEnvApi.ts b/src/utils/pluginEnvApi.ts new file mode 100644 index 00000000..5551fdee --- /dev/null +++ b/src/utils/pluginEnvApi.ts @@ -0,0 +1,176 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import CacheFile from '../services/cache/cacheFile'; +import {getVarsPerFileWithHash} from './presets'; +import {safeRelativePath} from './path'; +import {asyncify, mapLimit} from 'async'; + +const CUNCURRENCY = 1000; + +enum AsyncActionType { + Copy = 'copy', + Write = 'write', +} + +type CopyFileAsyncAction = {type: AsyncActionType.Copy; from: string; to: string}; +type WriteFileAsyncAction = {type: AsyncActionType.Write; to: string; data: string | Uint8Array}; + +type AsyncAction = CopyFileAsyncAction | WriteFileAsyncAction; + +interface PluginEnvApiProps { + root: string; distRoot: string; cacheFile?: CacheFile; +} + +class PluginEnvApi { + static create(props: PluginEnvApiProps) { + return new PluginEnvApi(props); + } + + public readonly root: string; + public readonly distRoot: string; + public readonly cacheFile: CacheFile | undefined; + + private readonly asyncActionQueue: AsyncAction[] = []; + + constructor({root, distRoot, cacheFile}: PluginEnvApiProps) { + this.root = root; + this.distRoot = distRoot; + this.cacheFile = cacheFile?.use(); + } + + copyFile(rawFrom: 
string, rawTo: string) { + const from = safeRelativePath(rawFrom); + const to = safeRelativePath(rawTo); + + const fullFrom = path.join(this.root, from); + const fullTo = path.join(this.distRoot, to); + + fs.mkdirSync(path.dirname(fullTo), {recursive: true}); + fs.copyFileSync(fullFrom, fullTo); + if (this.cacheFile) { + this.cacheFile.addCopyFile({from, to}); + } + } + + copyFileAsync(rawFrom: string, rawTo: string) { + const from = safeRelativePath(rawFrom); + const to = safeRelativePath(rawTo); + + this.asyncActionQueue.push({type: AsyncActionType.Copy, from, to}); + } + + readFile(rawTarget: string, encoding: BufferEncoding | null): Uint8Array | string { + const target = safeRelativePath(rawTarget); + const fullTarget = path.join(this.root, target); + + const result = fs.readFileSync(fullTarget, encoding); + if (this.cacheFile) { + this.cacheFile.addFileDep({filename: target, content: result}); + } + return result; + } + + fileExists(rawTarget: string) { + const target = safeRelativePath(rawTarget); + const fullTarget = path.join(this.root, target); + + const result = fs.existsSync(fullTarget); + if (this.cacheFile) { + this.cacheFile.addFileExists({filename: target, state: result}); + } + return result; + } + + writeFile(rawTo: string, data: string | Uint8Array) { + const to = safeRelativePath(rawTo); + const fullTo = path.join(this.distRoot, to); + + fs.mkdirSync(path.dirname(fullTo), {recursive: true}); + fs.writeFileSync(fullTo, data); + if (this.cacheFile) { + this.cacheFile.addWriteFile(to, data); + } + } + + writeFileAsync(rawTo: string, data: string | Uint8Array) { + const to = safeRelativePath(rawTo); + + this.asyncActionQueue.push({type: AsyncActionType.Write, to, data}); + } + + getFileVars(rawTarget: string) { + const target = safeRelativePath(rawTarget); + + const {vars, varsHashList} = getVarsPerFileWithHash(target); + if (this.cacheFile) { + this.cacheFile.addFileVarsDep(target, varsHashList); + } + return vars; + } + + executeActions() { + const {asyncActionQueue} = this; + + asyncActionQueue.splice(0).forEach((action) => { + switch (action.type) { + case AsyncActionType.Copy: { + const {from, to} = action; + const fullFrom = path.join(this.root, from); + const fullTo = path.join(this.distRoot, to); + + fs.mkdirSync(path.dirname(fullTo), {recursive: true}); + fs.copyFileSync(fullFrom, fullTo); + if (this.cacheFile) { + this.cacheFile.addCopyFile({from, to}); + } + break; + } + case AsyncActionType.Write: { + const {to, data} = action; + const fullTo = path.join(this.distRoot, to); + + fs.mkdirSync(path.dirname(fullTo), {recursive: true}); + fs.writeFileSync(fullTo, data); + if (this.cacheFile) { + this.cacheFile.addWriteFile(to, data); + } + break; + } + } + }); + } + + async executeActionsAsync() { + const {asyncActionQueue} = this; + + await mapLimit(asyncActionQueue.splice(0), CUNCURRENCY, asyncify(async (action: AsyncAction) => { + switch (action.type) { + case AsyncActionType.Copy: { + const {from, to} = action; + const fullFrom = path.join(this.root, from); + const fullTo = path.join(this.distRoot, to); + + await fs.promises.mkdir(path.dirname(fullTo), {recursive: true}); + await fs.promises.copyFile(fullFrom, fullTo); + if (this.cacheFile) { + this.cacheFile.addCopyFile({from, to}); + } + break; + } + case AsyncActionType.Write: { + const {to, data} = action; + const fullTo = path.join(this.distRoot, to); + + await fs.promises.mkdir(path.dirname(fullTo), {recursive: true}); + await fs.promises.writeFile(fullTo, data); + if (this.cacheFile) { + 
this.cacheFile.addWriteFile(to, data); + } + break; + } + } + })); + } +} + +export default PluginEnvApi; diff --git a/src/utils/presets.ts b/src/utils/presets.ts index c2208c89..66e991b3 100644 --- a/src/utils/presets.ts +++ b/src/utils/presets.ts @@ -1,6 +1,8 @@ import {dirname, relative, resolve} from 'path'; +import {isEmpty} from 'lodash'; import {ArgvService, PresetService} from '../services'; +import {CacheService} from '../services/cache'; export function getVarsPerFile(filePath: string): Record { const {vars: argVars} = ArgvService.getConfig(); @@ -11,6 +13,19 @@ export function getVarsPerFile(filePath: string): Record { }; } +export function getVarsPerFileWithHash(filePath: string): {varsHashList: string[]; vars: Record} { + const {vars: argVars} = ArgvService.getConfig(); + + const {vars, varsHashList} = PresetService.getWithHash(dirname(filePath)); + + if (!isEmpty(argVars)) { + varsHashList.push(CacheService.getObjHash(argVars)); + Object.assign(vars, argVars); + } + + return {vars, varsHashList}; +} + export function getVarsPerRelativeFile(filePath: string): Record { const {input} = ArgvService.getConfig(); const root = resolve(input); @@ -18,3 +33,4 @@ export function getVarsPerRelativeFile(filePath: string): Record return getVarsPerFile(relativeFilePath); } + diff --git a/src/workers/linter/index.ts b/src/workers/linter/index.ts index 969ee487..93edd920 100644 --- a/src/workers/linter/index.ts +++ b/src/workers/linter/index.ts @@ -8,6 +8,7 @@ import {TocServiceData} from '../../services/tocs'; import {PresetStorage} from '../../services/preset'; import {YfmArgv} from '../../models'; import {lintPage} from '../../resolvers'; +import {cacheServiceLint} from '../../services/cache'; let processedPages = new Subject(); @@ -23,6 +24,8 @@ async function run({argvConfig, presetStorage, navigationPaths}: ProcessLinterWo TocService.setNavigationPaths(navigationPaths); PluginService.setPlugins(); + cacheServiceLint.init(argvConfig.cache, argvConfig.cacheDir); + TocService.getNavigationPaths().forEach((pathToFile) => { lintPage({ inputPath: pathToFile, From 2a7049a6ddd26766275ee515d4f55a0301d3acd1 Mon Sep 17 00:00:00 2001 From: Anton Vikulov Date: Thu, 19 Oct 2023 19:23:19 +0500 Subject: [PATCH 2/2] fix(cache): fix --- src/models.ts | 19 ++-- src/resolvers/lintPage.ts | 1 - src/resolvers/md2html.ts | 5 +- src/resolvers/md2md.ts | 49 +++++---- src/services/cache/cache.ts | 45 +++++++-- src/services/cache/cacheFile.ts | 169 ++++++++++++++++++++------------ src/services/utils.ts | 2 +- src/utils/pluginEnvApi.ts | 62 ++++++------ src/utils/presets.ts | 6 +- 9 files changed, 224 insertions(+), 134 deletions(-) diff --git a/src/models.ts b/src/models.ts index 7d09e209..afd2bd42 100644 --- a/src/models.ts +++ b/src/models.ts @@ -6,7 +6,7 @@ import {Lang, Stage, IncludeMode, ResourceType} from './constants'; import {ChangelogItem} from '@diplodoc/transform/lib/plugins/changelog/types'; import PluginEnvApi from './utils/pluginEnvApi'; -export type VarsPreset = 'internal'|'external'; +export type VarsPreset = 'internal' | 'external'; export type YfmPreset = Record; // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -14,7 +14,10 @@ export type Metadata = Record; export type ExternalAuthorByPathFunction = (path: string) => Contributor | null; export type ContributorsByPathFunction = (path: string) => Promise; -export type NestedContributorsForPathFunction = (path: string, nestedContributors: Contributors) => void; +export type NestedContributorsForPathFunction = ( + path: string, + 
nestedContributors: Contributors, +) => void; export type UserByLoginFunction = (login: string) => Promise; export type CollectionOfPluginsFunction = (output: string, options: PluginOptions) => string; @@ -99,7 +102,7 @@ export type YfmTocIncluder = { export const includersNames = ['sourcedocs', 'openapi', 'generic', 'unarchive'] as const; -export type YfmTocIncluderName = typeof includersNames[number]; +export type YfmTocIncluderName = (typeof includersNames)[number]; // eslint-disable-next-line @typescript-eslint/no-explicit-any export type Includer = { @@ -107,7 +110,9 @@ export type Includer = { includerFunction: IncluderFunction; }; -export type IncluderFunction = (args: IncluderFunctionParams) => Promise; +export type IncluderFunction = ( + args: IncluderFunctionParams, +) => Promise; export type IncluderFunctionParams = { // item that contains include that uses includer @@ -151,7 +156,7 @@ export interface LeadingPageLinks extends Filter { } export interface Filter { - when?: boolean|string; + when?: boolean | string; [key: string]: unknown; } @@ -243,8 +248,8 @@ export type Resources = { }; export type YandexCloudTranslateGlossaryPair = { - sourceText: string; - translatedText: string; + sourceText: string; + translatedText: string; }; export type CommitInfo = { diff --git a/src/resolvers/lintPage.ts b/src/resolvers/lintPage.ts index e223f716..f34c563a 100644 --- a/src/resolvers/lintPage.ts +++ b/src/resolvers/lintPage.ts @@ -93,7 +93,6 @@ function MdFileLinter(content: string, lintOptions: FileTransformOptions): void const pluginOptions: PluginOptions = { ...options, vars, - varsHashList, root, path: localPath, lintMarkdown, // Should pass the function for linting included files diff --git a/src/resolvers/md2html.ts b/src/resolvers/md2html.ts index 0e68d60d..01470137 100644 --- a/src/resolvers/md2html.ts +++ b/src/resolvers/md2html.ts @@ -131,7 +131,10 @@ export function liquidMd2Html(input: string, vars: Record, path }); } -async function MdFileTransformer(content: string, transformOptions: FileTransformOptions): Promise { +async function MdFileTransformer( + content: string, + transformOptions: FileTransformOptions, +): Promise { const {input, ...options} = ArgvService.getConfig(); const {path: filePath} = transformOptions; diff --git a/src/resolvers/md2md.ts b/src/resolvers/md2md.ts index 827adad1..785a2187 100644 --- a/src/resolvers/md2md.ts +++ b/src/resolvers/md2md.ts @@ -22,7 +22,11 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise const rawContent = fs.readFileSync(resolvedInputPath, 'utf8'); - const cacheKey = cacheServiceBuildMd.getHashKey({filename: inputPath, content: rawContent, varsHashList}); + const cacheKey = cacheServiceBuildMd.getHashKey({ + filename: inputPath, + content: rawContent, + varsHashList, + }); let result: string; let changelogs: ChangelogItem[]; @@ -31,15 +35,15 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise if (cachedFile) { logger.info(inputPath, CACHE_HIT); await cachedFile.extractCacheAsync(); - const results = cachedFile.getResult<{result: string; changelogs: ChangelogItem[]; logs: Record}>(); + const results = cachedFile.getResult<{ + result: string; + changelogs: ChangelogItem[]; + logs: Record; + }>(); result = results.result; changelogs = results.changelogs; } else { - const content = await getContentWithUpdatedMetadata( - rawContent, - metadata, - vars.__system, - ); + const content = await getContentWithUpdatedMetadata(rawContent, metadata, vars.__system); const cacheFile = 
cacheServiceBuildMd.createFile(cacheKey); const envApi = PluginEnvApi.create({ @@ -90,7 +94,10 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise changesName = Math.trunc(new Date(changesDate).getTime() / 1000); } if (!changesName) { - changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart(3, '0')}`; + changesName = `name-${mdFilename}-${String(changelogs.length - index).padStart( + 3, + '0', + )}`; } const changesPath = join(outputDir, `changes-${changesName}.json`); @@ -99,10 +106,13 @@ export async function resolveMd2Md(options: ResolveMd2MdOptions): Promise throw new Error(`Changelog ${changesPath} already exists!`); } - fs.writeFileSync(changesPath, JSON.stringify({ - ...changes, - source: mdFilename, - })); + fs.writeFileSync( + changesPath, + JSON.stringify({ + ...changes, + source: mdFilename, + }), + ); }); } @@ -116,7 +126,10 @@ function copyFile(targetPath: string, targetDestPath: string, options?: PluginOp const {envApi} = options; let sourceIncludeContent: string; if (envApi) { - sourceIncludeContent = envApi.readFile(relative(envApi.root, targetPath), 'utf-8') as string; + sourceIncludeContent = envApi.readFile( + relative(envApi.root, targetPath), + 'utf-8', + ) as string; } else { sourceIncludeContent = fs.readFileSync(targetPath, 'utf8'); } @@ -136,11 +149,7 @@ function copyFile(targetPath: string, targetDestPath: string, options?: PluginOp } export function liquidMd2Md(input: string, vars: Record, path: string) { - const { - applyPresets, - resolveConditions, - conditionsInCode, - } = ArgvService.getConfig(); + const {applyPresets, resolveConditions, conditionsInCode} = ArgvService.getConfig(); return liquid(input, vars, path, { conditions: resolveConditions, @@ -152,9 +161,7 @@ export function liquidMd2Md(input: string, vars: Record, path: } function transformMd2Md(input: string, options: PluginOptions) { - const { - disableLiquid, - } = ArgvService.getConfig(); + const {disableLiquid} = ArgvService.getConfig(); const { vars = {}, path, diff --git a/src/services/cache/cache.ts b/src/services/cache/cache.ts index 35a5c6a8..7969e4a7 100644 --- a/src/services/cache/cache.ts +++ b/src/services/cache/cache.ts @@ -51,17 +51,36 @@ export class CacheService { if (!argsHash) { const args = ArgvService.getConfig(); const staticArgs = pick(args, [ - 'varsPreset', 'ignore', 'outputFormat', 'allowHTML', 'vars', 'applyPresets', - 'resolveConditions', 'conditionsInCode', 'disableLiquid', 'strict', 'ignoreStage', 'singlePage', - 'removeHiddenTocItems', 'connector', 'lang', 'lintConfig', 'resources', 'addSystemMeta', - 'contributors', 'ignoreAuthorPatterns', 'allowCustomResources', + 'varsPreset', + 'ignore', + 'outputFormat', + 'allowHTML', + 'vars', + 'applyPresets', + 'resolveConditions', + 'conditionsInCode', + 'disableLiquid', + 'strict', + 'ignoreStage', + 'singlePage', + 'removeHiddenTocItems', + 'connector', + 'lang', + 'lintConfig', + 'resources', + 'addSystemMeta', + 'contributors', + 'ignoreAuthorPatterns', + 'allowCustomResources', ]); argsHash = CacheService.getHash(JSON.stringify(staticArgs)); } const contentHash = CacheService.getHash(content); return { key: this.getHash(JSON.stringify({filename, contentHash, varsHashList, argsHash})), - filename, contentHash, varsHashList, + filename, + contentHash, + varsHashList, }; } @@ -79,7 +98,9 @@ export class CacheService { } checkFile({key}: HashKey) { - if (this.disabled) { return; } + if (this.disabled) { + return; + } const filepath = this.getCacheFilepath(key); if 
(!fs.existsSync(filepath)) { @@ -97,7 +118,9 @@ export class CacheService { } async checkFileAsync({key}: HashKey) { - if (this.disabled) { return; } + if (this.disabled) { + return; + } const filepath = this.getCacheFilepath(key); const exists = await fileExists(filepath); @@ -121,7 +144,9 @@ export class CacheService { } addFile(file: CacheFile) { - if (this.disabled) { return; } + if (this.disabled) { + return; + } const filepath = this.getCacheFilepath(file.getKey()); const place = path.dirname(filepath); @@ -134,7 +159,9 @@ export class CacheService { } async addFileAsync(file: CacheFile) { - if (this.disabled) { return; } + if (this.disabled) { + return; + } const filepath = this.getCacheFilepath(file.getKey()); const place = path.dirname(filepath); diff --git a/src/services/cache/cacheFile.ts b/src/services/cache/cacheFile.ts index 909e7dbb..21efddfe 100644 --- a/src/services/cache/cacheFile.ts +++ b/src/services/cache/cacheFile.ts @@ -36,7 +36,9 @@ class CacheFile { } use() { - if (this.disabled) { return undefined; } + if (this.disabled) { + return undefined; + } return this; } @@ -56,35 +58,51 @@ class CacheFile { const {fileDeps, copiedFiles, existsFiles, fileVarsDeps} = this.data; for (const filename in fileVarsDeps) { - if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { continue; } + if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { + continue; + } const reqVarsHashList = fileVarsDeps[filename]; const {varsHashList} = getVarsPerFileWithHash(filename); - if (!isEqual(varsHashList, reqVarsHashList)) { return; } + if (!isEqual(varsHashList, reqVarsHashList)) { + return; + } } for (const to in copiedFiles) { - if (!Object.hasOwnProperty.call(copiedFiles, to)) { continue; } + if (!Object.hasOwnProperty.call(copiedFiles, to)) { + continue; + } const from = copiedFiles[to]; const filepath = path.join(root, from); - if (!fs.existsSync(filepath)) { return; } + if (!fs.existsSync(filepath)) { + return; + } } for (const filename in existsFiles) { - if (!Object.hasOwnProperty.call(existsFiles, filename)) { continue; } + if (!Object.hasOwnProperty.call(existsFiles, filename)) { + continue; + } const reqState = existsFiles[filename]; const filepath = path.join(root, filename); - if (fs.existsSync(filepath) !== reqState) { return; } + if (fs.existsSync(filepath) !== reqState) { + return; + } } for (const filename in fileDeps) { - if (!Object.hasOwnProperty.call(fileDeps, filename)) { continue; } + if (!Object.hasOwnProperty.call(fileDeps, filename)) { + continue; + } const reqContentHash = fileDeps[filename]; const filepath = path.join(root, filename); - if (!fs.existsSync(filepath)) { return; } + if (!fs.existsSync(filepath)) { + return; + } const contentHash = CacheService.getFileHash(filepath); if (contentHash !== reqContentHash) { return; @@ -102,42 +120,58 @@ class CacheFile { const {fileDeps, copiedFiles, existsFiles, fileVarsDeps} = this.data; for (const filename in fileVarsDeps) { - if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { continue; } + if (!Object.hasOwnProperty.call(fileVarsDeps, filename)) { + continue; + } const reqVarsHashList = fileVarsDeps[filename]; const {varsHashList} = getVarsPerFileWithHash(filename); - if (!isEqual(varsHashList, reqVarsHashList)) { return; } + if (!isEqual(varsHashList, reqVarsHashList)) { + return; + } } const tasks: (() => Promise)[] = []; - Object.entries(copiedFiles).forEach(([, from]) => tasks.push(asyncify(async () => { - const filepath = path.join(root, from); - const isExists = await fileExists(filepath); - if 
(!isExists) { - throw new Error('Aborted'); - } - }))); - - Object.entries(existsFiles).forEach(([filename, reqState]) => tasks.push(asyncify(async () => { - const filepath = path.join(root, filename as string); - const isExists = await fileExists(filepath); - if (isExists !== reqState as boolean) { - throw new Error('Aborted'); - } - }))); - - Object.entries(fileDeps).forEach(([filename, reqContentHash]) => tasks.push(asyncify(async () => { - const filepath = path.join(root, filename); - const isExists = await fileExists(filepath); - if (!isExists) { - throw new Error('Aborted'); - } - const contentHash = await CacheService.getFileHashAsync(filepath); - if (contentHash !== reqContentHash) { - throw new Error('Aborted'); - } - }))); + Object.entries(copiedFiles).forEach(([, from]) => + tasks.push( + asyncify(async () => { + const filepath = path.join(root, from); + const isExists = await fileExists(filepath); + if (!isExists) { + throw new Error('Aborted'); + } + }), + ), + ); + + Object.entries(existsFiles).forEach(([filename, reqState]) => + tasks.push( + asyncify(async () => { + const filepath = path.join(root, filename as string); + const isExists = await fileExists(filepath); + if (isExists !== (reqState as boolean)) { + throw new Error('Aborted'); + } + }), + ), + ); + + Object.entries(fileDeps).forEach(([filename, reqContentHash]) => + tasks.push( + asyncify(async () => { + const filepath = path.join(root, filename); + const isExists = await fileExists(filepath); + if (!isExists) { + throw new Error('Aborted'); + } + const contentHash = await CacheService.getFileHashAsync(filepath); + if (contentHash !== reqContentHash) { + throw new Error('Aborted'); + } + }), + ), + ); try { await parallelLimit(tasks, CUNCURRENCY); @@ -185,10 +219,7 @@ class CacheFile { } async extractCacheAsync() { - await Promise.all([ - this.writeDataAsync(), - this.copyFilesAsync(), - ]); + await Promise.all([this.writeDataAsync(), this.copyFilesAsync()]); } extractCache() { @@ -216,14 +247,16 @@ class CacheFile { async writeAssetsAsync() { const {wroteFileData} = this; - const tasks = Object.entries(wroteFileData).map(([filename, data]) => asyncify(async () => { - const fullFilename = this.getAssetFilepath(filename); - const place = path.dirname(fullFilename); - if (!existsDir.has(place)) { - await fs.promises.mkdir(place, {recursive: true}); - } - await fs.promises.writeFile(fullFilename, data); - })); + const tasks = Object.entries(wroteFileData).map(([filename, data]) => + asyncify(async () => { + const fullFilename = this.getAssetFilepath(filename); + const place = path.dirname(fullFilename); + if (!existsDir.has(place)) { + await fs.promises.mkdir(place, {recursive: true}); + } + await fs.promises.writeFile(fullFilename, data); + }), + ); await parallelLimit(tasks, CUNCURRENCY); } @@ -249,13 +282,17 @@ class CacheFile { const {wroteFiles} = this.data; - await mapLimit(Object.entries(wroteFiles), CUNCURRENCY, asyncify(async ([to, assetName]: string[]) => { - const fullFrom = this.getAssetFilepath(assetName); - const fullTo = path.join(distRoot, to); - - await fs.promises.mkdir(path.dirname(fullTo), {recursive: true}); - await fs.promises.copyFile(fullFrom, fullTo); - })); + await mapLimit( + Object.entries(wroteFiles), + CUNCURRENCY, + asyncify(async ([to, assetName]: string[]) => { + const fullFrom = this.getAssetFilepath(assetName); + const fullTo = path.join(distRoot, to); + + await fs.promises.mkdir(path.dirname(fullTo), {recursive: true}); + await fs.promises.copyFile(fullFrom, fullTo); + }), + ); } 
 
     private copyFiles() {
@@ -281,13 +318,17 @@ class CacheFile {
 
         const {copiedFiles} = this.data;
 
-        await mapLimit(Object.entries(copiedFiles), CUNCURRENCY, asyncify(async ([to, from]: string[]) => {
-            const fullFrom = path.join(root, from);
-            const fullTo = path.join(distRoot, to);
-
-            await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-            await fs.promises.copyFile(fullFrom, fullTo);
-        }));
+        await mapLimit(
+            Object.entries(copiedFiles),
+            CUNCURRENCY,
+            asyncify(async ([to, from]: string[]) => {
+                const fullFrom = path.join(root, from);
+                const fullTo = path.join(distRoot, to);
+
+                await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                await fs.promises.copyFile(fullFrom, fullTo);
+            }),
+        );
     }
 
     private getAssetFilepath(key: string) {
diff --git a/src/services/utils.ts b/src/services/utils.ts
index 7dfa1f7d..f55271ed 100644
--- a/src/services/utils.ts
+++ b/src/services/utils.ts
@@ -2,7 +2,7 @@ import evalExp from '@diplodoc/transform/lib/liquid/evaluation';
 import {Filter, TextItems} from '../models';
 import liquid from '@diplodoc/transform/lib/liquid';
 import {ArgvService} from './index';
-import {Logger} from '@doc-tools/transform/src/transform/log';
+import {Logger} from '@diplodoc/transform/src/transform/log';
 
 export interface FilterFilesOptions {
     resolveConditions?: boolean;
diff --git a/src/utils/pluginEnvApi.ts b/src/utils/pluginEnvApi.ts
index 5551fdee..ee8c080c 100644
--- a/src/utils/pluginEnvApi.ts
+++ b/src/utils/pluginEnvApi.ts
@@ -18,7 +18,9 @@ type WriteFileAsyncAction = {type: AsyncActionType.Write; to: string; data: stri
 type AsyncAction = CopyFileAsyncAction | WriteFileAsyncAction;
 
 interface PluginEnvApiProps {
-    root: string; distRoot: string; cacheFile?: CacheFile;
+    root: string;
+    distRoot: string;
+    cacheFile?: CacheFile;
 }
 
 class PluginEnvApi {
@@ -26,9 +28,9 @@ class PluginEnvApi {
         return new PluginEnvApi(props);
     }
 
-    public readonly root: string;
-    public readonly distRoot: string;
-    public readonly cacheFile: CacheFile | undefined;
+    readonly root: string;
+    readonly distRoot: string;
+    readonly cacheFile: CacheFile | undefined;
 
     private readonly asyncActionQueue: AsyncAction[] = [];
 
@@ -143,33 +145,37 @@ class PluginEnvApi {
     async executeActionsAsync() {
         const {asyncActionQueue} = this;
 
-        await mapLimit(asyncActionQueue.splice(0), CUNCURRENCY, asyncify(async (action: AsyncAction) => {
-            switch (action.type) {
-                case AsyncActionType.Copy: {
-                    const {from, to} = action;
-                    const fullFrom = path.join(this.root, from);
-                    const fullTo = path.join(this.distRoot, to);
-
-                    await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-                    await fs.promises.copyFile(fullFrom, fullTo);
-                    if (this.cacheFile) {
-                        this.cacheFile.addCopyFile({from, to});
+        await mapLimit(
+            asyncActionQueue.splice(0),
+            CUNCURRENCY,
+            asyncify(async (action: AsyncAction) => {
+                switch (action.type) {
+                    case AsyncActionType.Copy: {
+                        const {from, to} = action;
+                        const fullFrom = path.join(this.root, from);
+                        const fullTo = path.join(this.distRoot, to);
+
+                        await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                        await fs.promises.copyFile(fullFrom, fullTo);
+                        if (this.cacheFile) {
+                            this.cacheFile.addCopyFile({from, to});
+                        }
+                        break;
                     }
-                    break;
-                }
-                case AsyncActionType.Write: {
-                    const {to, data} = action;
-                    const fullTo = path.join(this.distRoot, to);
-
-                    await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
-                    await fs.promises.writeFile(fullTo, data);
-                    if (this.cacheFile) {
-                        this.cacheFile.addWriteFile(to, data);
+                    case AsyncActionType.Write: {
+                        const {to, data} = action;
+                        const fullTo = path.join(this.distRoot, to);
+
+                        await fs.promises.mkdir(path.dirname(fullTo), {recursive: true});
+                        await fs.promises.writeFile(fullTo, data);
+                        if (this.cacheFile) {
+                            this.cacheFile.addWriteFile(to, data);
+                        }
+                        break;
                     }
-                    break;
                 }
-            }
-        }));
+            }),
+        );
     }
 }
 
diff --git a/src/utils/presets.ts b/src/utils/presets.ts
index 66e991b3..5bdde32f 100644
--- a/src/utils/presets.ts
+++ b/src/utils/presets.ts
@@ -13,7 +13,10 @@ export function getVarsPerFile(filePath: string): Record<string, string> {
     };
 }
 
-export function getVarsPerFileWithHash(filePath: string): {varsHashList: string[]; vars: Record<string, string>} {
+export function getVarsPerFileWithHash(filePath: string): {
+    varsHashList: string[];
+    vars: Record<string, string>;
+} {
     const {vars: argVars} = ArgvService.getConfig();
     const {vars, varsHashList} = PresetService.getWithHash(dirname(filePath));
 
@@ -33,4 +36,3 @@ export function getVarsPerRelativeFile(filePath: string): Record<string, string>
 
     return getVarsPerFile(relativeFilePath);
 }
-