From e343cc0e15234e3b7904d6f33ff05f19f3d82f22 Mon Sep 17 00:00:00 2001 From: Max Duval Date: Mon, 24 Jun 2024 14:48:51 +0100 Subject: [PATCH] Format tools directory (#27250) prepare for ESM migration by reducing the git difference: - default export on separate line - multi-line task arrays - explicit config paths - chore: update hash of eslint plugin Co-authored-by: Ravi <7014230+arelra@users.noreply.github.com> --- tools/__tasks__/compile/conf/clean.js | 11 +- tools/__tasks__/compile/conf/copy.js | 56 +++-- tools/__tasks__/compile/conf/index.js | 12 +- tools/__tasks__/compile/css/clean.js | 16 +- tools/__tasks__/compile/css/index.dev.js | 19 +- tools/__tasks__/compile/css/index.js | 19 +- tools/__tasks__/compile/css/mkdir.js | 10 +- tools/__tasks__/compile/css/sass.js | 82 +++--- tools/__tasks__/compile/data/amp.js | 22 +- tools/__tasks__/compile/data/clean.js | 16 +- tools/__tasks__/compile/data/download.js | 107 ++++---- tools/__tasks__/compile/data/index.dev.js | 13 +- tools/__tasks__/compile/data/index.js | 12 +- tools/__tasks__/compile/data/index.watch.js | 13 +- tools/__tasks__/compile/hash/clean.js | 10 +- tools/__tasks__/compile/hash/index.js | 235 ++++++++++-------- tools/__tasks__/compile/images/clean.js | 18 +- tools/__tasks__/compile/images/copy.js | 20 +- tools/__tasks__/compile/images/icons.js | 167 ++++++------- tools/__tasks__/compile/images/index.js | 19 +- tools/__tasks__/compile/images/svg.js | 81 +++--- tools/__tasks__/compile/index.dev.js | 21 +- tools/__tasks__/compile/index.js | 23 +- tools/__tasks__/compile/index.watch.js | 21 +- tools/__tasks__/compile/inline-svgs/index.js | 85 ++++--- .../compile/javascript/bundle-polyfills.js | 91 +++---- tools/__tasks__/compile/javascript/clean.js | 16 +- tools/__tasks__/compile/javascript/copy.js | 42 ++-- .../compile/javascript/index.atoms.js | 17 +- .../__tasks__/compile/javascript/index.dev.js | 25 +- tools/__tasks__/compile/javascript/index.js | 23 +- .../compile/javascript/index.watch.js | 19 +- .../compile/javascript/webpack-atoms.js | 43 ++-- .../compile/javascript/webpack.dev.js | 43 ++-- tools/__tasks__/compile/javascript/webpack.js | 43 ++-- tools/__tasks__/config.js | 22 +- tools/__tasks__/lib/check-network.js | 18 +- tools/__tasks__/lib/get-changed-files.js | 52 ++-- tools/__tasks__/test/index.js | 14 +- tools/__tasks__/test/javascript/index.js | 61 ++--- tools/__tasks__/validate-head/index.js | 14 +- .../__tasks__/validate-head/javascript-fix.js | 26 +- tools/__tasks__/validate-head/javascript.js | 135 +++++----- tools/__tasks__/validate-head/sass.js | 120 ++++----- .../validate/check-for-disallowed-strings.js | 96 +++---- tools/__tasks__/validate/index.js | 21 +- tools/__tasks__/validate/javascript-fix.js | 59 +++-- tools/__tasks__/validate/javascript.js | 60 ++--- tools/__tasks__/validate/sass.js | 8 +- tools/__tasks__/validate/scalafmt.js | 30 +-- tools/__tasks__/validate/typescript.js | 40 +-- tools/asset-monitor/cloudwatch.js | 221 ++++++++-------- tools/asset-monitor/index.js | 102 ++++---- tools/compile-css.js | 140 ++++++----- .../eslint-plugin-guardian-frontend/README.md | 10 +- .../__tests__/exports-last.js | 32 +-- .../__tests__/global-config.js | 40 +-- .../__tests__/no-default-export.js | 28 +-- .../__tests__/no-direct-access-config.js | 18 +- .../no-multiple-classlist-parameter.js | 30 +-- .../eslint-plugin-guardian-frontend/index.js | 2 +- .../package.json | 14 +- .../rules/exports-last.js | 74 +++--- .../rules/global-config.js | 54 ++-- .../rules/no-default-export.js | 28 +-- 
.../rules/no-direct-access-config.js | 70 +++--- .../rules/no-multiple-classlist-parameters.js | 64 +++-- tools/sync-githooks.js | 4 +- tools/task-runner/README.md | 61 +++-- .../task-runner/run-task-verbose-formater.js | 100 ++++---- tools/webpack-progress-reporter.js | 14 +- yarn.lock | 4 +- 72 files changed, 1755 insertions(+), 1601 deletions(-) diff --git a/tools/__tasks__/compile/conf/clean.js b/tools/__tasks__/compile/conf/clean.js index 73acb96ae19f..f34d9ecdd10a 100644 --- a/tools/__tasks__/compile/conf/clean.js +++ b/tools/__tasks__/compile/conf/clean.js @@ -1,9 +1,12 @@ const path = require('path'); const rimraf = require('rimraf'); -const { root } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Clear template rendering artefacts', - task: () => rimraf.sync(path.resolve(root, 'common', 'conf', 'assets')), +const task = { + description: 'Clear template rendering artefacts', + task: () => + rimraf.sync(path.resolve(paths.root, 'common', 'conf', 'assets')), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/conf/copy.js b/tools/__tasks__/compile/conf/copy.js index ccd4d506f341..5760625f396f 100644 --- a/tools/__tasks__/compile/conf/copy.js +++ b/tools/__tasks__/compile/conf/copy.js @@ -1,32 +1,34 @@ const path = require('path'); const cpy = require('cpy'); -const { conf, target, hash, src } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Copy assets', - task: () => - Promise.all([ - cpy('curl.js', conf, { - cwd: path.resolve( - path.dirname(require.resolve('curl')), - '..', - 'dist', - 'curl-with-js-and-domReady' - ), - }), - cpy( - ['**/head*.css', 'inline/**/*.css'], - path.resolve(conf, 'inline-stylesheets'), - { - cwd: path.resolve(target, 'stylesheets'), - } - ), - cpy(['**/assets.map'], path.resolve(conf), { - cwd: path.resolve(hash, 'assets'), - }), - cpy(['polyfill.io'], path.resolve(conf), { - cwd: path.resolve(src, 'javascripts'), - }), - ]), +const task = { + description: 'Copy assets', + task: () => + Promise.all([ + cpy('curl.js', paths.conf, { + cwd: path.resolve( + path.dirname(require.resolve('curl')), + '..', + 'dist', + 'curl-with-js-and-domReady', + ), + }), + cpy( + ['**/head*.css', 'inline/**/*.css'], + path.resolve(paths.conf, 'inline-stylesheets'), + { + cwd: path.resolve(paths.target, 'stylesheets'), + }, + ), + cpy(['**/assets.map'], path.resolve(paths.conf), { + cwd: path.resolve(paths.hash, 'assets'), + }), + cpy(['polyfill.io'], path.resolve(paths.conf), { + cwd: path.resolve(paths.src, 'javascripts'), + }), + ]), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/conf/index.js b/tools/__tasks__/compile/conf/index.js index c6a1f6c9b880..9e2898d5650c 100644 --- a/tools/__tasks__/compile/conf/index.js +++ b/tools/__tasks__/compile/conf/index.js @@ -1,4 +1,10 @@ -module.exports = { - description: 'Compile assets for template rendering in Play', - task: [require('./copy'), require('../inline-svgs')], +const task = { + description: 'Compile assets for template rendering in Play', + task: [ + // prettier: multi-line + require('./copy'), + require('../inline-svgs'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/css/clean.js b/tools/__tasks__/compile/css/clean.js index 63176b1457da..c69783a553c1 100644 --- a/tools/__tasks__/compile/css/clean.js +++ b/tools/__tasks__/compile/css/clean.js @@ -1,12 +1,14 @@ const path = require('path'); const rimraf = require('rimraf'); -const { 
target, hash } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Clear CSS build artefacts', - task: () => { - rimraf.sync(path.resolve(target, 'stylesheets')); - rimraf.sync(path.resolve(hash, 'stylesheets')); - }, +const task = { + description: 'Clear CSS build artefacts', + task: () => { + rimraf.sync(path.resolve(paths.target, 'stylesheets')); + rimraf.sync(path.resolve(paths.hash, 'stylesheets')); + }, }; + +module.exports = task; diff --git a/tools/__tasks__/compile/css/index.dev.js b/tools/__tasks__/compile/css/index.dev.js index 0cf180adad82..ab0f461335ee 100644 --- a/tools/__tasks__/compile/css/index.dev.js +++ b/tools/__tasks__/compile/css/index.dev.js @@ -1,9 +1,12 @@ -module.exports = { - description: 'Compile CSS', - task: [ - require('./clean'), - require('./mkdir'), - require('../images'), - require('./sass'), - ], +const task = { + description: 'Compile CSS', + task: [ + // prettier: multi-line + require('./clean'), + require('./mkdir'), + require('../images'), + require('./sass'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/css/index.js b/tools/__tasks__/compile/css/index.js index 0cf180adad82..ab0f461335ee 100644 --- a/tools/__tasks__/compile/css/index.js +++ b/tools/__tasks__/compile/css/index.js @@ -1,9 +1,12 @@ -module.exports = { - description: 'Compile CSS', - task: [ - require('./clean'), - require('./mkdir'), - require('../images'), - require('./sass'), - ], +const task = { + description: 'Compile CSS', + task: [ + // prettier: multi-line + require('./clean'), + require('./mkdir'), + require('../images'), + require('./sass'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/css/mkdir.js b/tools/__tasks__/compile/css/mkdir.js index daf8b57e563c..06d26a0b3e27 100644 --- a/tools/__tasks__/compile/css/mkdir.js +++ b/tools/__tasks__/compile/css/mkdir.js @@ -1,7 +1,9 @@ const mkdirp = require('mkdirp'); -const { target } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Create CSS target directory', - task: () => mkdirp.sync(`${target}/stylesheets`), +const task = { + description: 'Create CSS target directory', + task: () => mkdirp.sync(`${paths.target}/stylesheets`), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/css/sass.js b/tools/__tasks__/compile/css/sass.js index 18da5a6719e9..6eca5b1d1a24 100644 --- a/tools/__tasks__/compile/css/sass.js +++ b/tools/__tasks__/compile/css/sass.js @@ -1,43 +1,45 @@ const compile = require('../../../compile-css'); -module.exports = { - description: 'Compile Sass', - task: [ - { - description: 'Old IE', - task: () => - compile('old-ie.*.scss', { - browsers: 'Explorer 8', - remify: false, - }), - }, - { - description: 'IE9', - task: () => - compile('ie9.*.scss', { - browsers: 'Explorer 9', - }), - }, - { - description: 'Email', - task: () => - compile('head.email-{article,front}.scss', { - remify: false, - }), - }, - { - description: 'Modern', - task: () => - compile('!(_|ie9|old-ie|*email-article|*email-front)*.scss'), - }, - { - description: 'Inline', - task: () => compile('inline/*.scss'), - }, - { - description: 'Atoms', - task: () => compile('atoms/*.scss'), - }, - ], - concurrent: true, +const task = { + description: 'Compile Sass', + task: [ + { + description: 'Old IE', + task: () => + compile('old-ie.*.scss', { + browsers: 'Explorer 8', + remify: false, + }), + }, + { + description: 'IE9', + task: () => + compile('ie9.*.scss', { + 
browsers: 'Explorer 9', + }), + }, + { + description: 'Email', + task: () => + compile('head.email-{article,front}.scss', { + remify: false, + }), + }, + { + description: 'Modern', + task: () => + compile('!(_|ie9|old-ie|*email-article|*email-front)*.scss'), + }, + { + description: 'Inline', + task: () => compile('inline/*.scss'), + }, + { + description: 'Atoms', + task: () => compile('atoms/*.scss'), + }, + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/compile/data/amp.js b/tools/__tasks__/compile/data/amp.js index 2be09a387c9c..cf1fd9ed14a8 100644 --- a/tools/__tasks__/compile/data/amp.js +++ b/tools/__tasks__/compile/data/amp.js @@ -1,20 +1,22 @@ const path = require('path'); const cpy = require('cpy'); -const { vendor, target } = require('../../config').paths; +const { paths } = require('../../config'); // Source -const ampIframeHtml = path.join(vendor, 'data/amp-iframe.html'); +const ampIframeHtml = path.join(paths.vendor, 'data/amp-iframe.html'); // Destinations // The static assets -const staticDir = path.resolve(target, 'data', 'vendor'); +const staticDir = path.resolve(paths.target, 'data', 'vendor'); -module.exports = { - description: 'Copy AMP iframe HTML', - task: () => - cpy(ampIframeHtml, staticDir, { - parents: false, - nodir: false, - }), +const task = { + description: 'Copy AMP iframe HTML', + task: () => + cpy(ampIframeHtml, staticDir, { + parents: false, + nodir: false, + }), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/data/clean.js b/tools/__tasks__/compile/data/clean.js index 1625a718ef65..442111a26a2a 100644 --- a/tools/__tasks__/compile/data/clean.js +++ b/tools/__tasks__/compile/data/clean.js @@ -1,12 +1,14 @@ const path = require('path'); const rimraf = require('rimraf'); -const { target, hash } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Clear Data build artefacts', - task: () => { - rimraf.sync(path.resolve(target, 'data')); - rimraf.sync(path.resolve(hash, 'data')); - }, +const task = { + description: 'Clear Data build artefacts', + task: () => { + rimraf.sync(path.resolve(paths.target, 'data')); + rimraf.sync(path.resolve(paths.hash, 'data')); + }, }; + +module.exports = task; diff --git a/tools/__tasks__/compile/data/download.js b/tools/__tasks__/compile/data/download.js index 29d6c8e8ad39..a6cc7a9590db 100644 --- a/tools/__tasks__/compile/data/download.js +++ b/tools/__tasks__/compile/data/download.js @@ -5,65 +5,70 @@ const chalk = require('chalk'); const warning = chalk.keyword('orange'); -const { vendor } = require('../../config').paths; +const { paths } = require('../../config'); // Sources const vendorListOfficialUrl = 'https://vendorlist.consensu.org/vendorlist.json'; const fallbackVendorListJSON = path.join( - vendor, - 'data/cmp_fallback_vendorlist.json' + paths.vendor, + 'data/cmp_fallback_vendorlist.json', ); // Destination -const currentVendorListJSON = path.join(vendor, 'data/cmp_vendorlist.json'); +const currentVendorListJSON = path.join( + paths.vendor, + 'data/cmp_vendorlist.json', +); -module.exports = { - description: 'Downloading data files', - task: () => - new Promise((resolve, reject) => { - request(vendorListOfficialUrl, (error, response, body) => { - if (error) { - return reject( - new Error( - `Error GETting '${vendorListOfficialUrl}': ${response}` - ) - ); - } - // Do some basic sanity check on the body - let vendorList; - try { - vendorList = JSON.parse(body); - } catch (JSONerr) { - console.error( - warning( 
- `Body from GETting '${vendorListOfficialUrl}' is not valid JSON` - ) - ); - return reject(JSONerr); - } +const task = { + description: 'Downloading data files', + task: () => + new Promise((resolve, reject) => { + request(vendorListOfficialUrl, (error, response, body) => { + if (error) { + return reject( + new Error( + `Error GETting '${vendorListOfficialUrl}': ${response}`, + ), + ); + } + // Do some basic sanity check on the body + let vendorList; + try { + vendorList = JSON.parse(body); + } catch (JSONerr) { + console.error( + warning( + `Body from GETting '${vendorListOfficialUrl}' is not valid JSON`, + ), + ); + return reject(JSONerr); + } - if (!vendorList.vendorListVersion) { - console.error( - warning( - `Body from GETting '${vendorListOfficialUrl}' does not look like a vendorList` - ) - ); - return reject(new Error('Bad vendor list format')); - } + if (!vendorList.vendorListVersion) { + console.error( + warning( + `Body from GETting '${vendorListOfficialUrl}' does not look like a vendorList`, + ), + ); + return reject(new Error('Bad vendor list format')); + } - return resolve(body); - }); - }).then( - vendorsJSON => { - fs.writeFileSync(currentVendorListJSON, vendorsJSON); - return true; - }, - () => { - console.error( - warning(`\nFalling back to ${fallbackVendorListJSON}`) - ); - fs.copyFileSync(fallbackVendorListJSON, currentVendorListJSON); - return true; - } - ), + return resolve(body); + }); + }).then( + (vendorsJSON) => { + fs.writeFileSync(currentVendorListJSON, vendorsJSON); + return true; + }, + () => { + console.error( + warning(`\nFalling back to ${fallbackVendorListJSON}`), + ); + fs.copyFileSync(fallbackVendorListJSON, currentVendorListJSON); + return true; + }, + ), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/data/index.dev.js b/tools/__tasks__/compile/data/index.dev.js index ea5b321b57eb..0219475dc03e 100644 --- a/tools/__tasks__/compile/data/index.dev.js +++ b/tools/__tasks__/compile/data/index.dev.js @@ -1,4 +1,11 @@ -module.exports = { - description: 'Clean download and build data assets (dev)', - task: [require('./clean'), require('./download'), require('./amp')], +const task = { + description: 'Clean download and build data assets (dev)', + task: [ + // prettier: multi-line + require('./clean'), + require('./download'), + require('./amp'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/data/index.js b/tools/__tasks__/compile/data/index.js index a670f822a555..ed94c89af481 100644 --- a/tools/__tasks__/compile/data/index.js +++ b/tools/__tasks__/compile/data/index.js @@ -1,4 +1,10 @@ -module.exports = { - description: 'Clean download and build data assets', - task: [require('./clean'), require('./download'), require('./amp')], +const task = { + description: 'Clean download and build data assets', + task: [ + // prettier: multi-line + require('./clean'), + require('./download'), + require('./amp'), + ], }; +module.exports = task; diff --git a/tools/__tasks__/compile/data/index.watch.js b/tools/__tasks__/compile/data/index.watch.js index 23b76585fc20..a24d24796b0a 100644 --- a/tools/__tasks__/compile/data/index.watch.js +++ b/tools/__tasks__/compile/data/index.watch.js @@ -1,4 +1,11 @@ -module.exports = { - description: 'Clean, download and build data assets (watch)', - task: [require('./clean'), require('./download'), require('./amp')], +const task = { + description: 'Clean, download and build data assets (watch)', + task: [ + // prettier: multi-line + require('./clean'), + require('./download'), + require('./amp'), 
+ ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/hash/clean.js b/tools/__tasks__/compile/hash/clean.js index 2ca48ec42e30..618c7fb6f4f7 100644 --- a/tools/__tasks__/compile/hash/clean.js +++ b/tools/__tasks__/compile/hash/clean.js @@ -1,9 +1,11 @@ const path = require('path'); const rimraf = require('rimraf'); -const { hash } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Clear asset hash artefacts', - task: () => rimraf.sync(path.resolve(hash, 'assets')), +const task = { + description: 'Clear asset hash artefacts', + task: () => rimraf.sync(path.resolve(paths.hash, 'assets')), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/hash/index.js b/tools/__tasks__/compile/hash/index.js index 0e5fed4e5d25..6580e3d1b396 100644 --- a/tools/__tasks__/compile/hash/index.js +++ b/tools/__tasks__/compile/hash/index.js @@ -9,123 +9,138 @@ const pify = require('pify'); const writeFile = pify(fs.writeFile); -const { hash, target } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Version assets', - task: [ - require('./clean'), - { - description: 'Hash assets', - task: () => { - const webpackRegex = /graun\./; - const webpackChunkRegex = /chunk/; - const sourcemapRegex = /\.map$/; +const task = { + description: 'Version assets', + task: [ + require('./clean'), + { + description: 'Hash assets', + task: () => { + const webpackRegex = /graun\./; + const webpackChunkRegex = /chunk/; + const sourcemapRegex = /\.map$/; - // create the hashed asset map for all files in target - const assetMap = glob - .sync('**/!(*.map)', { nodir: true, cwd: target }) - .reduce((map, assetPath) => { - const assetLocation = path.resolve(target, assetPath); - const hasSourceMap = fs.existsSync( - `${assetLocation}.map` - ); + // create the hashed asset map for all files in target + const assetMap = glob + .sync('**/!(*.map)', { nodir: true, cwd: paths.target }) + .reduce((map, assetPath) => { + const assetLocation = path.resolve( + paths.target, + assetPath, + ); + const hasSourceMap = fs.existsSync( + `${assetLocation}.map`, + ); - // webpack bundles come pre-hashed, so we won't hash them, just add them - if (webpackRegex.test(assetPath)) { - const sourcemap = hasSourceMap - ? { - [`${assetPath}.map`]: `${assetPath}.map`, - } - : {}; + // webpack bundles come pre-hashed, so we won't hash them, just add them + if (webpackRegex.test(assetPath)) { + const sourcemap = hasSourceMap + ? { + [`${assetPath}.map`]: `${assetPath}.map`, + } + : {}; - return Object.assign( - map, - { [assetPath]: assetPath }, - sourcemap - ); - } + return Object.assign( + map, + { [assetPath]: assetPath }, + sourcemap, + ); + } - // hash everything else as normal - const assetHash = hasha.fromFileSync(assetLocation, { - algorithm: 'md5', - }); - const hashedPath = path.join( - path.dirname(assetPath), - assetHash, - path.basename(assetPath) - ); - const sourcemap = hasSourceMap - ? { [`${assetPath}.map`]: `${hashedPath}.map` } - : {}; + // hash everything else as normal + const assetHash = hasha.fromFileSync(assetLocation, { + algorithm: 'md5', + }); + const hashedPath = path.join( + path.dirname(assetPath), + assetHash, + path.basename(assetPath), + ); + const sourcemap = hasSourceMap + ? 
{ [`${assetPath}.map`]: `${hashedPath}.map` } + : {}; - return Object.assign( - map, - { [assetPath]: hashedPath }, - sourcemap - ); - }, {}); + return Object.assign( + map, + { [assetPath]: hashedPath }, + sourcemap, + ); + }, {}); - return Promise.all( - // copy all the built files to their hash locations - Object.keys(assetMap).map(asset => - cpFile( - path.resolve(target, asset), - path.resolve(hash, assetMap[asset]) - ) - ) - ) - .then(() => { - // we need unhashed keys for webpack entry bundles so we can refer to them in play templates. - // since they arrived ready-hashed, we need to add some new ones from the hashed ones... + return Promise.all( + // copy all the built files to their hash locations + Object.keys(assetMap).map((asset) => + cpFile( + path.resolve(paths.target, asset), + path.resolve(paths.hash, assetMap[asset]), + ), + ), + ) + .then(() => { + // we need unhashed keys for webpack entry bundles so we can refer to them in play templates. + // since they arrived ready-hashed, we need to add some new ones from the hashed ones... - // get the webpack entry bundles - const webpackEntryBundles = Object.keys( - assetMap - ).filter( - key => - webpackRegex.test(key) && - !webpackChunkRegex.test(key) && - !sourcemapRegex.test(key) - ); + // get the webpack entry bundles + const webpackEntryBundles = Object.keys( + assetMap, + ).filter( + (key) => + webpackRegex.test(key) && + !webpackChunkRegex.test(key) && + !sourcemapRegex.test(key), + ); - // create a new key for each one and add them them to asset map - return Object.assign( - {}, - assetMap, - webpackEntryBundles.reduce( - (map, webpackEntryBundle) => - Object.assign(map, { - [webpackEntryBundle.replace( - /(javascripts\/)(.+\/)/, - '$1' - )]: assetMap[webpackEntryBundle], - }), - {} - ), - webpackEntryBundles.reduce( - (map, webpackEntryBundle) => - Object.assign(map, { - [webpackEntryBundle.replace( - /(javascripts\/commercial\/)(.+\/)/, - '$1' - )]: assetMap[webpackEntryBundle], - }), - {} - ), - ); - }) - .then(( - normalisedAssetMap // save the asset map - ) => - mkdirp(path.resolve(hash, 'assets')).then(() => - writeFile( - path.resolve(hash, 'assets', 'assets.map'), - JSON.stringify(normalisedAssetMap, null, 4) - ) - ) - ); - }, - }, - ], + // create a new key for each one and add them them to asset map + return Object.assign( + {}, + assetMap, + webpackEntryBundles.reduce( + (map, webpackEntryBundle) => + Object.assign(map, { + [webpackEntryBundle.replace( + /(javascripts\/)(.+\/)/, + '$1', + )]: assetMap[webpackEntryBundle], + }), + {}, + ), + webpackEntryBundles.reduce( + (map, webpackEntryBundle) => + Object.assign(map, { + [webpackEntryBundle.replace( + /(javascripts\/commercial\/)(.+\/)/, + '$1', + )]: assetMap[webpackEntryBundle], + }), + {}, + ), + ); + }) + .then( + ( + normalisedAssetMap, // save the asset map + ) => + mkdirp(path.resolve(paths.hash, 'assets')).then( + () => + writeFile( + path.resolve( + paths.hash, + 'assets', + 'assets.map', + ), + JSON.stringify( + normalisedAssetMap, + null, + 4, + ), + ), + ), + ); + }, + }, + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/images/clean.js b/tools/__tasks__/compile/images/clean.js index 213a47a5447c..014dbbe07e13 100644 --- a/tools/__tasks__/compile/images/clean.js +++ b/tools/__tasks__/compile/images/clean.js @@ -1,13 +1,15 @@ const path = require('path'); const rimraf = require('rimraf'); -const { target, hash, src } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 
'Clear image build artefacts', - task: () => { - rimraf.sync(path.resolve(src, 'stylesheets', 'icons')); - rimraf.sync(path.resolve(target, 'images')); - rimraf.sync(path.resolve(hash, 'images')); - }, +const task = { + description: 'Clear image build artefacts', + task: () => { + rimraf.sync(path.resolve(paths.src, 'stylesheets', 'icons')); + rimraf.sync(path.resolve(paths.target, 'images')); + rimraf.sync(path.resolve(paths.hash, 'images')); + }, }; + +module.exports = task; diff --git a/tools/__tasks__/compile/images/copy.js b/tools/__tasks__/compile/images/copy.js index 96b46f082b80..79b029180db2 100644 --- a/tools/__tasks__/compile/images/copy.js +++ b/tools/__tasks__/compile/images/copy.js @@ -1,14 +1,16 @@ const path = require('path'); const cpy = require('cpy'); -const { public: publicDir, target } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Copy images', - task: () => - cpy(['**/*'], path.resolve(target, 'images'), { - cwd: path.resolve(publicDir, 'images'), - parents: true, - nodir: true, - }), +const task = { + description: 'Copy images', + task: () => + cpy(['**/*'], path.resolve(paths.target, 'images'), { + cwd: path.resolve(paths.public, 'images'), + parents: true, + nodir: true, + }), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/images/icons.js b/tools/__tasks__/compile/images/icons.js index f40bed7e4305..72da1f8311b1 100644 --- a/tools/__tasks__/compile/images/icons.js +++ b/tools/__tasks__/compile/images/icons.js @@ -1,67 +1,67 @@ #!/usr/bin/env node -const { src, root } = require('../../config').paths; +const { paths } = require('../../config'); const fs = require('fs'); const path = require('path'); const glob = require('glob'); const btoa = require('btoa'); -const { optimize, extendDefaultPlugins } = require('svgo') +const { optimize, extendDefaultPlugins } = require('svgo'); const mkdirp = require('mkdirp'); -const getSVG = iconPath => - new Promise((resolve, reject) => { - // eslint-disable-next-line consistent-return - fs.readFile(iconPath, { encoding: 'utf-8' }, (err, data) => { - if (err) return reject(err); - try { - resolve({ - name: path.parse(iconPath).name, - data: optimize(data, { - plugins: [ - { - name: 'preset-default', - params: { - overrides: { - removeViewBox: false, - } - } - } - ], - }) - }) - } catch (e) { - return reject(e); - } - }); - }); +const getSVG = (iconPath) => + new Promise((resolve, reject) => { + // eslint-disable-next-line consistent-return + fs.readFile(iconPath, { encoding: 'utf-8' }, (err, data) => { + if (err) return reject(err); + try { + resolve({ + name: path.parse(iconPath).name, + data: optimize(data, { + plugins: [ + { + name: 'preset-default', + params: { + overrides: { + removeViewBox: false, + }, + }, + }, + ], + }), + }); + } catch (e) { + return reject(e); + } + }); + }); -const sortSVGs = svgs => - svgs.sort((a, b) => { - const aInfo = a.data.info; - const bInfo = b.data.info; - if (aInfo.height !== bInfo.height) { - return aInfo.height - bInfo.height; - } else if (aInfo.width !== bInfo.width) { - return bInfo.width - aInfo.width; - } - return a.name.localeCompare(b.name); - }); +const sortSVGs = (svgs) => + svgs.sort((a, b) => { + const aInfo = a.data.info; + const bInfo = b.data.info; + if (aInfo.height !== bInfo.height) { + return aInfo.height - bInfo.height; + } else if (aInfo.width !== bInfo.width) { + return bInfo.width - aInfo.width; + } + return a.name.localeCompare(b.name); + }); -const generateSassForSVG = svg => { - const 
{ - name, - data: fileData, - data: { - info: { width = 0, height = 0 }, - }, - } = svg; - return ` +const generateSassForSVG = (svg) => { + const { + name, + data: fileData, + data: { + info: { width = 0, height = 0 }, + }, + } = svg; + return ` %svg-i-${name}, .svg-i-${name} { background-image: url(data:image/svg+xml;base64,${btoa( - fileData.data - )}); + fileData.data, + )}); background-position: 0 0; width: ${width}px; height: ${height}px; @@ -73,50 +73,47 @@ const generateSassForSVG = svg => { }; const saveSass = (sass, dest, fileName) => - new Promise((resolve, reject) => { - fs.writeFile( - path.join(dest, fileName), - ` + new Promise((resolve, reject) => { + fs.writeFile( + path.join(dest, fileName), + ` // THIS FILE WAS AUTOMATICALLY GENERATED BY - // ${path.relative(root, path.resolve(__filename))} + // ${path.relative(paths.root, path.resolve(__filename))} // DO NOT EDIT IT! @if ($svg-support) { ${sass} } ` - .trim() - .replace(/ {16}/g, ''), - err => { - if (err) return reject(err); - return resolve(); - } - ); - }); + .trim() + .replace(/ {16}/g, ''), + (err) => { + if (err) return reject(err); + return resolve(); + }, + ); + }); -module.exports = { - description: 'Create sprites', - task: ['commercial', 'global', 'membership', 'video'].map(target => ({ - description: `Spriting ${target}`, - concurrent: true, - task: () => { - const srcPath = path.join(src, 'images', target); - const destPath = path.join(src, 'stylesheets', 'icons'); - const fileName = `_${target}-icons-svg.scss`; +const task = { + description: 'Create sprites', + task: ['commercial', 'global', 'membership', 'video'].map((target) => ({ + description: `Spriting ${target}`, + concurrent: true, + task: () => { + const srcPath = path.join(paths.src, 'images', target); + const destPath = path.join(paths.src, 'stylesheets', 'icons'); + const fileName = `_${target}-icons-svg.scss`; - const iconPaths = glob.sync(path.join(srcPath, '*.svg')); + const iconPaths = glob.sync(path.join(srcPath, '*.svg')); - mkdirp.sync(destPath); + mkdirp.sync(destPath); - return Promise.all(iconPaths.map(getSVG)) - .then(sortSVGs) - .then(svgs => - svgs - .map(generateSassForSVG) - .join('') - .trim() - ) - .then(sass => saveSass(sass, destPath, fileName)); - }, - })), + return Promise.all(iconPaths.map(getSVG)) + .then(sortSVGs) + .then((svgs) => svgs.map(generateSassForSVG).join('').trim()) + .then((sass) => saveSass(sass, destPath, fileName)); + }, + })), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/images/index.js b/tools/__tasks__/compile/images/index.js index e6f1ae932faa..3be860817e8b 100644 --- a/tools/__tasks__/compile/images/index.js +++ b/tools/__tasks__/compile/images/index.js @@ -1,9 +1,12 @@ -module.exports = { - description: 'Compile images', - task: [ - require('./clean'), - require('./copy'), - require('./icons'), - require('./svg'), - ], +const task = { + description: 'Compile images', + task: [ + // prettier: multi-line + require('./clean'), + require('./copy'), + require('./icons'), + require('./svg'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/images/svg.js b/tools/__tasks__/compile/images/svg.js index eef79cf9f0c1..57558ab87cfd 100644 --- a/tools/__tasks__/compile/images/svg.js +++ b/tools/__tasks__/compile/images/svg.js @@ -7,45 +7,48 @@ const pify = require('pify'); const readFile = pify(fs.readFile); const stat = pify(fs.stat); -const { src } = require('../../config').paths; +const { paths } = require('../../config'); -const srcDir = path.resolve(src); +const 
srcDir = path.resolve(paths.src); -module.exports = { - description: - 'Prohibit inline data URIs in svgs and other unoptimised things', - task: () => - Promise.all( - glob.sync('**/*.svg', { cwd: srcDir }).map(svgPath => - Promise.all([ - stat(path.resolve(srcDir, svgPath)).then( - fileStats => - new Promise((resolve, reject) => { - if (fileStats.size > 136 * 1000) { - reject( - new Error( - `whooahh ${svgPath} is much too large at ${fileStats.size / - 1000}kB` - ) - ); - } - resolve(); - }) - ), - readFile(path.resolve(srcDir, svgPath), 'utf-8').then( - fileData => - new Promise((resolve, reject) => { - if (fileData.includes(';base64,')) { - reject( - new Error( - `base64 encoded data detected in ${svgPath}` - ) - ); - } - resolve(); - }) - ), - ]) - ) - ), +const task = { + description: + 'Prohibit inline data URIs in svgs and other unoptimised things', + task: () => + Promise.all( + glob.sync('**/*.svg', { cwd: srcDir }).map((svgPath) => + Promise.all([ + stat(path.resolve(srcDir, svgPath)).then( + (fileStats) => + new Promise((resolve, reject) => { + if (fileStats.size > 136 * 1000) { + reject( + new Error( + `whooahh ${svgPath} is much too large at ${ + fileStats.size / 1000 + }kB`, + ), + ); + } + resolve(); + }), + ), + readFile(path.resolve(srcDir, svgPath), 'utf-8').then( + (fileData) => + new Promise((resolve, reject) => { + if (fileData.includes(';base64,')) { + reject( + new Error( + `base64 encoded data detected in ${svgPath}`, + ), + ); + } + resolve(); + }), + ), + ]), + ), + ), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/index.dev.js b/tools/__tasks__/compile/index.dev.js index a5339f10526b..1bdda0b6e92e 100644 --- a/tools/__tasks__/compile/index.dev.js +++ b/tools/__tasks__/compile/index.dev.js @@ -1,10 +1,13 @@ -module.exports = { - description: 'Compile assets for development', - task: [ - require('./conf/clean'), - require('./css/index.dev'), - require('./data/index.dev'), - require('./javascript/index.dev'), - require('./conf'), - ], +const task = { + description: 'Compile assets for development', + task: [ + // prettier: multi-line + require('./conf/clean'), + require('./css/index.dev'), + require('./data/index.dev'), + require('./javascript/index.dev'), + require('./conf'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/index.js b/tools/__tasks__/compile/index.js index f532548a4eb1..4632a11987cb 100755 --- a/tools/__tasks__/compile/index.js +++ b/tools/__tasks__/compile/index.js @@ -1,11 +1,14 @@ -module.exports = { - description: 'Compile assets for production', - task: [ - require('./conf/clean'), - require('./css'), - require('./data'), - require('./javascript'), - require('./hash'), - require('./conf'), - ], +const task = { + description: 'Compile assets for production', + task: [ + // prettier: multi-line + require('./conf/clean'), + require('./css'), + require('./data'), + require('./javascript'), + require('./hash'), + require('./conf'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/index.watch.js b/tools/__tasks__/compile/index.watch.js index c853323cee8a..cbabca5e61e2 100644 --- a/tools/__tasks__/compile/index.watch.js +++ b/tools/__tasks__/compile/index.watch.js @@ -1,10 +1,13 @@ -module.exports = { - description: 'Compile assets for development', - task: [ - require('./conf/clean'), - require('./css/index.dev'), - require('./data/index.watch'), - require('./javascript/index.watch'), - require('./conf'), - ], +const task = { + description: 'Compile assets for development', + task: [ + 
// prettier: multi-line + require('./conf/clean'), + require('./css/index.dev'), + require('./data/index.watch'), + require('./javascript/index.watch'), + require('./conf'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/inline-svgs/index.js b/tools/__tasks__/compile/inline-svgs/index.js index 1d59db135ccb..39bd64e9a060 100644 --- a/tools/__tasks__/compile/inline-svgs/index.js +++ b/tools/__tasks__/compile/inline-svgs/index.js @@ -3,52 +3,55 @@ const fs = require('fs'); const mkdirp = require('mkdirp'); const glob = require('glob'); -const { optimize } = require('svgo') +const { optimize } = require('svgo'); const pify = require('pify'); const readFile = pify(fs.readFile); const writeFile = pify(fs.writeFile); -const { src, conf } = require('../../config').paths; +const { paths } = require('../../config'); -const srcDir = path.resolve(src, 'inline-svgs'); +const srcDir = path.resolve(paths.src, 'inline-svgs'); -module.exports = { - description: 'Prepare inline SVGs', - task: () => - Promise.all( - glob.sync('**/*.svg', { cwd: srcDir }).map(svgPath => { - const dest = path.resolve(conf, 'inline-svgs', svgPath); - return mkdirp(path.dirname(dest)) - .then(() => - readFile(path.resolve(srcDir, svgPath), 'utf-8') - ) - .then( - fileData => - new Promise(resolve => - resolve(optimize(fileData, { - plugins: [ - { - name: 'preset-default', - params: { - overrides: { - removeViewBox: false, - } - } - }, - 'removeXMLNS', - ], - }) - ) - ) - ) - .then(optimisedFileData => { - if (!optimisedFileData?.data) { - console.error('error inlining:', srcDir, svgPath); - return Promise.resolve(); - } - return writeFile(dest, optimisedFileData.data); - }); - }) - ), +const task = { + description: 'Prepare inline SVGs', + task: () => + Promise.all( + glob.sync('**/*.svg', { cwd: srcDir }).map((svgPath) => { + const dest = path.resolve(paths.conf, 'inline-svgs', svgPath); + return mkdirp(path.dirname(dest)) + .then(() => + readFile(path.resolve(srcDir, svgPath), 'utf-8'), + ) + .then( + (fileData) => + new Promise((resolve) => + resolve( + optimize(fileData, { + plugins: [ + { + name: 'preset-default', + params: { + overrides: { + removeViewBox: false, + }, + }, + }, + 'removeXMLNS', + ], + }), + ), + ), + ) + .then((optimisedFileData) => { + if (!optimisedFileData?.data) { + console.error('error inlining:', srcDir, svgPath); + return Promise.resolve(); + } + return writeFile(dest, optimisedFileData.data); + }); + }), + ), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/bundle-polyfills.js b/tools/__tasks__/compile/javascript/bundle-polyfills.js index 050f73ed3aef..0344d8970f2b 100644 --- a/tools/__tasks__/compile/javascript/bundle-polyfills.js +++ b/tools/__tasks__/compile/javascript/bundle-polyfills.js @@ -10,52 +10,53 @@ const readFileP = pify(fs.readFile); const writeFileP = pify(fs.writeFile); const requestP = pify(request, { multiArgs: true }); -const { src, target, vendor } = require('../../config').paths; +const { paths } = require('../../config'); -const dest = path.resolve(target, 'javascripts', 'vendor'); +const dest = path.resolve(paths.target, 'javascripts', 'vendor'); const polyfillURL = fs - .readFileSync(path.resolve(src, 'javascripts', 'polyfill.io'), 'utf8') - .trim(); + .readFileSync(path.resolve(paths.src, 'javascripts', 'polyfill.io'), 'utf8') + .trim(); -module.exports = { - description: 'Bundle polyfill.io fallback', - task: () => { - mkdirp.sync(dest); - // try and get the lastest result from polyfill.io - // gobbledegook UA means it 
will return *all* polyfills, so this - // strictly a worst-case fallback - return ( - requestP(`${polyfillURL}&ua=qwerty&unknown=polyfill`) - .then(result => { - const [, body] = result; - // make sure the response looks about right - if (body.endsWith('guardianPolyfilled();')) { - return body; - } - return Promise.reject(); - }) - // if that fails, just use our checked in version. - // it's probably the same, but this should mean our fallback is - // always as up to date as possible... - .catch(() => - readFileP( - path.resolve( - vendor, - 'javascripts', - 'polyfillio.fallback.js' - ), - 'utf8' - ).then( - polyfills => - uglify.minify(polyfills, { fromString: true }).code - ) - ) - .then(polyfills => - writeFileP( - path.resolve(dest, 'polyfillio.fallback.js'), - polyfills - ) - ) - ); - }, +const task = { + description: 'Bundle polyfill.io fallback', + task: () => { + mkdirp.sync(dest); + // try and get the lastest result from polyfill.io + // gobbledegook UA means it will return *all* polyfills, so this + // strictly a worst-case fallback + return ( + requestP(`${polyfillURL}&ua=qwerty&unknown=polyfill`) + .then((result) => { + const [, body] = result; + // make sure the response looks about right + if (body.endsWith('guardianPolyfilled();')) { + return body; + } + return Promise.reject(); + }) + // if that fails, just use our checked in version. + // it's probably the same, but this should mean our fallback is + // always as up to date as possible... + .catch(() => + readFileP( + path.resolve( + paths.vendor, + 'javascripts', + 'polyfillio.fallback.js', + ), + 'utf8', + ).then( + (polyfills) => + uglify.minify(polyfills, { fromString: true }).code, + ), + ) + .then((polyfills) => + writeFileP( + path.resolve(dest, 'polyfillio.fallback.js'), + polyfills, + ), + ) + ); + }, }; +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/clean.js b/tools/__tasks__/compile/javascript/clean.js index 8b3932a04b6f..eb5f6b188516 100644 --- a/tools/__tasks__/compile/javascript/clean.js +++ b/tools/__tasks__/compile/javascript/clean.js @@ -1,12 +1,14 @@ const path = require('path'); const rimraf = require('rimraf'); -const { target, hash } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Clear JS build artefacts', - task: () => { - rimraf.sync(path.resolve(target, 'javascripts')); - rimraf.sync(path.resolve(hash, 'javascripts')); - }, +const task = { + description: 'Clear JS build artefacts', + task: () => { + rimraf.sync(path.resolve(paths.target, 'javascripts')); + rimraf.sync(path.resolve(paths.hash, 'javascripts')); + }, }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/copy.js b/tools/__tasks__/compile/javascript/copy.js index 65dd3d206983..b31758917fd1 100644 --- a/tools/__tasks__/compile/javascript/copy.js +++ b/tools/__tasks__/compile/javascript/copy.js @@ -1,25 +1,27 @@ const path = require('path'); const cpy = require('cpy'); -const { vendor, target } = require('../../config').paths; +const { paths } = require('../../config'); -module.exports = { - description: 'Copy 3rd JS party libraries', - task: () => - Promise.all([ - cpy( - [ - 'formstack-interactive/**/*', - 'prebid_safeframe.js', - 'polyfillio.minimum.fallback.js', - 'omsdk-v1.js', - ], - path.resolve(target, 'javascripts', 'vendor'), - { - cwd: path.resolve(vendor, 'javascripts'), - parents: true, - nodir: true, - } - ), - ]), +const task = { + description: 'Copy 3rd JS party libraries', + task: () => + Promise.all([ + cpy( + 
[ + 'formstack-interactive/**/*', + 'prebid_safeframe.js', + 'polyfillio.minimum.fallback.js', + 'omsdk-v1.js', + ], + path.resolve(paths.target, 'javascripts', 'vendor'), + { + cwd: path.resolve(paths.vendor, 'javascripts'), + parents: true, + nodir: true, + }, + ), + ]), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/index.atoms.js b/tools/__tasks__/compile/javascript/index.atoms.js index abb73e0c3a79..8a2ebaf90e49 100644 --- a/tools/__tasks__/compile/javascript/index.atoms.js +++ b/tools/__tasks__/compile/javascript/index.atoms.js @@ -1,8 +1,11 @@ -module.exports = { - description: 'Compile JS', - task: [ - require('./clean'), - require('../inline-svgs'), - require('./webpack-atoms'), - ], +const task = { + description: 'Compile JS', + task: [ + // prettier: multi-line + require('./clean'), + require('../inline-svgs'), + require('./webpack-atoms'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/index.dev.js b/tools/__tasks__/compile/javascript/index.dev.js index 34e6f69e8b8c..c7d0773bd4b7 100644 --- a/tools/__tasks__/compile/javascript/index.dev.js +++ b/tools/__tasks__/compile/javascript/index.dev.js @@ -1,12 +1,15 @@ -module.exports = { - description: 'Prepare JS for development', - task: [ - require('../inline-svgs'), - require('./clean'), - require('./copy'), - require('../../commercial/compile'), - require('./webpack.dev'), - require('./webpack-dcr.dev'), - require('./bundle-polyfills'), - ], +const task = { + description: 'Prepare JS for development', + task: [ + // prettier: multi-line + require('../inline-svgs'), + require('./clean'), + require('./copy'), + require('../../commercial/compile'), + require('./webpack.dev'), + require('./webpack-dcr.dev'), + require('./bundle-polyfills'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/index.js b/tools/__tasks__/compile/javascript/index.js index d82bd4f88317..8b9ad2d69c14 100644 --- a/tools/__tasks__/compile/javascript/index.js +++ b/tools/__tasks__/compile/javascript/index.js @@ -1,11 +1,14 @@ -module.exports = { - description: 'Compile JS', - task: [ - require('./clean'), - require('../inline-svgs'), - require('./copy'), - require('./webpack'), - require('./webpack-atoms'), - require('./bundle-polyfills'), - ], +const task = { + description: 'Compile JS', + task: [ + // prettier: multi-line + require('./clean'), + require('../inline-svgs'), + require('./copy'), + require('./webpack'), + require('./webpack-atoms'), + require('./bundle-polyfills'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/index.watch.js b/tools/__tasks__/compile/javascript/index.watch.js index 292bb437b11e..d5b4315f814c 100644 --- a/tools/__tasks__/compile/javascript/index.watch.js +++ b/tools/__tasks__/compile/javascript/index.watch.js @@ -1,9 +1,12 @@ -module.exports = { - description: 'Prepare JS for development', - task: [ - require('../inline-svgs'), - require('./clean'), - require('./copy'), - require('./bundle-polyfills'), - ], +const task = { + description: 'Prepare JS for development', + task: [ + // prettier: multi-line + require('../inline-svgs'), + require('./clean'), + require('./copy'), + require('./bundle-polyfills'), + ], }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/webpack-atoms.js b/tools/__tasks__/compile/javascript/webpack-atoms.js index 983b6bf28e96..7a62a6dabeb0 100644 --- a/tools/__tasks__/compile/javascript/webpack-atoms.js +++ 
b/tools/__tasks__/compile/javascript/webpack-atoms.js @@ -1,4 +1,3 @@ - require('any-observable/register/rxjs-all'); const Observable = require('any-observable'); @@ -8,26 +7,28 @@ const chalk = require('chalk'); const config = require('../../../../webpack.config.atoms.js'); -module.exports = { - description: 'Create Webpack bundles for atoms', - task: () => - new Observable(observer => { - config.plugins = [ - require('../../../webpack-progress-reporter')(observer), - ...config.plugins, - ]; +const task = { + description: 'Create Webpack bundles for atoms', + task: () => + new Observable((observer) => { + config.plugins = [ + require('../../../webpack-progress-reporter')(observer), + ...config.plugins, + ]; - const bundler = webpack(config); + const bundler = webpack(config); - bundler.run((err, stats) => { - if (err) { - throw new Error(chalk.red(err)); - } - const info = stats.toJson(); - if (stats.hasErrors()) { - throw new Error(chalk.red(info.errors)); - } - observer.complete(); - }); - }), + bundler.run((err, stats) => { + if (err) { + throw new Error(chalk.red(err)); + } + const info = stats.toJson(); + if (stats.hasErrors()) { + throw new Error(chalk.red(info.errors)); + } + observer.complete(); + }); + }), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/webpack.dev.js b/tools/__tasks__/compile/javascript/webpack.dev.js index 841244d0f4f2..acb73fcc0e5a 100644 --- a/tools/__tasks__/compile/javascript/webpack.dev.js +++ b/tools/__tasks__/compile/javascript/webpack.dev.js @@ -1,4 +1,3 @@ - require('any-observable/register/rxjs-all'); const Observable = require('any-observable'); @@ -8,26 +7,28 @@ const chalk = require('chalk'); const config = require('../../../../webpack.config.dev.js'); -module.exports = { - description: 'Create Webpack bundles', - task: () => - new Observable(observer => { - config.plugins = [ - require('../../../webpack-progress-reporter')(observer), - ...config.plugins, - ]; +const task = { + description: 'Create Webpack bundles', + task: () => + new Observable((observer) => { + config.plugins = [ + require('../../../webpack-progress-reporter')(observer), + ...config.plugins, + ]; - const bundler = webpack(config); + const bundler = webpack(config); - bundler.run((err, stats) => { - if (err) { - throw new Error(chalk.red(err)); - } - const info = stats.toJson(); - if (stats.hasErrors()) { - throw new Error(chalk.red(info.errors)); - } - observer.complete(); - }); - }), + bundler.run((err, stats) => { + if (err) { + throw new Error(chalk.red(err)); + } + const info = stats.toJson(); + if (stats.hasErrors()) { + throw new Error(chalk.red(info.errors)); + } + observer.complete(); + }); + }), }; + +module.exports = task; diff --git a/tools/__tasks__/compile/javascript/webpack.js b/tools/__tasks__/compile/javascript/webpack.js index 977d3b8c6e9f..79435c0bf0d0 100644 --- a/tools/__tasks__/compile/javascript/webpack.js +++ b/tools/__tasks__/compile/javascript/webpack.js @@ -1,4 +1,3 @@ - require('any-observable/register/rxjs-all'); const Observable = require('any-observable'); @@ -8,26 +7,28 @@ const chalk = require('chalk'); const config = require('../../../../webpack.config.prod.js'); -module.exports = { - description: 'Create Webpack bundles', - task: () => - new Observable(observer => { - config.plugins = [ - require('../../../webpack-progress-reporter')(observer), - ...config.plugins, - ]; +const task = { + description: 'Create Webpack bundles', + task: () => + new Observable((observer) => { + config.plugins = [ + 
require('../../../webpack-progress-reporter')(observer), + ...config.plugins, + ]; - const bundler = webpack(config); + const bundler = webpack(config); - bundler.run((err, stats) => { - if (err) { - throw new Error(chalk.red(err)); - } - const info = stats.toJson(); - if (stats.hasErrors()) { - throw new Error(chalk.red(info.errors)); - } - observer.complete(); - }); - }), + bundler.run((err, stats) => { + if (err) { + throw new Error(chalk.red(err)); + } + const info = stats.toJson(); + if (stats.hasErrors()) { + throw new Error(chalk.red(info.errors)); + } + observer.complete(); + }); + }), }; + +module.exports = task; diff --git a/tools/__tasks__/config.js b/tools/__tasks__/config.js index afe5b037fb54..99adbb3833ce 100644 --- a/tools/__tasks__/config.js +++ b/tools/__tasks__/config.js @@ -1,13 +1,15 @@ const path = require('path'); -module.exports = { - paths: { - target: path.join(__dirname, '../', '../', 'static', 'target'), - hash: path.join(__dirname, '../', '../', 'static', 'hash'), - src: path.join(__dirname, '../', '../', 'static', 'src'), - public: path.join(__dirname, '../', '../', 'static', 'public'), - vendor: path.join(__dirname, '../', '../', 'static', 'vendor'), - root: path.join(__dirname, '../', '../'), - conf: path.join(__dirname, '../', '../', 'common', 'conf', 'assets'), - }, +const task = { + paths: { + target: path.join(__dirname, '../', '../', 'static', 'target'), + hash: path.join(__dirname, '../', '../', 'static', 'hash'), + src: path.join(__dirname, '../', '../', 'static', 'src'), + public: path.join(__dirname, '../', '../', 'static', 'public'), + vendor: path.join(__dirname, '../', '../', 'static', 'vendor'), + root: path.join(__dirname, '../', '../'), + conf: path.join(__dirname, '../', '../', 'common', 'conf', 'assets'), + }, }; + +module.exports = task; diff --git a/tools/__tasks__/lib/check-network.js b/tools/__tasks__/lib/check-network.js index b3aa9c3b8ff2..b26f4e2defec 100644 --- a/tools/__tasks__/lib/check-network.js +++ b/tools/__tasks__/lib/check-network.js @@ -2,13 +2,13 @@ const tcpp = require('tcp-ping'); const pify = require('pify'); module.exports = (domain, port) => ({ - description: `Probing ${domain} on port ${port}...`, - task: () => - pify(tcpp.probe, { multiArgs: true })(domain, port).then(result => { - if (!result[0]) { - throw new Error( - `Cannot reach ${domain}:${port} - is your server running?` - ); - } - }), + description: `Probing ${domain} on port ${port}...`, + task: () => + pify(tcpp.probe, { multiArgs: true })(domain, port).then((result) => { + if (!result[0]) { + throw new Error( + `Cannot reach ${domain}:${port} - is your server running?`, + ); + } + }), }); diff --git a/tools/__tasks__/lib/get-changed-files.js b/tools/__tasks__/lib/get-changed-files.js index 500859bb0ea0..8eab1980cff5 100644 --- a/tools/__tasks__/lib/get-changed-files.js +++ b/tools/__tasks__/lib/get-changed-files.js @@ -1,42 +1,42 @@ const execa = require('execa'); const getCurrentBranchName = () => - execa.stdout('git', ['symbolic-ref', '--short', 'HEAD']); + execa.stdout('git', ['symbolic-ref', '--short', 'HEAD']); -const hasRemoteBranch = branch => - execa - .stdout('git', ['status', '--porcelain', '-b']) - .then(status => status.includes(`...origin/${branch}`)); +const hasRemoteBranch = (branch) => + execa + .stdout('git', ['status', '--porcelain', '-b']) + .then((status) => status.includes(`...origin/${branch}`)); // return files that have changed locally // compared to remote feature branch -const diffAgainstRemote = branch => - execa - .stdout('git', 
[ - 'diff', - '--name-only', - 'HEAD', - `origin/${branch}`, - '^origin/main', // excluding changes already in origin/main - ]) - .then(diffs => diffs.split('\n')); +const diffAgainstRemote = (branch) => + execa + .stdout('git', [ + 'diff', + '--name-only', + 'HEAD', + `origin/${branch}`, + '^origin/main', // excluding changes already in origin/main + ]) + .then((diffs) => diffs.split('\n')); // return files that have changed // compared to local main branch const diffAgainstMain = () => - execa - .stdout('git', ['diff', '--name-only', 'HEAD', 'origin/main']) - .then(diffs => diffs.split('\n')); + execa + .stdout('git', ['diff', '--name-only', 'HEAD', 'origin/main']) + .then((diffs) => diffs.split('\n')); const getChangedFiles = () => - getCurrentBranchName().then(localBranch => - hasRemoteBranch(localBranch).then(remoteBranchExists => { - if (remoteBranchExists) { - return diffAgainstRemote(localBranch); - } + getCurrentBranchName().then((localBranch) => + hasRemoteBranch(localBranch).then((remoteBranchExists) => { + if (remoteBranchExists) { + return diffAgainstRemote(localBranch); + } - return diffAgainstMain(); - }) - ); + return diffAgainstMain(); + }), + ); module.exports = getChangedFiles; diff --git a/tools/__tasks__/test/index.js b/tools/__tasks__/test/index.js index cfa253fad78e..cc55b1f6828b 100644 --- a/tools/__tasks__/test/index.js +++ b/tools/__tasks__/test/index.js @@ -1,5 +1,11 @@ -module.exports = { - description: 'Test assets', - task: [require('../compile/data'), require('./javascript')], - concurrent: true, +const task = { + description: 'Test assets', + task: [ + // prettier: multi-line + require('../compile/data'), + require('./javascript'), + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/test/javascript/index.js b/tools/__tasks__/test/javascript/index.js index 9b8b992dd6cd..ffdc62c6a55b 100644 --- a/tools/__tasks__/test/javascript/index.js +++ b/tools/__tasks__/test/javascript/index.js @@ -8,35 +8,40 @@ const rxjs = require('rxjs'); const rxjsOperators = require('rxjs/operators'); const exec = (cmd, args, opts) => { - const cp = execa(cmd, args, opts); + const cp = execa(cmd, args, opts); - return rxjs.merge( - streamToObservable(cp.stdout.pipe(split()), { await: cp }), - streamToObservable(cp.stderr.pipe(split()), { await: cp }) - ).pipe(rxjsOperators.filter(Boolean)); + return rxjs + .merge( + streamToObservable(cp.stdout.pipe(split()), { await: cp }), + streamToObservable(cp.stderr.pipe(split()), { await: cp }), + ) + .pipe(rxjsOperators.filter(Boolean)); }; -module.exports = { - description: 'Test JS app', - task: [ - { - description: 'Run tests', - task: [ - { - description: 'JS tests', - task: () => exec('jest', null, { - env: { - /** - * We test some things like relative dates and formatting - * that rely on a specific timezone. We set this here so - * that it's not determined by the machine's timezone. - */ - TZ: 'Europe/London' - } - }), - }, - ], - concurrent: true, - }, - ], +const task = { + description: 'Test JS app', + task: [ + { + description: 'Run tests', + task: [ + { + description: 'JS tests', + task: () => + exec('jest', null, { + env: { + /** + * We test some things like relative dates and formatting + * that rely on a specific timezone. We set this here so + * that it's not determined by the machine's timezone. 
+ */ + TZ: 'Europe/London', + }, + }), + }, + ], + concurrent: true, + }, + ], }; + +module.exports = task; diff --git a/tools/__tasks__/validate-head/index.js b/tools/__tasks__/validate-head/index.js index 434e22ef8c03..044406ec767b 100644 --- a/tools/__tasks__/validate-head/index.js +++ b/tools/__tasks__/validate-head/index.js @@ -1,5 +1,11 @@ -module.exports = { - description: 'Validate commits', - task: [require('./javascript'), require('./sass')], - concurrent: true, +const task = { + description: 'Validate commits', + task: [ + // prettier: multi-line + require('./javascript'), + require('./sass'), + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate-head/javascript-fix.js b/tools/__tasks__/validate-head/javascript-fix.js index 43931f8b50ae..6c2789bd490f 100644 --- a/tools/__tasks__/validate-head/javascript-fix.js +++ b/tools/__tasks__/validate-head/javascript-fix.js @@ -1,17 +1,19 @@ const execa = require('execa'); const getChangedFiles = require('../lib/get-changed-files'); -module.exports = { - description: 'Fix committed linting errors', - task: () => - getChangedFiles().then(files => { - const jsFiles = files.filter( - file => - file.endsWith('.js') || - file.endsWith('.jsx') || - file.startsWith('git-hooks') - ); +const task = { + description: 'Fix committed linting errors', + task: () => + getChangedFiles().then((files) => { + const jsFiles = files.filter( + (file) => + file.endsWith('.js') || + file.endsWith('.jsx') || + file.startsWith('git-hooks'), + ); - return execa('eslint', [...jsFiles, '--quiet', '--color', '--fix']); - }), + return execa('eslint', [...jsFiles, '--quiet', '--color', '--fix']); + }), }; + +module.exports = task; diff --git a/tools/__tasks__/validate-head/javascript.js b/tools/__tasks__/validate-head/javascript.js index 2726568fa85e..df20f602a7fd 100644 --- a/tools/__tasks__/validate-head/javascript.js +++ b/tools/__tasks__/validate-head/javascript.js @@ -5,77 +5,80 @@ const getChangedFiles = require('../lib/get-changed-files'); const getCpuCount = () => os.cpus().length; -module.exports = { - description: 'Validate committed JS', - task: [ - { - description: 'Lint committed JS', - task: () => - getChangedFiles().then(files => { - const errors = []; - const jsFiles = files.filter( - file => - (file.endsWith('.js') && !file.endsWith('.scala.js')) || - file.endsWith('.jsx') || - file.startsWith('git-hooks') - ); - const lint = (proc, batchedFiles) => - proc.then(() => - Promise.all( - batchedFiles.map(filePath => - execa - .shell( - `git show HEAD:${filePath} | eslint --stdin --stdin-filename ${filePath}` - ) - .catch(e => { - errors.push(e); - }) - ) - ) - ); - const batch = (arr, batchSize) => { - const batchFold = (xss, x) => { - if (!xss.length) { - return [[x]]; - } - if (xss[0].length < batchSize) { - return [xss[0].concat(x), ...xss.slice(1)]; - } +const task = { + description: 'Validate committed JS', + task: [ + { + description: 'Lint committed JS', + task: () => + getChangedFiles().then((files) => { + const errors = []; + const jsFiles = files.filter( + (file) => + (file.endsWith('.js') && + !file.endsWith('.scala.js')) || + file.endsWith('.jsx') || + file.startsWith('git-hooks'), + ); + const lint = (proc, batchedFiles) => + proc.then(() => + Promise.all( + batchedFiles.map((filePath) => + execa + .shell( + `git show HEAD:${filePath} | eslint --stdin --stdin-filename ${filePath}`, + ) + .catch((e) => { + errors.push(e); + }), + ), + ), + ); + const batch = (arr, batchSize) => { + const batchFold = (xss, x) 
=> { + if (!xss.length) { + return [[x]]; + } + if (xss[0].length < batchSize) { + return [xss[0].concat(x), ...xss.slice(1)]; + } - return [[x], ...xss]; - }; + return [[x], ...xss]; + }; - return arr.reduce(batchFold, []); - }; + return arr.reduce(batchFold, []); + }; - return batch(jsFiles, getCpuCount()) - .reduce(lint, Promise.resolve()) - .then(() => { - if (errors.length) { - const error = errors.reduce( - (acc, curr) => { - acc.stdout += curr.stdout; + return batch(jsFiles, getCpuCount()) + .reduce(lint, Promise.resolve()) + .then(() => { + if (errors.length) { + const error = errors.reduce( + (acc, curr) => { + acc.stdout += curr.stdout; - return acc; - }, - { stdout: '' } - ); + return acc; + }, + { stdout: '' }, + ); - error.stdout += `\n${chalk.red( - `✋ Your changes have not been pushed.\n${chalk.reset( - `You may be able to fix things by running ${chalk.dim( - 'make fix-commits' - )}.` - )}` - )}`; + error.stdout += `\n${chalk.red( + `✋ Your changes have not been pushed.\n${chalk.reset( + `You may be able to fix things by running ${chalk.dim( + 'make fix-commits', + )}.`, + )}`, + )}`; - return Promise.reject(error); - } + return Promise.reject(error); + } - return Promise.resolve(); - }); - }), - }, - ], - concurrent: true, + return Promise.resolve(); + }); + }), + }, + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate-head/sass.js b/tools/__tasks__/validate-head/sass.js index 42165db1e521..dcd31b42c4c7 100644 --- a/tools/__tasks__/validate-head/sass.js +++ b/tools/__tasks__/validate-head/sass.js @@ -5,70 +5,72 @@ const getChangedFiles = require('../lib/get-changed-files'); const getCpuCount = () => os.cpus().length; -module.exports = { - description: 'Validate committed Sass', - task: [ - { - description: 'Lint committed Sass', - task: () => - getChangedFiles().then(files => { - const errors = []; - const sassFiles = files.filter(file => - file.endsWith('.scss') - ); - const lint = (proc, batchedFiles) => - proc.then(() => - Promise.all( - batchedFiles.map(filePath => - execa - .shell( - `git show HEAD:${filePath} | yarn stylelint --max-warnings 0 '${filePath}'` - ) - .catch(e => { - errors.push(e); - }) - ) - ) - ); - const batch = (arr, batchSize) => { - const batchFold = (xss, x) => { - if (!xss.length) { - return [[x]]; - } - if (xss[0].length < batchSize) { - return [xss[0].concat(x), ...xss.slice(1)]; - } +const task = { + description: 'Validate committed Sass', + task: [ + { + description: 'Lint committed Sass', + task: () => + getChangedFiles().then((files) => { + const errors = []; + const sassFiles = files.filter((file) => + file.endsWith('.scss'), + ); + const lint = (proc, batchedFiles) => + proc.then(() => + Promise.all( + batchedFiles.map((filePath) => + execa + .shell( + `git show HEAD:${filePath} | yarn stylelint --max-warnings 0 '${filePath}'`, + ) + .catch((e) => { + errors.push(e); + }), + ), + ), + ); + const batch = (arr, batchSize) => { + const batchFold = (xss, x) => { + if (!xss.length) { + return [[x]]; + } + if (xss[0].length < batchSize) { + return [xss[0].concat(x), ...xss.slice(1)]; + } - return [[x], ...xss]; - }; + return [[x], ...xss]; + }; - return arr.reduce(batchFold, []); - }; + return arr.reduce(batchFold, []); + }; - return batch(sassFiles, getCpuCount()) - .reduce(lint, Promise.resolve()) - .then(() => { - if (errors.length) { - const error = errors.reduce( - (acc, curr) => { - acc.stdout += curr.stdout; + return batch(sassFiles, getCpuCount()) + .reduce(lint, Promise.resolve()) + .then(() => { + 
if (errors.length) { + const error = errors.reduce( + (acc, curr) => { + acc.stdout += curr.stdout; - return acc; - }, - { stdout: '' } - ); + return acc; + }, + { stdout: '' }, + ); - error.stdout += `\n${chalk.red( - `✋ Your changes have not been pushed.` - )}`; + error.stdout += `\n${chalk.red( + `✋ Your changes have not been pushed.`, + )}`; - return Promise.reject(error); - } + return Promise.reject(error); + } - return Promise.resolve(); - }); - }), - }, - ], - concurrent: true, + return Promise.resolve(); + }); + }), + }, + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate/check-for-disallowed-strings.js b/tools/__tasks__/validate/check-for-disallowed-strings.js index 4e972c4133f9..e53ec733ef05 100644 --- a/tools/__tasks__/validate/check-for-disallowed-strings.js +++ b/tools/__tasks__/validate/check-for-disallowed-strings.js @@ -2,54 +2,54 @@ const execa = require('execa'); const chalk = require('chalk'); const disallowedStrings = require('../../../disallowed-strings.js'); -module.exports = { - description: 'Check for disallowed strings', - task: () => - Promise.all( - disallowedStrings.map( - ({ regex, message, maxOccurrences, pathspecs }) => - execa - .stdout('git', [ - 'grep', - '-Ein', - '--color', - regex.source, - ...pathspecs, - ]) - .then(matches => matches.split('\n')) - .then(matches => { - if (matches.length > maxOccurrences) { - const msg = [ - chalk.red( - `More than ${maxOccurrences} match for regex ${ - regex.source - }` - ), - chalk.red(message), - ...matches, - ].join('\n'); +const task = { + description: 'Check for disallowed strings', + task: () => + Promise.all( + disallowedStrings.map( + ({ regex, message, maxOccurrences, pathspecs }) => + execa + .stdout('git', [ + 'grep', + '-Ein', + '--color', + regex.source, + ...pathspecs, + ]) + .then((matches) => matches.split('\n')) + .then((matches) => { + if (matches.length > maxOccurrences) { + const msg = [ + chalk.red( + `More than ${maxOccurrences} match for regex ${regex.source}`, + ), + chalk.red(message), + ...matches, + ].join('\n'); - const err = new Error(); - err.stdout = msg; - throw err; - } - }) - .catch(err => { - // git grep returns with error code 1 when there are no matches. - // For us, this is not actually an error state so we swallow the - // error by returning a fake resolved Promise. - if ( - err.code === 1 && - err.stdout === '' && - err.stderr === '' - ) { - return Promise.resolve(); - } + const err = new Error(); + err.stdout = msg; + throw err; + } + }) + .catch((err) => { + // git grep returns with error code 1 when there are no matches. + // For us, this is not actually an error state so we swallow the + // error by returning a fake resolved Promise. 
+ if ( + err.code === 1 && + err.stdout === '' && + err.stderr === '' + ) { + return Promise.resolve(); + } - // In all other cases, assume it's a real error - return Promise.reject(err); - }) - ) - ), - concurrent: true, + // In all other cases, assume it's a real error + return Promise.reject(err); + }), + ), + ), + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate/index.js b/tools/__tasks__/validate/index.js index 818e2c3cbd55..9bcaf89ee4f5 100644 --- a/tools/__tasks__/validate/index.js +++ b/tools/__tasks__/validate/index.js @@ -1,10 +1,13 @@ -module.exports = { - description: 'Lint assets', - task: [ - require('./javascript'), - require('./typescript'), - require('./sass'), - require('./check-for-disallowed-strings'), - ], - concurrent: true, +const task = { + description: 'Lint assets', + task: [ + // prettier: multi-line + require('./javascript'), + require('./typescript'), + require('./sass'), + require('./check-for-disallowed-strings'), + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate/javascript-fix.js b/tools/__tasks__/validate/javascript-fix.js index 3dcd93f6f099..5159616aafc7 100644 --- a/tools/__tasks__/validate/javascript-fix.js +++ b/tools/__tasks__/validate/javascript-fix.js @@ -2,33 +2,38 @@ const execa = require('execa'); const config = ['--quiet', '--color', '--fix']; -const handleSuccess = ctx => { - ctx.messages.push("Don't forget to commit any fixes..."); +const handleSuccess = (ctx) => { + ctx.messages.push("Don't forget to commit any fixes..."); }; -module.exports = { - description: 'Fix JS linting errors', - task: [ - { - description: 'Fix static/src', - task: ctx => - execa('eslint', ['static/src/javascripts', '--ext=ts,tsx,js'].concat(config)).then( - handleSuccess.bind(null, ctx) - ), - }, - { - description: 'Fix everything else', - task: ctx => - execa( - 'eslint', - [ - '*.js', - 'tools/**/*.js', - 'dev/**/*.js', - 'git-hooks/*', - ].concat(config) - ).then(handleSuccess.bind(null, ctx)), - }, - ], - concurrent: true, +const task = { + description: 'Fix JS linting errors', + task: [ + { + description: 'Fix static/src', + task: (ctx) => + execa( + 'eslint', + ['static/src/javascripts', '--ext=ts,tsx,js'].concat( + config, + ), + ).then(handleSuccess.bind(null, ctx)), + }, + { + description: 'Fix everything else', + task: (ctx) => + execa( + 'eslint', + [ + '*.js', + 'tools/**/*.js', + 'dev/**/*.js', + 'git-hooks/*', + ].concat(config), + ).then(handleSuccess.bind(null, ctx)), + }, + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate/javascript.js b/tools/__tasks__/validate/javascript.js index 8bba5a79b655..0b37dc6ac419 100644 --- a/tools/__tasks__/validate/javascript.js +++ b/tools/__tasks__/validate/javascript.js @@ -1,35 +1,37 @@ const chalk = require('chalk'); const config = '--quiet --color'; -const error = ctx => { - ctx.messages.push( - `${chalk.blue('make fix')} can correct simple errors automatically.` - ); - ctx.messages.push( - `Your editor may be able to catch eslint errors as you work:\n${chalk.underline( - 'http://eslint.org/docs/user-guide/integrations#editors' - )}` - ); +const error = (ctx) => { + ctx.messages.push( + `${chalk.blue('make fix')} can correct simple errors automatically.`, + ); + ctx.messages.push( + `Your editor may be able to catch eslint errors as you work:\n${chalk.underline( + 'http://eslint.org/docs/user-guide/integrations#editors', + )}`, + ); }; -module.exports = { - description: 'Lint JS', - task: [ - { - 
description: 'Static', - task: `eslint static/src/javascripts --ext=ts,tsx,js ${config}`, - onError: error, - }, - { - description: 'Tools etc.', - task: `eslint tools ${config}`, - onError: error, - }, - { - description: 'Git hooks', - task: `eslint git-hooks/* ${config}`, - onError: error, - }, - ], - concurrent: true, +const task = { + description: 'Lint JS', + task: [ + { + description: 'Static', + task: `eslint static/src/javascripts --ext=ts,tsx,js ${config}`, + onError: error, + }, + { + description: 'Tools etc.', + task: `eslint tools ${config}`, + onError: error, + }, + { + description: 'Git hooks', + task: `eslint git-hooks/* ${config}`, + onError: error, + }, + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/__tasks__/validate/sass.js b/tools/__tasks__/validate/sass.js index 18ec9fccbf4e..8fa6262a4242 100644 --- a/tools/__tasks__/validate/sass.js +++ b/tools/__tasks__/validate/sass.js @@ -1,4 +1,6 @@ -module.exports = { - description: 'Lint Sass', - task: 'stylelint **/*.scss', +const task = { + description: 'Lint Sass', + task: 'stylelint **/*.scss', }; + +module.exports = task; diff --git a/tools/__tasks__/validate/scalafmt.js b/tools/__tasks__/validate/scalafmt.js index aea5c893e381..faf5ca543cbe 100644 --- a/tools/__tasks__/validate/scalafmt.js +++ b/tools/__tasks__/validate/scalafmt.js @@ -1,19 +1,21 @@ const chalk = require('chalk'); const config = '--error'; -const error = ctx => { - ctx.messages.push( - `Run ${chalk.blue('./sbt scalafmt')} to format Scala files.` - ); +const error = (ctx) => { + ctx.messages.push( + `Run ${chalk.blue('./sbt scalafmt')} to format Scala files.`, + ); }; -module.exports = { - description: 'scalafmt check', - task: [ - { - description: 'scalafmtCheckAll', - task: `./sbt scalafmtCheckAll ${config}`, - onError: error, - }, - ] -} +const task = { + description: 'scalafmt check', + task: [ + { + description: 'scalafmtCheckAll', + task: `./sbt scalafmtCheckAll ${config}`, + onError: error, + }, + ], +}; + +module.exports = task; diff --git a/tools/__tasks__/validate/typescript.js b/tools/__tasks__/validate/typescript.js index 98443b25ee00..49d254556e09 100644 --- a/tools/__tasks__/validate/typescript.js +++ b/tools/__tasks__/validate/typescript.js @@ -1,24 +1,26 @@ const chalk = require('chalk'); -const error = ctx => { - ctx.messages.push( - `${chalk.blue('make fix')} can correct simple errors automatically.` - ); - ctx.messages.push( - `Your editor may be able to catch eslint errors as you work:\n${chalk.underline( - 'http://eslint.org/docs/user-guide/integrations#editors' - )}` - ); +const error = (ctx) => { + ctx.messages.push( + `${chalk.blue('make fix')} can correct simple errors automatically.`, + ); + ctx.messages.push( + `Your editor may be able to catch eslint errors as you work:\n${chalk.underline( + 'http://eslint.org/docs/user-guide/integrations#editors', + )}`, + ); }; -module.exports = { - description: 'Compile TS', - task: [ - { - description: 'Compile', - task: `tsc --noEmit`, - onError: error, - }, - ], - concurrent: true, +const task = { + description: 'Compile TS', + task: [ + { + description: 'Compile', + task: `tsc --noEmit`, + onError: error, + }, + ], + concurrent: true, }; + +module.exports = task; diff --git a/tools/asset-monitor/cloudwatch.js b/tools/asset-monitor/cloudwatch.js index e2ace02fb0c5..8a0ef756eb23 100644 --- a/tools/asset-monitor/cloudwatch.js +++ b/tools/asset-monitor/cloudwatch.js @@ -4,119 +4,122 @@ const { CloudWatch } = require('@aws-sdk/client-cloudwatch'); let cloudwatch; 
module.exports.getProperty = (property, file) => - file - .toString() - .split('\n') - .filter(line => line.search(property) !== -1)[0] - .split('=')[1]; + file + .toString() + .split('\n') + .filter((line) => line.search(property) !== -1)[0] + .split('=')[1]; -module.exports.configure = filename => - new Promise((resolve, reject) => { - fs.readFile(filename, { encoding: 'utf-8' }, (err, data) => { - if (err) { - return reject( - new Error('Failed to read AWS credentials from file') - ); - } +module.exports.configure = (filename) => + new Promise((resolve, reject) => { + fs.readFile(filename, { encoding: 'utf-8' }, (err, data) => { + if (err) { + return reject( + new Error('Failed to read AWS credentials from file'), + ); + } - try { - cloudwatch = new CloudWatch({ - region: 'eu-west-1', - credentials: { - accessKeyId: module.exports.getProperty('aws.access.key', data), - secretAccessKey: module.exports.getProperty( - 'aws.access.secret.key', - data - ), - }, - }); - return resolve({}); - } catch (e) { - return reject(e); - } - }); - }); + try { + cloudwatch = new CloudWatch({ + region: 'eu-west-1', + credentials: { + accessKeyId: module.exports.getProperty( + 'aws.access.key', + data, + ), + secretAccessKey: module.exports.getProperty( + 'aws.access.secret.key', + data, + ), + }, + }); + return resolve({}); + } catch (e) { + return reject(e); + } + }); + }); module.exports.log = (metricName, metricData) => - new Promise((resolve, reject) => { - const params = { - Namespace: 'Assets', - MetricData: [ - { - MetricName: metricName, - Value: metricData.uncompressed, - Unit: 'Kilobytes', - Dimensions: [ - { - Name: 'Compression', - Value: 'None', - }, - ], - }, - { - MetricName: metricName, - Value: metricData.compressed, - Unit: 'Kilobytes', - Dimensions: [ - { - Name: 'Compression', - Value: 'GZip', - }, - ], - }, - ], - }; - if (metricData.rules) { - params.MetricData.push({ - MetricName: metricName, - Value: metricData.rules, - Unit: 'Count', - Dimensions: [ - { - Name: 'Metric', - Value: 'Rules', - }, - ], - }); - } + new Promise((resolve, reject) => { + const params = { + Namespace: 'Assets', + MetricData: [ + { + MetricName: metricName, + Value: metricData.uncompressed, + Unit: 'Kilobytes', + Dimensions: [ + { + Name: 'Compression', + Value: 'None', + }, + ], + }, + { + MetricName: metricName, + Value: metricData.compressed, + Unit: 'Kilobytes', + Dimensions: [ + { + Name: 'Compression', + Value: 'GZip', + }, + ], + }, + ], + }; + if (metricData.rules) { + params.MetricData.push({ + MetricName: metricName, + Value: metricData.rules, + Unit: 'Count', + Dimensions: [ + { + Name: 'Metric', + Value: 'Rules', + }, + ], + }); + } - if (metricData.totalSelectors) { - params.MetricData.push({ - MetricName: metricName, - Value: metricData.totalSelectors, - Unit: 'Count', - Dimensions: [ - { - Name: 'Metric', - Value: 'Total Selectors', - }, - ], - }); - } + if (metricData.totalSelectors) { + params.MetricData.push({ + MetricName: metricName, + Value: metricData.totalSelectors, + Unit: 'Count', + Dimensions: [ + { + Name: 'Metric', + Value: 'Total Selectors', + }, + ], + }); + } - if (metricData.averageSelectors) { - params.MetricData.push({ - MetricName: metricName, - Value: metricData.averageSelectors, - Unit: 'Count', - Dimensions: [ - { - Name: 'Metric', - Value: 'Average Selectors', - }, - ], - }); - } + if (metricData.averageSelectors) { + params.MetricData.push({ + MetricName: metricName, + Value: metricData.averageSelectors, + Unit: 'Count', + Dimensions: [ + { + Name: 'Metric', + Value: 
'Average Selectors', + }, + ], + }); + } - cloudwatch.putMetricData(params, (err, data) => { - if (err) { - return reject( - new Error(`Failed to log metrics to cloudwatch: ${err}`) - ); - } - return resolve({ - file: metricName, - id: data.ResponseMetadata.RequestId, - }); - }); - }); + cloudwatch.putMetricData(params, (err, data) => { + if (err) { + return reject( + new Error(`Failed to log metrics to cloudwatch: ${err}`), + ); + } + return resolve({ + file: metricName, + id: data.ResponseMetadata.RequestId, + }); + }); + }); diff --git a/tools/asset-monitor/index.js b/tools/asset-monitor/index.js index 2957cfb693cf..33542a4c76f2 100644 --- a/tools/asset-monitor/index.js +++ b/tools/asset-monitor/index.js @@ -14,69 +14,69 @@ const { target } = require('../__tasks__/config').paths; const credentials = '/etc/gu/frontend.properties'; const files = [].concat( - glob.sync(`${target}/javascripts/**/*.js`, { - ignore: '**/{components,vendor}/**', - nodir: true, - }), - glob.sync(`${target}/stylesheets/**/*`, { - ignore: '**/*head.identity.css', - nodir: true, - }) + glob.sync(`${target}/javascripts/**/*.js`, { + ignore: '**/{components,vendor}/**', + nodir: true, + }), + glob.sync(`${target}/stylesheets/**/*`, { + ignore: '**/*head.identity.css', + nodir: true, + }), ); const size = (filePath, fileData) => { - const unZipped = fs.statSync(filePath).size; - const zipped = gzipSize.sync(fileData); - return { - uncompressed: Number((unZipped / 1024).toFixed(1)), - uncompressedPretty: pretty(unZipped), - compressed: Number((zipped / 1024).toFixed(1)), - compressedPretty: pretty(zipped), - }; + const unZipped = fs.statSync(filePath).size; + const zipped = gzipSize.sync(fileData); + return { + uncompressed: Number((unZipped / 1024).toFixed(1)), + uncompressedPretty: pretty(unZipped), + compressed: Number((zipped / 1024).toFixed(1)), + compressedPretty: pretty(zipped), + }; }; const css = (filePath, fileData) => { - if (!filePath.match(/.css$/)) return {}; - const { - rules: { total: rules }, - selectors: { total: totalSelectors }, - } = cssstats(fileData, { mediaQueries: false }); + if (!filePath.match(/.css$/)) return {}; + const { + rules: { total: rules }, + selectors: { total: totalSelectors }, + } = cssstats(fileData, { mediaQueries: false }); - return { - rules, - totalSelectors, - averageSelectors: +(totalSelectors / rules).toFixed(1), - }; + return { + rules, + totalSelectors, + averageSelectors: +(totalSelectors / rules).toFixed(1), + }; }; -const analyse = filePath => { - console.log(`Analysing ${filePath}`); - try { - const fileData = fs.readFileSync(filePath, 'utf8'); +const analyse = (filePath) => { + console.log(`Analysing ${filePath}`); + try { + const fileData = fs.readFileSync(filePath, 'utf8'); - const gzipData = size(filePath, fileData); - const cssData = css(filePath, fileData); - const data = Object.assign(gzipData, cssData); + const gzipData = size(filePath, fileData); + const cssData = css(filePath, fileData); + const data = Object.assign(gzipData, cssData); - console.log(`Uncompressed: ${chalk.cyan(data.uncompressedPretty)}`); - console.log(`Compressed: ${chalk.cyan(data.compressedPretty)}`); + console.log(`Uncompressed: ${chalk.cyan(data.uncompressedPretty)}`); + console.log(`Compressed: ${chalk.cyan(data.compressedPretty)}`); - return cloudwatch - .configure(credentials) - .then(() => cloudwatch.log(path.basename(filePath), data)) - .then(msg => { - console.log( - chalk.green( - `Successfully logged file data to CloudWatch ${msg.id}` - ) - ); - return true; - }) - 
.catch(console.log); - } catch (e) { - console.log(e); - return null; - } + return cloudwatch + .configure(credentials) + .then(() => cloudwatch.log(path.basename(filePath), data)) + .then((msg) => { + console.log( + chalk.green( + `Successfully logged file data to CloudWatch ${msg.id}`, + ), + ); + return true; + }) + .catch(console.log); + } catch (e) { + console.log(e); + return null; + } }; files.forEach(analyse); diff --git a/tools/compile-css.js b/tools/compile-css.js index d1fb67bdb7b6..24e4c4cb5d6d 100644 --- a/tools/compile-css.js +++ b/tools/compile-css.js @@ -25,84 +25,86 @@ const { src, target } = require('./__tasks__/config').paths; const sassDir = path.resolve(src, 'stylesheets'); const SASS_SETTINGS = { - outputStyle: 'compressed', - sourceMap: true, - precision: 5, + outputStyle: 'compressed', + sourceMap: true, + precision: 5, }; const BROWSERS_LIST = [ - 'Firefox >= 45', - 'Explorer >= 10', - 'Safari >= 7', - 'Chrome >= 50', - - 'iOS >= 7', - 'Android >= 5', - 'BlackBerry >= 10', - 'ExplorerMobile >= 10', - - '> 2% in US', - '> 2% in AU', - '> 2% in GB', + 'Firefox >= 45', + 'Explorer >= 10', + 'Safari >= 7', + 'Chrome >= 50', + + 'iOS >= 7', + 'Android >= 5', + 'BlackBerry >= 10', + 'ExplorerMobile >= 10', + + '> 2% in US', + '> 2% in AU', + '> 2% in GB', ]; const REMIFICATIONS = { - replace: true, - root_value: 16, - unit_precision: 5, - propList: ['*'], + replace: true, + root_value: 16, + unit_precision: 5, + propList: ['*'], }; -const getFiles = sassGlob => glob.sync(path.resolve(sassDir, sassGlob)); +const getFiles = (sassGlob) => glob.sync(path.resolve(sassDir, sassGlob)); module.exports = ( - sassGlob, - { remify = true, browsers = BROWSERS_LIST } = {} + sassGlob, + { remify = true, browsers = BROWSERS_LIST } = {}, ) => { - if (typeof sassGlob !== 'string') { - return Promise.reject(new Error('No glob provided.')); - } - - return Promise.all( - getFiles(sassGlob).map(filePath => { - const dest = path.resolve( - target, - 'stylesheets', - path.relative(sassDir, filePath).replace('scss', 'css') - ); - const sassOptions = Object.assign( - { - file: filePath, - outFile: dest, - sourceMapContents: true, - includePaths: ['node_modules'], - }, - SASS_SETTINGS - ); - - const postcssPlugins = [autoprefixer({ overrideBrowserslist: browsers })]; - if (remify) { - postcssPlugins.push(pxtorem(REMIFICATIONS)); - } - - mkdirp.sync(path.parse(dest).dir); - return sassRenderP(sassOptions) - .then(result => - postcss(postcssPlugins).process(result.css.toString(), { - from: filePath, - to: dest, - map: { - inline: false, - prev: result.map.toString(), - }, - }) - ) - .then(result => - Promise.all([ - writeFileP(dest, result.css.toString()), - writeFileP(`${dest}.map`, result.map.toString()), - ]) - ); - }) - ); + if (typeof sassGlob !== 'string') { + return Promise.reject(new Error('No glob provided.')); + } + + return Promise.all( + getFiles(sassGlob).map((filePath) => { + const dest = path.resolve( + target, + 'stylesheets', + path.relative(sassDir, filePath).replace('scss', 'css'), + ); + const sassOptions = Object.assign( + { + file: filePath, + outFile: dest, + sourceMapContents: true, + includePaths: ['node_modules'], + }, + SASS_SETTINGS, + ); + + const postcssPlugins = [ + autoprefixer({ overrideBrowserslist: browsers }), + ]; + if (remify) { + postcssPlugins.push(pxtorem(REMIFICATIONS)); + } + + mkdirp.sync(path.parse(dest).dir); + return sassRenderP(sassOptions) + .then((result) => + postcss(postcssPlugins).process(result.css.toString(), { + from: filePath, + to: dest, + 
map: { + inline: false, + prev: result.map.toString(), + }, + }), + ) + .then((result) => + Promise.all([ + writeFileP(dest, result.css.toString()), + writeFileP(`${dest}.map`, result.map.toString()), + ]), + ); + }), + ); }; diff --git a/tools/eslint-plugin-guardian-frontend/README.md b/tools/eslint-plugin-guardian-frontend/README.md index 01a8e1f46953..c13238650afb 100644 --- a/tools/eslint-plugin-guardian-frontend/README.md +++ b/tools/eslint-plugin-guardian-frontend/README.md @@ -9,9 +9,13 @@ Generally, that will be project-specific stuff e.g. preferring the `config` modu They're not that intuitive to write, ask around if you're lost. 1. Add a new test e.g. `__tests__/my-new-rule.js`. - - ask in the [dotcom-platform slack channel](https://theguardian.slack.com/messages/dotcom-platform) if you need some guidance, or see the existing ones + +- ask in the [dotcom-platform slack channel](https://theguardian.slack.com/messages/dotcom-platform) if you need some guidance, or see the existing ones + 2. Confirm it fails by running `npm test` from this directory. - - tests are run with [Jest](https://facebook.github.io/jest/docs/getting-started.html) - - you can run individual tests with `npm test -- ./__tests__/my-new-rule.js` + +- tests are run with [Jest](https://facebook.github.io/jest/docs/getting-started.html) +- you can run individual tests with `npm test -- ./__tests__/my-new-rule.js` + 3. Write your rule till it passes! 4. Since we have to install the package from the local FS with `yarn`, you'll need to bump the version in `package.json` to pick your new rule up. diff --git a/tools/eslint-plugin-guardian-frontend/__tests__/exports-last.js b/tools/eslint-plugin-guardian-frontend/__tests__/exports-last.js index 440068a00aea..0343f6900358 100644 --- a/tools/eslint-plugin-guardian-frontend/__tests__/exports-last.js +++ b/tools/eslint-plugin-guardian-frontend/__tests__/exports-last.js @@ -2,24 +2,24 @@ const { RuleTester } = require('eslint'); const rule = require('../rules/exports-last'); const ruleTester = new RuleTester({ - parser: 'babel-eslint', - parserOptions: { ecmaVersion: 2015, sourceType: 'module' }, + parser: 'babel-eslint', + parserOptions: { ecmaVersion: 2015, sourceType: 'module' }, }); ruleTester.run('exports-last', rule, { - valid: [ - `const foo = 'bar'; const bar = 'baz';`, - `const foo = 'bar'; export {foo};`, - `const foo = 'bar'; export default foo;`, - `const foo = 'bar'; export default foo; export const bar = true;`, - `export type a = { a: string }; const foo = 'bar';`, - ], + valid: [ + `const foo = 'bar'; const bar = 'baz';`, + `const foo = 'bar'; export {foo};`, + `const foo = 'bar'; export default foo;`, + `const foo = 'bar'; export default foo; export const bar = true;`, + `export type a = { a: string }; const foo = 'bar';`, + ], - invalid: [ - `export default 'bar'; const bar = true;`, - `export const foo = 'bar'; const bar = true;`, - ].map(code => ({ - code, - errors: ['Export statements should appear at the end of the file'], - })), + invalid: [ + `export default 'bar'; const bar = true;`, + `export const foo = 'bar'; const bar = true;`, + ].map((code) => ({ + code, + errors: ['Export statements should appear at the end of the file'], + })), }); diff --git a/tools/eslint-plugin-guardian-frontend/__tests__/global-config.js b/tools/eslint-plugin-guardian-frontend/__tests__/global-config.js index e47e2d8f798e..aa0fa94b2d64 100644 --- a/tools/eslint-plugin-guardian-frontend/__tests__/global-config.js +++ 
b/tools/eslint-plugin-guardian-frontend/__tests__/global-config.js @@ -4,25 +4,25 @@ const rule = require('../rules/global-config'); const ruleTester = new RuleTester({ parserOptions: { ecmaVersion: 2015 } }); ruleTester.run('global-config', rule, { - valid: [ - 'config.page', - 'config', - 'var config = 2', - 'foo.config', - 'foo.config.bar', - 'foo.guardian.config', - ], + valid: [ + 'config.page', + 'config', + 'var config = 2', + 'foo.config', + 'foo.config.bar', + 'foo.guardian.config', + ], - invalid: [ - 'var hello = guardian.config', - 'var hello = window.guardian.config', - 'var hello = loadscript(guardian.config)', - ].map(code => ({ - code, - errors: [ - { - type: 'Identifier', - }, - ], - })), + invalid: [ + 'var hello = guardian.config', + 'var hello = window.guardian.config', + 'var hello = loadscript(guardian.config)', + ].map((code) => ({ + code, + errors: [ + { + type: 'Identifier', + }, + ], + })), }); diff --git a/tools/eslint-plugin-guardian-frontend/__tests__/no-default-export.js b/tools/eslint-plugin-guardian-frontend/__tests__/no-default-export.js index 85bf843497d5..b5159fa3bf2c 100644 --- a/tools/eslint-plugin-guardian-frontend/__tests__/no-default-export.js +++ b/tools/eslint-plugin-guardian-frontend/__tests__/no-default-export.js @@ -2,22 +2,22 @@ const { RuleTester } = require('eslint'); const rule = require('../rules/no-default-export'); const ruleTester = new RuleTester({ - parserOptions: { ecmaVersion: 2015, sourceType: 'module' }, + parserOptions: { ecmaVersion: 2015, sourceType: 'module' }, }); ruleTester.run('no-default-export', rule, { - valid: [ - "export const hi = { hi: 'hi' }", - 'export const hi = "hi"', - 'export { hi }', - ], + valid: [ + "export const hi = { hi: 'hi' }", + 'export const hi = "hi"', + 'export { hi }', + ], - invalid: [ - 'export default "hi"', - "export default { hi: 'hi' }", - 'export default () => {}', - ].map(code => ({ - code, - errors: [{ message: 'Prefer named exports over default export.' }], - })), + invalid: [ + 'export default "hi"', + "export default { hi: 'hi' }", + 'export default () => {}', + ].map((code) => ({ + code, + errors: [{ message: 'Prefer named exports over default export.' 
}], + })), }); diff --git a/tools/eslint-plugin-guardian-frontend/__tests__/no-direct-access-config.js b/tools/eslint-plugin-guardian-frontend/__tests__/no-direct-access-config.js index 017cdc0f2bc1..35ceb906a5a1 100644 --- a/tools/eslint-plugin-guardian-frontend/__tests__/no-direct-access-config.js +++ b/tools/eslint-plugin-guardian-frontend/__tests__/no-direct-access-config.js @@ -4,14 +4,14 @@ const rule = require('../rules/no-direct-access-config'); const ruleTester = new RuleTester({ parserOptions: { ecmaVersion: 2015 } }); ruleTester.run('no-direct-access-config', rule, { - valid: ['config', 'config.get()'], + valid: ['config', 'config.get()'], - invalid: ['config.page', 'config.page.keywordIds'].map(code => ({ - code, - errors: [ - { - type: 'Identifier', - }, - ], - })), + invalid: ['config.page', 'config.page.keywordIds'].map((code) => ({ + code, + errors: [ + { + type: 'Identifier', + }, + ], + })), }); diff --git a/tools/eslint-plugin-guardian-frontend/__tests__/no-multiple-classlist-parameter.js b/tools/eslint-plugin-guardian-frontend/__tests__/no-multiple-classlist-parameter.js index 813ef036b21c..31a676bbf583 100644 --- a/tools/eslint-plugin-guardian-frontend/__tests__/no-multiple-classlist-parameter.js +++ b/tools/eslint-plugin-guardian-frontend/__tests__/no-multiple-classlist-parameter.js @@ -4,20 +4,20 @@ const rule = require('../rules/no-multiple-classlist-parameters'); const ruleTester = new RuleTester({ parserOptions: { ecmaVersion: 2015 } }); ruleTester.run('no-multiple-classlist-parameter', rule, { - valid: [ - 'element.classList.add("class1")', - 'element.classList.remove("class1")', - ], + valid: [ + 'element.classList.add("class1")', + 'element.classList.remove("class1")', + ], - invalid: [ - 'element.classList.add("class1", "class2")', - 'element.classList.add("class1", "class2", "class3")', - ].map(code => ({ - code, - errors: [ - { - type: 'Identifier', - }, - ], - })), + invalid: [ + 'element.classList.add("class1", "class2")', + 'element.classList.add("class1", "class2", "class3")', + ].map((code) => ({ + code, + errors: [ + { + type: 'Identifier', + }, + ], + })), }); diff --git a/tools/eslint-plugin-guardian-frontend/index.js b/tools/eslint-plugin-guardian-frontend/index.js index 2308c8cbc0c0..88a227fa6b8d 100644 --- a/tools/eslint-plugin-guardian-frontend/index.js +++ b/tools/eslint-plugin-guardian-frontend/index.js @@ -1,5 +1,5 @@ // just grab everything out of './rules' const path = require('path'); module.exports.rules = require('requireindex')( - path.resolve(__dirname, 'rules') + path.resolve(__dirname, 'rules'), ); diff --git a/tools/eslint-plugin-guardian-frontend/package.json b/tools/eslint-plugin-guardian-frontend/package.json index 912202d2ab22..2958a56ae167 100644 --- a/tools/eslint-plugin-guardian-frontend/package.json +++ b/tools/eslint-plugin-guardian-frontend/package.json @@ -1,9 +1,9 @@ { - "name": "eslint-plugin-guardian-frontend", - "main": "index.js", - "private": true, - "version": "5.0.0", - "scripts": { - "test": "jest" - } + "name": "eslint-plugin-guardian-frontend", + "main": "index.js", + "private": true, + "version": "5.0.0", + "scripts": { + "test": "jest" + } } diff --git a/tools/eslint-plugin-guardian-frontend/rules/exports-last.js b/tools/eslint-plugin-guardian-frontend/rules/exports-last.js index 5795816d6635..db1a69428d70 100644 --- a/tools/eslint-plugin-guardian-frontend/rules/exports-last.js +++ b/tools/eslint-plugin-guardian-frontend/rules/exports-last.js @@ -1,46 +1,46 @@ // cribbed from 
https://github.com/k15a/eslint-plugin-import/commit/84fd4f27eb537c9230196d6403aafd406e46e6e9 const isExportStatement = (node, context) => { - if ( - [ - 'ExportAllDeclaration', - 'ExportNamedDeclaration', - 'ExportDefaultDeclaration', - ].includes(node.type) && - // ignore flowtype exports - !['type', 'interface'].includes( - context.getSourceCode().getTokens(node)[1].value - ) - ) { - return true; - } + if ( + [ + 'ExportAllDeclaration', + 'ExportNamedDeclaration', + 'ExportDefaultDeclaration', + ].includes(node.type) && + // ignore flowtype exports + !['type', 'interface'].includes( + context.getSourceCode().getTokens(node)[1].value, + ) + ) { + return true; + } - return false; + return false; }; module.exports = { - create(context) { - return { - Program({ body }) { - const lastNonExportStatement = body.reduce( - (acc, node, index) => - isExportStatement(node, context) ? acc : index, - 0 - ); + create(context) { + return { + Program({ body }) { + const lastNonExportStatement = body.reduce( + (acc, node, index) => + isExportStatement(node, context) ? acc : index, + 0, + ); - body.forEach((node, index) => { - if ( - isExportStatement(node, context) && - index < lastNonExportStatement - ) { - context.report({ - node, - message: - 'Export statements should appear at the end of the file', - }); - } - }); - }, - }; - }, + body.forEach((node, index) => { + if ( + isExportStatement(node, context) && + index < lastNonExportStatement + ) { + context.report({ + node, + message: + 'Export statements should appear at the end of the file', + }); + } + }); + }, + }; + }, }; diff --git a/tools/eslint-plugin-guardian-frontend/rules/global-config.js b/tools/eslint-plugin-guardian-frontend/rules/global-config.js index 711ce2d0e8e6..53e330a5a7e8 100644 --- a/tools/eslint-plugin-guardian-frontend/rules/global-config.js +++ b/tools/eslint-plugin-guardian-frontend/rules/global-config.js @@ -1,31 +1,31 @@ module.exports = { - create(context) { - const isDot = token => - token && token.type === 'Punctuator' && token.value === '.'; - const isIdentifierOf = (prop, token) => - token && token.type === 'Identifier' && token.value === prop; + create(context) { + const isDot = (token) => + token && token.type === 'Punctuator' && token.value === '.'; + const isIdentifierOf = (prop, token) => + token && token.type === 'Identifier' && token.value === prop; - return { - Identifier: node => { - if (node.name === 'config') { - const [dot1, parent, dot2, grandparent] = context - .getSourceCode() - .getTokensBefore(node, 4) - .reverse(); + return { + Identifier: (node) => { + if (node.name === 'config') { + const [dot1, parent, dot2, grandparent] = context + .getSourceCode() + .getTokensBefore(node, 4) + .reverse(); - if ( - isDot(dot1) && - isIdentifierOf('guardian', parent) && - (!isDot(dot2) || isIdentifierOf('window', grandparent)) - ) { - context.report({ - node, - message: - "use the 'config' module instead of window.guardian.config", - }); - } - } - }, - }; - }, + if ( + isDot(dot1) && + isIdentifierOf('guardian', parent) && + (!isDot(dot2) || isIdentifierOf('window', grandparent)) + ) { + context.report({ + node, + message: + "use the 'config' module instead of window.guardian.config", + }); + } + } + }, + }; + }, }; diff --git a/tools/eslint-plugin-guardian-frontend/rules/no-default-export.js b/tools/eslint-plugin-guardian-frontend/rules/no-default-export.js index 7b4ccecb797a..69cf16653cf2 100644 --- a/tools/eslint-plugin-guardian-frontend/rules/no-default-export.js +++ 
b/tools/eslint-plugin-guardian-frontend/rules/no-default-export.js @@ -1,19 +1,19 @@ const message = 'Prefer named exports over default export.'; module.exports = { - create: context => ({ - ExportDefaultSpecifier(node) { - context.report({ - node: node.exported, - message, - }); - }, + create: (context) => ({ + ExportDefaultSpecifier(node) { + context.report({ + node: node.exported, + message, + }); + }, - ExportDefaultDeclaration(node) { - context.report({ - loc: node.loc, - message, - }); - }, - }), + ExportDefaultDeclaration(node) { + context.report({ + loc: node.loc, + message, + }); + }, + }), }; diff --git a/tools/eslint-plugin-guardian-frontend/rules/no-direct-access-config.js b/tools/eslint-plugin-guardian-frontend/rules/no-direct-access-config.js index 450c3dabb939..f1e2b1eeea58 100644 --- a/tools/eslint-plugin-guardian-frontend/rules/no-direct-access-config.js +++ b/tools/eslint-plugin-guardian-frontend/rules/no-direct-access-config.js @@ -1,39 +1,39 @@ module.exports = { - create(context) { - const configInterface = [ - 'get', - 'set', - 'hasTone', - 'hasSeries', - 'referencesOfType', - 'referenceOfType', - 'webPublicationDateAsUrlPart', - 'dateFromSlug', - ]; - const isDot = token => - token && token.type === 'Punctuator' && token.value === '.'; - const isIdentifier = token => token && token.type === 'Identifier'; + create(context) { + const configInterface = [ + 'get', + 'set', + 'hasTone', + 'hasSeries', + 'referencesOfType', + 'referenceOfType', + 'webPublicationDateAsUrlPart', + 'dateFromSlug', + ]; + const isDot = (token) => + token && token.type === 'Punctuator' && token.value === '.'; + const isIdentifier = (token) => token && token.type === 'Identifier'; - return { - Identifier: node => { - if (node.name === 'config') { - const [dot, child] = context - .getSourceCode() - .getTokensAfter(node, 2); + return { + Identifier: (node) => { + if (node.name === 'config') { + const [dot, child] = context + .getSourceCode() + .getTokensAfter(node, 2); - if ( - isDot(dot) && - isIdentifier(child) && - !configInterface.includes(child.value) - ) { - context.report({ - node, - message: - 'Prefer accessing properties on config using get() method', - }); - } - } - }, - }; - }, + if ( + isDot(dot) && + isIdentifier(child) && + !configInterface.includes(child.value) + ) { + context.report({ + node, + message: + 'Prefer accessing properties on config using get() method', + }); + } + } + }, + }; + }, }; diff --git a/tools/eslint-plugin-guardian-frontend/rules/no-multiple-classlist-parameters.js b/tools/eslint-plugin-guardian-frontend/rules/no-multiple-classlist-parameters.js index 8c84640940db..7c296a13d42f 100644 --- a/tools/eslint-plugin-guardian-frontend/rules/no-multiple-classlist-parameters.js +++ b/tools/eslint-plugin-guardian-frontend/rules/no-multiple-classlist-parameters.js @@ -1,38 +1,36 @@ module.exports = { - create(context) { - const isDot = token => - token && token.type === 'Punctuator' && token.value === '.'; - const isComma = token => - token && token.type === 'Punctuator' && token.value === ','; - const isIdentifierOf = (prop, token) => - token && token.type === 'Identifier' && token.value === prop; + create(context) { + const isDot = (token) => + token && token.type === 'Punctuator' && token.value === '.'; + const isComma = (token) => + token && token.type === 'Punctuator' && token.value === ','; + const isIdentifierOf = (prop, token) => + token && token.type === 'Identifier' && token.value === prop; - return { - Identifier: node => { - if (node.name === 'add') { - const 
[dot, parent] = context - .getSourceCode() - .getTokensBefore(node, 2) - .reverse(); - const [ - , - , - maybeComma, - ] = context.getSourceCode().getTokensAfter(node, 3); + return { + Identifier: (node) => { + if (node.name === 'add') { + const [dot, parent] = context + .getSourceCode() + .getTokensBefore(node, 2) + .reverse(); + const [, , maybeComma] = context + .getSourceCode() + .getTokensAfter(node, 3); - if ( - isDot(dot) && - isIdentifierOf('classList', parent) && - isComma(maybeComma) - ) { - context.report({ - node, - message: `Only one class name can be passed to classList.add + if ( + isDot(dot) && + isIdentifierOf('classList', parent) && + isComma(maybeComma) + ) { + context.report({ + node, + message: `Only one class name can be passed to classList.add See: https://github.com/Financial-Times/polyfill-service/issues/268`, - }); - } - } - }, - }; - }, + }); + } + } + }, + }; + }, }; diff --git a/tools/sync-githooks.js b/tools/sync-githooks.js index ef8d49874044..47ac71dad46e 100755 --- a/tools/sync-githooks.js +++ b/tools/sync-githooks.js @@ -9,9 +9,9 @@ const target = path.resolve(__dirname, '..', '.git', 'hooks'); // always try and remove any old ones try { - rimraf.sync(target); + rimraf.sync(target); } catch (e) { - /* do nothing */ + /* do nothing */ } // TC doesn't want them, but everyone else does diff --git a/tools/task-runner/README.md b/tools/task-runner/README.md index 28a5eea44794..009abd1e7c9e 100644 --- a/tools/task-runner/README.md +++ b/tools/task-runner/README.md @@ -21,8 +21,9 @@ You can pass a `--dev` flag to prefer a dev version, if it exists (suffix the ta ``` ./tools/task-runner/runner fakemodule/fakemodule --dev ``` -- runs `tools/__tasks__/fakemodule/fakemodule.dev.js` if it exists -- reverts to `tools/__tasks__/fakemodule/fakemodule.js` if the above fails + +- runs `tools/__tasks__/fakemodule/fakemodule.dev.js` if it exists +- reverts to `tools/__tasks__/fakemodule/fakemodule.js` if the above fails ### Modes @@ -39,7 +40,6 @@ Task definitions are standard node modules that export a task object. As a minum 1. `description` 2. `task` - ### `description#String` Describes the task! @@ -50,23 +50,22 @@ Strings are treated as a standard terminal command: ```js module.exports = { - description: "Print 'hello'", - task: "echo 'hello'" -} + description: "Print 'hello'", + task: "echo 'hello'", +}; ``` They are run with [execa](https://github.com/sindresorhus/execa), which checks for locally installed binaries (`./node_modules`) before global ones, just as with npm scripts. ### `task#Function` - - ```js module.exports = { description: "run a JS function", task: () => {...} } ``` + The ['Task' section of the listr docs](https://github.com/SamVerschueren/listr#task) covers this best (since they map directly onto them). ### `task#Array` @@ -75,32 +74,32 @@ Tasks can also be an array of subtasks (which must also be valid tasks): ```js module.exports = { - description: "my task", - task: [ - { - description: 'a subtask', - task: "command to run" - }, - { - description: 'another subtask', - task: () => { - // do something... - } - } - ] -} + description: 'my task', + task: [ + { + description: 'a subtask', + task: 'command to run', + }, + { + description: 'another subtask', + task: () => { + // do something... + }, + }, + ], +}; ``` If the task is an array of other tasks, you can also specify that its tasks should run concurrently: ```js module.exports = { - description: "my concurrent task", - task: [ - // tasks... 
- ], - concurrent: true -} + description: 'my concurrent task', + task: [ + // tasks... + ], + concurrent: true, +}; ``` ### Requiring task modules @@ -109,7 +108,7 @@ Since tasks are just JS objects, you can `require` them in the standard node way ```js module.exports = { - description: "run another task", - task: require('./another-task') -} + description: 'run another task', + task: require('./another-task'), +}; ``` diff --git a/tools/task-runner/run-task-verbose-formater.js b/tools/task-runner/run-task-verbose-formater.js index faff4700703e..bbb8da5923f7 100644 --- a/tools/task-runner/run-task-verbose-formater.js +++ b/tools/task-runner/run-task-verbose-formater.js @@ -2,67 +2,63 @@ const figures = require('figures'); const chalk = require('chalk'); const log = (title, parents, message = '') => { - console.log( - `${chalk.dim( - `${parents.concat(['']).join(` ${figures.arrowRight} `)}${title}` - )} ${message}` - ); + console.log( + `${chalk.dim( + `${parents.concat(['']).join(` ${figures.arrowRight} `)}${title}`, + )} ${message}`, + ); }; const render = (tasks, parents = []) => { - // eslint-disable-next-line no-restricted-syntax - for (const task of tasks) { - task.on('SUBTASKS', event => { - render(task.subtasks, parents.concat([task.title])); - }); - task.on('STATE', event => { - if (task.isPending()) { - log(task.title, parents, chalk.dim('...')); - } - if (task.hasFailed()) { - log(task.title, parents, chalk.red(figures.cross)); - } - if (task.isSkipped()) { - log( - task.title, - parents, - `${chalk.dim(figures.arrowDown)} (${task.output})` - ); - } - if ( - task.isCompleted() && - !task.hasFailed() && - !task.isSkipped() - ) { - log(task.title, parents, chalk.dim.green(figures.tick)); - } - }); - task.on('DATA', event => { - console.log(event.data); - }); - } + // eslint-disable-next-line no-restricted-syntax + for (const task of tasks) { + task.on('SUBTASKS', (event) => { + render(task.subtasks, parents.concat([task.title])); + }); + task.on('STATE', (event) => { + if (task.isPending()) { + log(task.title, parents, chalk.dim('...')); + } + if (task.hasFailed()) { + log(task.title, parents, chalk.red(figures.cross)); + } + if (task.isSkipped()) { + log( + task.title, + parents, + `${chalk.dim(figures.arrowDown)} (${task.output})`, + ); + } + if (task.isCompleted() && !task.hasFailed() && !task.isSkipped()) { + log(task.title, parents, chalk.dim.green(figures.tick)); + } + }); + task.on('DATA', (event) => { + console.log(event.data); + }); + } }; class VerboseRenderer { - constructor(tasks) { - // eslint-disable-next-line no-underscore-dangle - this._tasks = tasks; - } + constructor(tasks) { + // eslint-disable-next-line no-underscore-dangle + this._tasks = tasks; + } - // eslint-disable-next-line class-methods-use-this - get nonTTY() { - return true; - } + // eslint-disable-next-line class-methods-use-this + get nonTTY() { + return true; + } - render() { - // eslint-disable-next-line no-underscore-dangle - render(this._tasks); - } + render() { + // eslint-disable-next-line no-underscore-dangle + render(this._tasks); + } - // eslint-disable-next-line class-methods-use-this - end() { - // do nothing - } + // eslint-disable-next-line class-methods-use-this + end() { + // do nothing + } } module.exports = VerboseRenderer; diff --git a/tools/webpack-progress-reporter.js b/tools/webpack-progress-reporter.js index eb54690a7555..db9330f06f4b 100644 --- a/tools/webpack-progress-reporter.js +++ b/tools/webpack-progress-reporter.js @@ -1,8 +1,10 @@ const ProgressPlugin = 
require('webpack/lib/ProgressPlugin'); -module.exports = observer => - new ProgressPlugin((progress, msg, ...details) => { - const [a, b] = details; - const state = a && b ? `[${a}, ${b}]` : ''; - return observer.next(`${Math.round(progress * 100)}% ${msg} ${state}`); - }); +const reporter = (observer) => + new ProgressPlugin((progress, msg, ...details) => { + const [a, b] = details; + const state = a && b ? `[${a}, ${b}]` : ''; + return observer.next(`${Math.round(progress * 100)}% ${msg} ${state}`); + }); + +module.exports = reporter; diff --git a/yarn.lock b/yarn.lock index fb0f5f27187a..be3b009f5be3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8324,8 +8324,8 @@ __metadata: "eslint-plugin-guardian-frontend@file:tools/eslint-plugin-guardian-frontend::locator=%40guardian%2Ffrontend%40workspace%3A.": version: 5.0.0 - resolution: "eslint-plugin-guardian-frontend@file:tools/eslint-plugin-guardian-frontend#tools/eslint-plugin-guardian-frontend::hash=6c4c1e&locator=%40guardian%2Ffrontend%40workspace%3A." - checksum: 10c0/ac97662d8e3d9312e34ff13f2c9708a030afc8028098d0ef7b875a09f8c16107720ff9032810caadbc539cb94babeac304fca7c264e864fb21ed03fdb8609797 + resolution: "eslint-plugin-guardian-frontend@file:tools/eslint-plugin-guardian-frontend#tools/eslint-plugin-guardian-frontend::hash=53ad28&locator=%40guardian%2Ffrontend%40workspace%3A." + checksum: 10c0/436fbc5541e58f42e943544bdd98163d113a3421082ac875e6d3dbb6a5f3a278e80f857042f67a308aaf29c09fbf00bd6f57c430628c4151dad79f01d9e79629 languageName: node linkType: hard
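
For reference, the task modules reformatted above all share the shape documented in `tools/task-runner/README.md`: a plain object with a `description`, a `task` (a string command, a function, or an array of subtasks), and an optional `concurrent` flag, assigned to a `task` constant and exported on its own line. The sketch below is only a minimal illustration of that shape; the file path, task names and commands are hypothetical, and it assumes the runner passes a shared `ctx` with a `messages` array to function tasks, as the `validate` tasks above do.

```js
// tools/__tasks__/example/index.js — hypothetical path, for illustration only
const task = {
    description: 'Example: lint Sass, then leave a note',
    task: [
        {
            // a string subtask is run as a terminal command
            description: 'Lint Sass',
            task: 'stylelint **/*.scss',
        },
        {
            // a function subtask receives the shared ctx object
            description: 'Leave a note',
            task: (ctx) => {
                ctx.messages.push("Don't forget to commit any fixes...");
                return Promise.resolve();
            },
        },
    ],
    concurrent: true,
};

module.exports = task;
```

Such a module would be invoked as `./tools/task-runner/runner example`, following the runner invocation shown in the same README.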