Rename successUrlsThreshold to more accurate maxPages
haqadn committed Jun 16, 2023
1 parent 7b1008d commit 66dd5aa
Showing 1 changed file with 20 additions and 22 deletions.
42 changes: 20 additions & 22 deletions src/generate-critical-css.ts
@@ -15,15 +15,15 @@ const noop = () => {
  *
  * Errors that occur during this process are collated, but not thrown yet.
  *
- * @param {BrowserInterface} browserInterface - interface to access pages
- * @param {string[]} urls - list of URLs to scan for CSS files
- * @param {number} successUrlsThreshold - success urls amount threshold
+ * @param {BrowserInterface} browserInterface - interface to access pages
+ * @param {string[]} urls - list of URLs to scan for CSS files
+ * @param {number} maxPages - number of pages to process at most
  * @return {Array} - Two member array; CSSFileSet, and an object containing errors that occurred at each URL.
  */
 async function collateCssFiles(
 	browserInterface: BrowserInterface,
 	urls: string[],
-	successUrlsThreshold: number
+	maxPages: number
 ): Promise< [ CSSFileSet, { [ url: string ]: UrlError } ] > {
 	const cssFiles = new CSSFileSet( browserInterface );
 	const errors = {};
@@ -53,9 +53,9 @@ async function collateCssFiles(
 			const internalStyles = await browserInterface.getInternalStyles( url );
 			await cssFiles.addInternalStyles( url, internalStyles );
 
-			// Abort early if we hit the threshold of success urls.
+			// Abort early if we hit the critical mass
 			successes++;
-			if ( successes >= successUrlsThreshold ) {
+			if ( successes >= maxPages ) {
 				break;
 			}
 		} catch ( err ) {
@@ -69,13 +69,13 @@
 /**
  * Get CSS selectors for above the fold content for the valid URLs.
  *
- * @param {Object} param - All the parameters as object.
- * @param {BrowserInterface} param.browserInterface - Interface to access pages
- * @param {Object} param.selectorPages - All the CSS selectors to URLs map object
- * @param {string[]} param.validUrls - List of all the valid URLs
- * @param {Array} param.viewports - Browser viewports
- * @param {number} param.successUrlsThreshold - Success URLs amount threshold
- * @param {Function} param.updateProgress - Update progress callback function
+ * @param {Object} param - All the parameters as object.
+ * @param {BrowserInterface} param.browserInterface - Interface to access pages
+ * @param {Object} param.selectorPages - All the CSS selectors to URLs map object
+ * @param {string[]} param.validUrls - List of all the valid URLs
+ * @param {Array} param.viewports - Browser viewports
+ * @param {number} param.maxPages - Maximum number of pages to process
+ * @param {Function} param.updateProgress - Update progress callback function
  *
  * @return {Set<string>} - List of above the fold selectors.
  */
@@ -84,14 +84,14 @@ async function getAboveFoldSelectors( {
 	selectorPages,
 	validUrls,
 	viewports,
-	successUrlsThreshold,
+	maxPages,
 	updateProgress,
 }: {
 	browserInterface: BrowserInterface;
 	selectorPages: { [ selector: string ]: Set< string > };
 	validUrls: string[];
 	viewports: Viewport[];
-	successUrlsThreshold: number;
+	maxPages: number;
 	updateProgress: () => void;
 } ): Promise< Set< string > > {
 	// For each selector string, create a "trimmed" version with the stuff JavaScript can't handle cut out.
@@ -105,7 +105,7 @@ async function getAboveFoldSelectors( {
 	const aboveFoldSelectors = new Set< string >();
 	const dangerousSelectors = new Set< string >();
 
-	for ( const url of validUrls.slice( 0, successUrlsThreshold ) ) {
+	for ( const url of validUrls.slice( 0, maxPages ) ) {
 		// Work out which CSS selectors match any element on this page.
 		const pageSelectors = await browserInterface.runInPage<
 			ReturnType< typeof BrowserInterface.innerFindMatchingSelectors >
@@ -143,13 +143,15 @@ export async function generateCriticalCSS( {
 	viewports,
 	filters,
 	successRatio = 1,
+	maxPages = 10,
 }: {
 	browserInterface: BrowserInterface;
 	progressCallback?: ( step: number, total: number ) => void;
 	urls: string[];
 	viewports: Viewport[];
 	filters?: FilterSpec;
 	successRatio?: number;
+	maxPages?: number;
 } ): Promise< [ string, Error[] ] > {
 	const successUrlsThreshold = Math.ceil( urls.length * successRatio );
 
@@ -160,11 +162,7 @@ export async function generateCriticalCSS( {
 	const updateProgress = () => progressCallback( ++progress, progressSteps );
 
 	// Collate all CSS Files used by all valid URLs.
-	const [ cssFiles, cssFileErrors ] = await collateCssFiles(
-		browserInterface,
-		urls,
-		successUrlsThreshold
-	);
+	const [ cssFiles, cssFileErrors ] = await collateCssFiles( browserInterface, urls, maxPages );
 	updateProgress();
 
 	// Verify there are enough valid URLs to carry on with.
@@ -186,7 +184,7 @@ export async function generateCriticalCSS( {
 		selectorPages,
 		validUrls,
 		viewports,
-		successUrlsThreshold,
+		maxPages,
 		updateProgress,
 	} );
 
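For reference, a rough usage sketch of the renamed option (not part of this commit): it shows how maxPages now caps how many successfully-loaded pages are analysed, alongside the existing successRatio check. The browserInterface instance, the URLs, and the viewport shape ( { width, height } ) below are placeholders assumed for illustration.

// Sketch only: assumes an existing BrowserInterface instance; URLs and viewport are placeholders.
const [ css, errors ] = await generateCriticalCSS( {
	browserInterface,
	urls: [ 'https://example.com/', 'https://example.com/blog/' ],
	viewports: [ { width: 1280, height: 720 } ],
	successRatio: 0.8, // at least 80% of the URLs must load successfully
	maxPages: 5, // analyse at most 5 successful pages (defaults to 10 after this change)
	progressCallback: ( step, total ) => console.log( `${ step } / ${ total }` ),
} );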
