feat(events): add async event handling
j-mendez committed Sep 5, 2022
1 parent c33fda2 commit 9fe40a8
Showing 16 changed files with 294 additions and 244 deletions.
6 changes: 3 additions & 3 deletions Dockerfile
@@ -10,7 +10,7 @@ RUN mkdir ./doc && cp -R node_modules/@a11ywatch/protos proto

RUN protoc --doc_out=./doc --doc_opt=html,index.html proto/*.proto

-FROM node:18.7.0-alpine AS installer
+FROM node:18.8.0-alpine AS installer

WORKDIR /usr/src/app

@@ -21,7 +21,7 @@ RUN apk upgrade --update-cache --available && \
COPY . .
RUN npm ci

-FROM node:18.7.0-alpine AS builder
+FROM node:18.8.0-alpine AS builder

WORKDIR /usr/src/app

@@ -32,7 +32,7 @@ RUN npm run build
RUN rm -R ./node_modules
RUN npm install --production

-FROM node:18.7-alpine
+FROM node:18.8.0-alpine

WORKDIR /usr/src/app

2 changes: 1 addition & 1 deletion Dockerfile.dev
@@ -10,7 +10,7 @@ RUN mkdir ./doc && cp -R node_modules/@a11ywatch/protos proto

RUN protoc --doc_out=./doc --doc_opt=html,index.html proto/*.proto

-FROM node:18.7.0-alpine
+FROM node:18.8.0-alpine

WORKDIR /usr/src/app

18 changes: 9 additions & 9 deletions package-lock.json

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@a11ywatch/core",
"version": "0.5.49",
"version": "0.5.54",
"description": "a11ywatch central api",
"main": "./server.js",
"scripts": {
@@ -18,7 +18,7 @@
"@fastify/cors": "^7.0.0",
"@fastify/rate-limit": "^6.0.1",
"@graphql-tools/schema": "^8.5.1",
"@grpc/grpc-js": "1.6.10",
"@grpc/grpc-js": "1.6.12",
"@grpc/proto-loader": "0.7.2",
"apollo-server": "3.10.1",
"apollo-server-core": "3.10.1",
180 changes: 93 additions & 87 deletions src/core/actions/accessibility/crawl.ts
@@ -44,16 +44,17 @@ const trackerProccess = (
{ domain, urlMap, userId, shutdown = false }: any,
blockEvent?: boolean
) => {
if (!blockEvent && data) {
crawlEmitter.emit(`crawl-${domainName(domain)}-${userId || 0}`, data);
}

// determine crawl has been processed top level tracking
crawlTrackingEmitter.emit("crawl-processed", {
user_id: userId,
domain,
pages: [urlMap],
shutdown,
process.nextTick(() => {
if (!blockEvent && data) {
crawlEmitter.emit(`crawl-${domainName(domain)}-${userId || 0}`, data);
}
// determine crawl has been processed top level tracking
crawlTrackingEmitter.emit("crawl-processed", {
user_id: userId,
domain,
pages: [urlMap],
shutdown,
});
});
};

@@ -233,96 +234,101 @@ export const crawlPage = async (

// if website record exist update integrity of the data.
if (website) {
// if ROOT domain for scan update Website Collection.
if (rootPage) {
const { issuesInfo, ...updateProps } = updateWebsiteProps;

await collectionUpsert(
updateProps,
[websiteCollection, !!updateWebsiteProps],
{
searchProps: { url: pageUrl, userId },
}
);
}
setImmediate(async () => {
// if ROOT domain for scan update Website Collection.
if (rootPage) {
const { issuesInfo, ...updateProps } = updateWebsiteProps;

await collectionUpsert(
updateProps,
[websiteCollection, !!updateWebsiteProps],
{
searchProps: { url: pageUrl, userId },
}
);
}

// if scripts enabled get collection
if (scriptsEnabled) {
[scripts, scriptsCollection] = await ScriptsController().getScript(
{ pageUrl, userId, noRetries: true },
true
);

if (script) {
script.userId = userId;
// TODO: look into auto meta reason
if (!scripts?.scriptMeta) {
script.scriptMeta = {
skipContentEnabled: true,
};
// if scripts enabled get collection
if (scriptsEnabled) {
[scripts, scriptsCollection] = await ScriptsController().getScript(
{ pageUrl, userId, noRetries: true },
true
);

if (script) {
script.userId = userId;
// TODO: look into auto meta reason
if (!scripts?.scriptMeta) {
script.scriptMeta = {
skipContentEnabled: true,
};
}
}
}
}

const shouldUpsertCollections = pageConstainsIssues || issueExist; // if issues exist prior or current update collection

// Add to Issues collection if page contains issues or if record should update/delete.
if (shouldUpsertCollections) {
await collectionUpsert(lighthouseData, [pageSpeedCollection, pageSpeed]); // PageInsights

const { issueMeta, ...analyticsProps } = issuesInfo;
await collectionUpsert(
{
pageUrl,
domain,
userId,
adaScore,
...analyticsProps,
},
[analyticsCollection, analytics]
); // ANALYTICS

await collectionUpsert(
newIssue,
[issuesCollection, issueExist, !pageConstainsIssues],
{
searchProps: { pageUrl, userId },
}
); // ISSUES COLLECTION
}
const shouldUpsertCollections = pageConstainsIssues || issueExist; // if issues exist prior or current update collection

// Add to Issues collection if page contains issues or if record should update/delete.
if (shouldUpsertCollections) {
await collectionUpsert(lighthouseData, [
pageSpeedCollection,
pageSpeed,
]); // PageInsights

const { issueMeta, ...analyticsProps } = issuesInfo;
await collectionUpsert(
{
pageUrl,
domain,
userId,
adaScore,
...analyticsProps,
},
[analyticsCollection, analytics]
); // ANALYTICS

await collectionUpsert(
newIssue,
[issuesCollection, issueExist, !pageConstainsIssues],
{
searchProps: { pageUrl, userId },
}
); // ISSUES COLLECTION
}

// Pages
if ((!newSite && shouldUpsertCollections) || newSite) {
await collectionUpsert(
updateWebsiteProps,
[pagesCollection, newSite, !pageConstainsIssues], // delete collection if issues do not exist
{
searchProps: { url: pageUrl, userId },
}
);
}
// Pages
if ((!newSite && shouldUpsertCollections) || newSite) {
await collectionUpsert(
updateWebsiteProps,
[pagesCollection, newSite, !pageConstainsIssues], // delete collection if issues do not exist
{
searchProps: { url: pageUrl, userId },
}
);
}

// Add script to collection
if (scriptsEnabled) {
await collectionUpsert(script, [scriptsCollection, scripts]);
}
// Add script to collection
if (scriptsEnabled) {
await collectionUpsert(script, [scriptsCollection, scripts]);
}
});
}

// Flatten issues with the array set results without meta.
const responseData = {
data: Object.assign({}, updateWebsiteProps, {
issues: subIssues,
}),
data: updateWebsiteProps,
};
responseData.data.issues = subIssues;

if (pageConstainsIssues) {
if (sendSub) {
try {
await pubsub.publish(ISSUE_ADDED, { issueAdded: newIssue });
} catch (_) {
// silent pub sub errors
}
}
sendSub &&
setImmediate(async () => {
try {
await pubsub.publish(ISSUE_ADDED, { issueAdded: newIssue });
} catch (_) {
// silent pub sub errors
}
});

// send email if issues of type error exist for the page. TODO: remove from layer.
if (sendEmail && issuesInfo?.errorCount) {
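
Note on the change above: the tracker's emitter calls are now wrapped in `process.nextTick`, and the collection upserts and pubsub publish in `setImmediate`, so the crawl path returns before listener and database work runs. Below is a minimal, self-contained sketch of the `process.nextTick` deferral pattern; the `trackProcessed` helper and the listener wiring are illustrative stand-ins, not the project's actual code.

```ts
import { EventEmitter } from "node:events";

// Illustrative stand-ins for crawlEmitter and crawlTrackingEmitter.
const crawlEmitter = new EventEmitter();
const crawlTrackingEmitter = new EventEmitter();

// process.nextTick queues the callback to run once the current operation
// finishes, before the event loop moves on, so the caller is never blocked
// by listener work.
const trackProcessed = (
  data: { url: string } | undefined,
  domain: string,
  userId = 0
) => {
  process.nextTick(() => {
    if (data) {
      crawlEmitter.emit(`crawl-${domain}-${userId}`, data);
    }
    crawlTrackingEmitter.emit("crawl-processed", { user_id: userId, domain });
  });
};

crawlEmitter.on("crawl-example.com-0", (page) => console.log("page", page));
crawlTrackingEmitter.on("crawl-processed", (p) => console.log("tracked", p));

trackProcessed({ url: "https://example.com" }, "example.com");
console.log("trackProcessed returned before any listener ran");
```

Run with ts-node (or compiled with tsc), this prints the "returned" line first and the listener output afterwards, which is the ordering the commit is after: event fan-out no longer happens inline with the crawl call.
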
7 changes: 4 additions & 3 deletions src/core/controllers/page-actions/find.ts
@@ -2,10 +2,11 @@ import { connect } from "../../../database";

// find page actions by path
export const findPageActionsByPath = async ({ path, userId }) => {
let actions;
try {
const [actionsCollection] = await connect("PageActions");
const [actionsCollection] = await connect("PageActions");

let actions = [];

try {
const action = await actionsCollection.findOne({
path,
userId,
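
In this file the lookup now resolves the `PageActions` collection before entering the `try` block and defaults `actions` to an empty array. A hypothetical helper showing the same shape; the `events` field and the names here are assumptions for illustration, since the rest of the real function is truncated above.

```ts
// Hypothetical stand-in for a MongoDB-like collection.
type ActionsCollection = {
  findOne(query: { path: string }): Promise<{ events?: string[] } | null>;
};

// Acquire the collection first so connection failures propagate to the caller,
// then default the result so the function always resolves to an array.
async function findActionsByPath(collection: ActionsCollection, path: string) {
  let actions: string[] = [];

  try {
    const record = await collection.findOne({ path });
    if (record?.events) {
      actions = record.events;
    }
  } catch (_) {
    // lookup errors fall through to the empty default
  }

  return actions;
}
```
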
38 changes: 21 additions & 17 deletions src/core/utils/crawl-stream-slim.ts
@@ -32,32 +32,36 @@ export const crawlHttpStreamSlim = (
const crawlEvent = `crawl-${domainName(domain)}-${userId || 0}`;

const crawlListener = (source) => {
const data = source?.data;
setImmediate(() => {
const data = source?.data;

// only send when true
if (data) {
// trim data for sending minimally
if (onlyData) {
data.pageLoadTime = null;
data.issues = null;
// only send when true
if (data) {
// trim data for sending minimally
if (onlyData) {
data.pageLoadTime = null;
data.issues = null;
}
res.raw.write(`${JSON.stringify(data)},`);
}
res.raw.write(`${JSON.stringify(data)},`);
}
});
};

crawlEmitter.on(crawlEvent, crawlListener);

const crawlComplete = () => {
crawlTrackingEmitter.off(crawlEvent, crawlListener);
setImmediate(() => {
crawlTrackingEmitter.off(crawlEvent, crawlListener);

if (client && client.includes("a11ywatch_cli/")) {
// send extra item for trailing comma handler
res.raw.write(`${JSON.stringify({ url: "", domain: "" })}`, () => {
if (client && client.includes("a11ywatch_cli/")) {
// send extra item for trailing comma handler
res.raw.write(`${JSON.stringify({ url: "", domain: "" })}`, () => {
resolve(true);
});
} else {
resolve(true);
});
} else {
resolve(true);
}
}
});
};

crawlTrackingEmitter.once(
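
Here the stream listener's work is deferred with `setImmediate` rather than `process.nextTick`: setImmediate callbacks run in the event loop's check phase, after pending I/O callbacks, which gives the response stream's own I/O a chance to run between bursts of crawl events. A rough sketch of that pattern, using a fake response object in place of the Fastify reply and an emitter that stands in for the crawl emitter.

```ts
import { EventEmitter } from "node:events";

// Stand-ins for the crawl emitter and the HTTP response stream (illustrative only).
const crawlEmitter = new EventEmitter();
const chunks: string[] = [];
const res = { raw: { write: (chunk: string) => chunks.push(chunk) } };

// Deferring the write with setImmediate moves it off the current tick,
// so emitting many crawl events does not monopolize the synchronous path.
const crawlListener = (source: { data?: { url: string; issues?: unknown } }) => {
  setImmediate(() => {
    const data = source?.data;
    if (data) {
      data.issues = null; // trim payload for the slim stream
      res.raw.write(`${JSON.stringify(data)},`);
    }
  });
};

crawlEmitter.on("crawl-example.com-0", crawlListener);
crawlEmitter.emit("crawl-example.com-0", { data: { url: "https://example.com" } });

// This runs after the listener's deferred write, so the chunk is present.
setImmediate(() => console.log(chunks));
```
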
