diff --git a/package.json b/package.json index 3c195a81..686294e0 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@a11ywatch/core", - "version": "0.5.44", + "version": "0.5.46", "description": "a11ywatch central api", "main": "./server.js", "scripts": { diff --git a/src/core/controllers/websites/set/add-website.ts b/src/core/controllers/websites/set/add-website.ts index a8dc35ae..be21f8c5 100644 --- a/src/core/controllers/websites/set/add-website.ts +++ b/src/core/controllers/websites/set/add-website.ts @@ -130,10 +130,10 @@ export const addWebsite = async ({ await watcherCrawl({ url: url, userId, - scan: true, robots, subdomains: subdomainsEnabled, tld: tldEnabled, + scan: true, }); } }); diff --git a/src/core/mutations.ts b/src/core/mutations.ts index 4807b5db..4a096f5d 100644 --- a/src/core/mutations.ts +++ b/src/core/mutations.ts @@ -56,9 +56,9 @@ export const Mutation = { await watcherCrawl({ url: url, userId: keyid, - scan: true, subdomains: website?.subdomains, tld: website?.tld, + scan: true, }); }); return { diff --git a/src/core/utils/crawl-stream-slim.ts b/src/core/utils/crawl-stream-slim.ts index ec16daae..a8a9ed90 100644 --- a/src/core/utils/crawl-stream-slim.ts +++ b/src/core/utils/crawl-stream-slim.ts @@ -19,10 +19,10 @@ export const crawlHttpStreamSlim = ( setImmediate(async () => { await watcherCrawl({ url, - scan: true, userId, subdomains: !!subdomains, tld: !!tld, + scan: true, }); }); diff --git a/src/core/utils/crawl-stream.ts b/src/core/utils/crawl-stream.ts index efea86cb..c33175ad 100644 --- a/src/core/utils/crawl-stream.ts +++ b/src/core/utils/crawl-stream.ts @@ -23,10 +23,10 @@ export const crawlHttpStream = ( setImmediate(async () => { await watcherCrawl({ url, - scan: true, userId, subdomains: !!subdomains, tld: !!tld, + scan: true, }); }); @@ -54,7 +54,7 @@ export const crawlHttpStream = ( crawlTrackingEmitter.once( `crawl-complete-${getKey(domain, undefined, userId)}`, () => { - // send extra item for trailing 
comma handler + // send extra item for trailing comma handler (non-RPC) if (client && client.includes("a11ywatch_cli/")) { res.raw.write( `${JSON.stringify({ diff --git a/src/proto/calls/core-crawl.ts b/src/proto/calls/core-crawl.ts index c028c5ca..b7ad7a3e 100644 --- a/src/proto/calls/core-crawl.ts +++ b/src/proto/calls/core-crawl.ts @@ -41,10 +41,10 @@ export const crawlStreaming = ( setImmediate(async () => { await watcherCrawl({ url, - scan: true, userId, subdomains: !!subdomains, tld: !!tld, + scan: true, }); }); diff --git a/src/proto/calls/scan-end.ts b/src/proto/calls/scan-end.ts index 7276831e..e9fb95e7 100644 --- a/src/proto/calls/scan-end.ts +++ b/src/proto/calls/scan-end.ts @@ -8,7 +8,6 @@ export const scanEnd = async ( callback: sendUnaryData ) => { await crawlTrackerComplete(call.request); // TODO: remove - fully handled via events - crawlTrackingEmitter.emit("crawl-complete", call.request); callback(null, {}); diff --git a/src/queues/crawl/handle.ts b/src/queues/crawl/handle.ts index 5258f712..f85583b9 100644 --- a/src/queues/crawl/handle.ts +++ b/src/queues/crawl/handle.ts @@ -42,7 +42,7 @@ if ( ) { cwLimit = Number(process.env.CRAWL_QUEUE_LIMIT); } else { - cwLimit = Math.max(3 * (cpus().length || 1), 4); + cwLimit = Math.max(4 * (cpus().length || 1), 4); } // crawl queue handler