diff --git a/package.json b/package.json
index b77a1aff..6588cf0d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@a11ywatch/core",
-  "version": "0.5.48",
+  "version": "0.5.49",
   "description": "a11ywatch central api",
   "main": "./server.js",
   "scripts": {
diff --git a/src/core/actions/index.ts b/src/core/actions/index.ts
index 1385094f..13a561a2 100644
--- a/src/core/actions/index.ts
+++ b/src/core/actions/index.ts
@@ -1,3 +1,2 @@
-export { crawlWebsite } from "./accessibility/crawl-group";
 export { crawlPage, crawlMultiSite } from "./accessibility/crawl";
 export { scanWebsite } from "./accessibility/scan";
diff --git a/src/core/controllers/websites/set/add-website.ts b/src/core/controllers/websites/set/add-website.ts
index be21f8c5..557b4fb7 100644
--- a/src/core/controllers/websites/set/add-website.ts
+++ b/src/core/controllers/websites/set/add-website.ts
@@ -15,6 +15,7 @@ import { getWebsite } from "../find";
 import { getUser } from "../../users";
 import { watcherCrawl } from "../../../actions/accessibility/watcher_crawl";
 import { connect } from "../../../../database";
+import { SUPER_MODE } from "../../../../config/config";
 
 // used on mutations performs a website created following a multi-site scan if enabled
 export const addWebsite = async ({
@@ -74,8 +75,8 @@ export const addWebsite = async ({
 
   const actionsEnabled = actions && Array.isArray(actions) && actions.length;
 
-  const subdomainsEnabled = subdomains && user.role >= 1;
-  const tldEnabled = tld && user.role >= 2;
+  const subdomainsEnabled = subdomains && (SUPER_MODE || user.role >= 1);
+  const tldEnabled = tld && (SUPER_MODE || user.role >= 2);
 
   const website = makeWebsite({
     userId,
diff --git a/src/queues/crawl/handle.ts b/src/queues/crawl/handle.ts
index 07464b08..667859bb 100644
--- a/src/queues/crawl/handle.ts
+++ b/src/queues/crawl/handle.ts
@@ -1,9 +1,10 @@
 import type { queueAsPromised } from "fastq";
 import fastq from "fastq";
 import { cpus } from "os";
-import { crawlWebsite } from "../../core/actions";
+import { crawlWebsite } from "../../core/actions/accessibility/crawl-group";
 import { setWebsiteScore } from "../../core/utils/stats/score";
 import type { Method } from "../../database/config";
+import type { ResponseModel } from "../../core/models/response/types";
 
 interface Meta {
   method?: Method;
@@ -16,6 +17,11 @@ type Task = {
   meta?: Meta;
 };
 
+// the async worker to use for crawling pages
+async function asyncWorker(arg: Task): Promise<ResponseModel> {
+  return await crawlWebsite(arg);
+}
+
 // the async worker to use for completed crawl actions. TODO: remove for collection appending raw value to score.
 async function asyncWorkerCrawlComplete(arg: Task): Promise<void> {
   const { userId, meta } = arg;
@@ -36,11 +42,11 @@ if (
 ) {
   cwLimit = Number(process.env.CRAWL_QUEUE_LIMIT);
 } else {
-  cwLimit = Math.max(10 * (cpus().length || 1), 4);
+  cwLimit = Math.max(8 * (cpus().length || 1), 4);
 }
 
 // crawl queue handler
-export const q: queueAsPromised<Task> = fastq.promise(crawlWebsite, cwLimit);
+export const q: queueAsPromised<Task> = fastq.promise(asyncWorker, cwLimit);
 
 // determine when crawl completed.
 export const qWebsiteWorker: queueAsPromised<Task> = fastq.promise(