diff --git a/deployment_config/prod_vars.yml b/deployment_config/prod_vars.yml index 665e806d06..14d1d96c84 100644 --- a/deployment_config/prod_vars.yml +++ b/deployment_config/prod_vars.yml @@ -1,6 +1,6 @@ env: prod web_instances: 3 -web_memory: 3GB +web_memory: 4GB worker_instances: 2 worker_memory: 1GB similarity_api_instances: 1 diff --git a/email_templates/changes_requested_by_manager/html.pug b/email_templates/changes_requested_by_manager/html.pug index 2f5b2b8383..c12e4f7a82 100644 --- a/email_templates/changes_requested_by_manager/html.pug +++ b/email_templates/changes_requested_by_manager/html.pug @@ -2,7 +2,7 @@ style include ../email.css p Hello, p -p #{managerName} requested changed to report #{displayId}. +p #{managerName} requested changes to report #{displayId}. if comments p #{managerName} provided the following comments: blockquote !{comments} diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js index 1569eb760f..6f98a9dcd8 100644 --- a/frontend/src/fetchers/activityReports.js +++ b/frontend/src/fetchers/activityReports.js @@ -102,6 +102,12 @@ export const getRecipients = async (region) => { return recipients.json(); }; +export const getRecipientsForExistingAR = async (reportId) => { + const url = join(activityReportUrl, `${reportId}`, 'activity-recipients'); + const recipients = await get(url); + return recipients.json(); +}; + export const getGoals = async (grantIds) => { const params = grantIds.map((grantId) => `grantIds=${grantId}`); const url = join(activityReportUrl, 'goals', `?${params.join('&')}`); diff --git a/frontend/src/pages/ActivityReport/__tests__/index.js b/frontend/src/pages/ActivityReport/__tests__/index.js index 017b0ca7f4..79a2da463d 100644 --- a/frontend/src/pages/ActivityReport/__tests__/index.js +++ b/frontend/src/pages/ActivityReport/__tests__/index.js @@ -42,6 +42,7 @@ describe('ActivityReport', () => { beforeEach(() => { 
fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients', recipients); fetchMock.get('/api/activity-reports/groups?region=1', [{ id: 110, name: 'Group 1', @@ -155,6 +156,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -226,6 +228,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -363,6 +366,7 @@ describe('ActivityReport', () => { describe('resetToDraft', () => { it('navigates to the correct page', async () => { + fetchMock.get('/api/activity-reports/3/activity-recipients', recipients); const data = formData(); // load the report fetchMock.get('/api/activity-reports/3', { diff --git a/frontend/src/pages/ActivityReport/__tests__/localStorage.js b/frontend/src/pages/ActivityReport/__tests__/localStorage.js index 206e22e88d..dac60ae551 100644 --- a/frontend/src/pages/ActivityReport/__tests__/localStorage.js +++ b/frontend/src/pages/ActivityReport/__tests__/localStorage.js @@ -37,6 +37,7 @@ describe('Local storage fallbacks', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients', recipients); fetchMock.get('/api/activity-reports/groups?region=1', []); fetchMock.get('/api/users/collaborators?region=1', []); 
fetchMock.get('/api/activity-reports/approvers?region=1', []); diff --git a/frontend/src/pages/ActivityReport/index.js b/frontend/src/pages/ActivityReport/index.js index 41bded708a..323bb0f5d8 100644 --- a/frontend/src/pages/ActivityReport/index.js +++ b/frontend/src/pages/ActivityReport/index.js @@ -34,13 +34,14 @@ import { submitReport, saveReport, getReport, - getRecipients, + getRecipientsForExistingAR, createReport, getCollaborators, getApprovers, reviewReport, resetToDraft, getGroupsForActivityReport, + getRecipients, } from '../../fetchers/activityReports'; import useLocalStorage, { setConnectionActiveWithError } from '../../hooks/useLocalStorage'; import NetworkContext, { isOnlineMode } from '../../NetworkContext'; @@ -277,8 +278,16 @@ function ActivityReport({ }; } + const getRecips = async () => { + if (reportId.current && reportId.current !== 'new') { + return getRecipientsForExistingAR(reportId.current); + } + + return getRecipients(report.regionId); + }; + const apiCalls = [ - getRecipients(report.regionId), + getRecips(), getCollaborators(report.regionId), getApprovers(report.regionId), getGroupsForActivityReport(report.regionId), diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 745048828c..0568aafb5a 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -13,7 +13,6 @@ import { sequelize } from '../models'; * @returns {Promise} - The ID of the stored request error, or null if storing failed. */ async function logRequestError(req, operation, error, logContext) { - // Check if error logging should be suppressed if ( operation !== 'SequelizeError' && process.env.SUPPRESS_ERROR_LOGGING @@ -21,26 +20,19 @@ async function logRequestError(req, operation, error, logContext) { ) { return 0; } + if (!error) { + return 0; + } try { - // Prepare the response body for storage const responseBody = typeof error === 'object' - && error !== null ? { ...error, errorStack: error?.stack } : error; + ? 
{ ...error, errorStack: error?.stack } + : error; - // Prepare the request body for storage const requestBody = { - ...(req.body - && typeof req.body === 'object' - && Object.keys(req.body).length > 0 - && { body: req.body }), - ...(req.params - && typeof req.params === 'object' - && Object.keys(req.params).length > 0 - && { params: req.params }), - ...(req.query - && typeof req.query === 'object' - && Object.keys(req.query).length > 0 - && { query: req.query }), + ...(req.body && typeof req.body === 'object' && Object.keys(req.body).length > 0 && { body: req.body }), + ...(req.params && typeof req.params === 'object' && Object.keys(req.params).length > 0 && { params: req.params }), + ...(req.query && typeof req.query === 'object' && Object.keys(req.query).length > 0 && { query: req.query }), }; // Create a request error in the database and get its ID @@ -69,7 +61,6 @@ async function logRequestError(req, operation, error, logContext) { * @param {Object} logContext - The context for logging. */ export const handleError = async (req, res, error, logContext) => { - // Check if the environment is development if (process.env.NODE_ENV === 'development') { logger.error(error); } @@ -77,7 +68,6 @@ export const handleError = async (req, res, error, logContext) => { let operation; let label; - // Check if the error is an instance of Sequelize.Error if (error instanceof Sequelize.Error) { operation = 'SequelizeError'; label = 'Sequelize error'; @@ -86,27 +76,17 @@ export const handleError = async (req, res, error, logContext) => { label = 'UNEXPECTED ERROR'; } - // eslint-disable-next-line max-len - if (error instanceof Sequelize.ConnectionError || error instanceof Sequelize.ConnectionAcquireTimeoutError) { + if (error instanceof Sequelize.ConnectionError + || error instanceof Sequelize.ConnectionAcquireTimeoutError) { const pool = sequelize?.connectionManager?.pool; - const usedConnections = pool ? pool?.used?.length : null; - const waitingConnections = pool ? 
pool?.pending?.length : null; + const usedConnections = pool ? pool.used.length : null; + const waitingConnections = pool ? pool.pending.length : null; logger.error(`${logContext.namespace} Connection Pool: Used Connections - ${usedConnections}, Waiting Connections - ${waitingConnections}`); } - - // Log the request error and get the error ID const requestErrorId = await logRequestError(req, operation, error, logContext); - let errorMessage; + const errorMessage = error?.stack || error; - // Check if the error has a stack property - if (error?.stack) { - errorMessage = error.stack; - } else { - errorMessage = error; - } - - // Log the error message with the error ID if available if (requestErrorId) { logger.error(`${logContext.namespace} - id: ${requestErrorId} ${label} - ${errorMessage}`); } else { @@ -117,12 +97,11 @@ export const handleError = async (req, res, error, logContext) => { }; /** - * Handles any unexpected errors in an error handler catch block - * - * @param {*} req - request - * @param {*} res - response - * @param {*} error - error - * @param {*} logContext - useful data for logging + * Handles any unexpected errors in an error handler catch block. + * @param {Object} req - The request object. + * @param {Object} res - The response object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. */ export function handleUnexpectedErrorInCatchBlock(req, res, error, logContext) { logger.error(`${logContext.namespace} - Unexpected error in catch block - ${error}`); @@ -131,11 +110,10 @@ export function handleUnexpectedErrorInCatchBlock(req, res, error, logContext) { /** * Handles API errors. Saves data in the RequestErrors table and sends 500 error. - * - * @param {*} req - request - * @param {*} res - response - * @param {*} error - error - * @param {*} logContext - useful data for logging + * @param {Object} req - The request object. + * @param {Object} res - The response object. 
+ * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. */ export default async function handleErrors(req, res, error, logContext) { try { @@ -144,3 +122,116 @@ export default async function handleErrors(req, res, error, logContext) { handleUnexpectedErrorInCatchBlock(req, res, e, logContext); } } + +/** + * Logs a worker error and stores it in the database. + * @param {Object} job - The job object. + * @param {string} operation - The operation name. + * @param {Error} error - The error object. + * @param {Object} logContext - The logging context. + * @returns {Promise} - The ID of the stored request error, or null if storing failed. + */ +const logWorkerError = async (job, operation, error, logContext) => { + if ( + operation !== 'SequelizeError' + && process.env.SUPPRESS_ERROR_LOGGING + && process.env.SUPPRESS_ERROR_LOGGING.toLowerCase() === 'true' + ) { + return 0; + } + if (!error) { + return 0; + } + + try { + const responseBody = typeof error === 'object' + ? { ...error, errorStack: error?.stack } + : error; + + const requestBody = { + ...(job.data && typeof job.data === 'object' && Object.keys(job.data).length > 0 && { data: job.data }), + }; + + const requestErrorId = await createRequestError({ + operation, + uri: job.queue.name, + method: 'PROCESS_JOB', + requestBody, + responseBody, + responseCode: INTERNAL_SERVER_ERROR, + }); + + return requestErrorId; + } catch (e) { + logger.error(`${logContext.namespace} - Sequelize error - unable to store RequestError - ${e}`); + } + + return null; +}; + +/** + * Handles errors in a worker job. + * @param {Object} job - The job object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. 
+ */ +export const handleWorkerError = async (job, error, logContext) => { + if (process.env.NODE_ENV === 'development') { + logger.error(error); + } + + let operation; + let label; + + if (error instanceof Sequelize.Error) { + operation = 'SequelizeError'; + label = 'Sequelize error'; + } else { + operation = 'UNEXPECTED_ERROR'; + label = 'UNEXPECTED ERROR'; + } + + if (error instanceof Sequelize.ConnectionError + || error instanceof Sequelize.ConnectionAcquireTimeoutError) { + const pool = sequelize?.connectionManager?.pool; + const usedConnections = pool ? pool.used.length : null; + const waitingConnections = pool ? pool.pending.length : null; + logger.error(`${logContext.namespace} Connection Pool: Used Connections - ${usedConnections}, Waiting Connections - ${waitingConnections}`); + } + + const requestErrorId = await logWorkerError(job, operation, error, logContext); + + const errorMessage = error?.stack || error; + + if (requestErrorId) { + logger.error(`${logContext.namespace} - id: ${requestErrorId} ${label} - ${errorMessage}`); + } else { + logger.error(`${logContext.namespace} - ${label} - ${errorMessage}`); + } + + // Handle job failure as needed +}; + +/** + * Handles any unexpected errors in a worker error handler catch block. + * @param {Object} job - The job object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. + */ +export const handleUnexpectedWorkerError = (job, error, logContext) => { + logger.error(`${logContext.namespace} - Unexpected error in catch block - ${error}`); +}; + +/** + * Handles worker job errors. Logs the error and stores it in the database. + * @param {Object} job - The job object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. 
+ */ +export const handleWorkerErrors = async (job, error, logContext) => { + try { + await handleWorkerError(job, error, logContext); + } catch (e) { + handleUnexpectedWorkerError(job, e, logContext); + } +}; diff --git a/src/lib/apiErrorHandler.test.js b/src/lib/apiErrorHandler.test.js index 59506f6e7e..fca179fedd 100644 --- a/src/lib/apiErrorHandler.test.js +++ b/src/lib/apiErrorHandler.test.js @@ -1,7 +1,8 @@ import Sequelize from 'sequelize'; import { INTERNAL_SERVER_ERROR } from 'http-codes'; import db, { RequestErrors } from '../models'; -import handleErrors, { handleUnexpectedErrorInCatchBlock } from './apiErrorHandler'; +import handleErrors, { handleUnexpectedErrorInCatchBlock, handleWorkerErrors, handleUnexpectedWorkerError } from './apiErrorHandler'; +import { auditLogger as logger } from '../logger'; const mockUser = { id: 47, @@ -31,16 +32,29 @@ const mockResponse = { })), }; +const mockJob = { + data: { jobDetail: 'example job detail' }, + queue: { name: 'exampleQueue' }, +}; + const mockSequelizeError = new Sequelize.Error('Not all ok here'); const mockLogContext = { namespace: 'TEST', }; +jest.mock('../logger', () => ({ + auditLogger: { + error: jest.fn(), + }, +})); + describe('apiErrorHandler', () => { beforeEach(async () => { await RequestErrors.destroy({ where: {} }); + jest.clearAllMocks(); }); + afterAll(async () => { await RequestErrors.destroy({ where: {} }); await db.sequelize.close(); @@ -54,6 +68,7 @@ describe('apiErrorHandler', () => { const requestErrors = await RequestErrors.findAll(); expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); }); it('handles a generic error', async () => { @@ -65,9 +80,10 @@ describe('apiErrorHandler', () => { const requestErrors = await RequestErrors.findAll(); expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('UNEXPECTED_ERROR'); }); - it('can handle unexpected error in catch block', async () => { + it('handles unexpected 
error in catch block', async () => { const mockUnexpectedErr = new Error('Unexpected error'); handleUnexpectedErrorInCatchBlock(mockRequest, mockResponse, mockUnexpectedErr, mockLogContext); @@ -77,4 +93,99 @@ describe('apiErrorHandler', () => { expect(requestErrors.length).toBe(0); }); + + it('handles error suppression when SUPPRESS_ERROR_LOGGING is true', async () => { + process.env.SUPPRESS_ERROR_LOGGING = 'true'; + const mockGenericError = new Error('Unknown error'); + await handleErrors(mockRequest, mockResponse, mockGenericError, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + + delete process.env.SUPPRESS_ERROR_LOGGING; + }); + + it('logs connection pool information on connection errors', async () => { + const mockConnectionError = new Sequelize.ConnectionError(new Error('Connection error')); + await handleErrors(mockRequest, mockResponse, mockConnectionError, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('Connection Pool: Used Connections')); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); + }); + + it('handles worker errors', async () => { + const mockWorkerError = new Error('Worker error'); + await handleWorkerErrors(mockJob, mockWorkerError, mockLogContext); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('UNEXPECTED_ERROR'); + }); + + it('handles worker Sequelize errors', async () => { + const mockSequelizeWorkerError = new Sequelize.Error('Sequelize worker error'); + await handleWorkerErrors(mockJob, mockSequelizeWorkerError, mockLogContext); + + const requestErrors = await 
RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); + }); + + it('handles unexpected worker error in catch block', async () => { + const mockUnexpectedWorkerError = new Error('Unexpected worker error'); + handleUnexpectedWorkerError(mockJob, mockUnexpectedWorkerError, mockLogContext); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + }); + + it('handles null error', async () => { + await handleErrors(mockRequest, mockResponse, null, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + }); + + it('handles undefined error', async () => { + await handleErrors(mockRequest, mockResponse, undefined, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + }); + + it('handles specific Sequelize connection acquire timeout error', async () => { + const mockConnectionAcquireTimeoutError = new Sequelize + .ConnectionAcquireTimeoutError(new Error('Connection acquire timeout error')); + await handleErrors( + mockRequest, + mockResponse, + mockConnectionAcquireTimeoutError, + mockLogContext, + ); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('Connection Pool: Used Connections')); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); + }); }); diff --git a/src/lib/mailer/index.js b/src/lib/mailer/index.js index de4b2091de..412e882969 100644 --- a/src/lib/mailer/index.js +++ b/src/lib/mailer/index.js @@ -1,4 +1,5 @@ /* eslint-disable @typescript-eslint/return-await */ 
+import httpContext from 'express-http-context'; import { createTransport } from 'nodemailer'; import { uniq } from 'lodash'; import { QueryTypes } from 'sequelize'; @@ -17,6 +18,8 @@ import { } from '../../services/activityReports'; import { userById } from '../../services/users'; import logEmailNotification from './logNotifications'; +import transactionQueueWrapper from '../../workers/transactionWrapper'; +import referenceData from '../../workers/referenceData'; export const notificationQueue = newQueue('notifications'); @@ -355,6 +358,7 @@ export const collaboratorAssignedNotification = (report, newCollaborators) => { const data = { report, newCollaborator: collaborator.user, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.COLLABORATOR_ADDED, data); } catch (err) { @@ -370,6 +374,7 @@ export const approverAssignedNotification = (report, newApprovers) => { const data = { report, newApprover: approver, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.SUBMITTED, data); } catch (err) { @@ -385,6 +390,7 @@ export const reportApprovedNotification = (report, authorWithSetting, collabsWit report, authorWithSetting, collabsWithSettings, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.APPROVED, data); } catch (err) { @@ -408,6 +414,7 @@ export const programSpecialistRecipientReportApprovedNotification = ( report, programSpecialists, recipients, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, data); } catch (err) { @@ -482,6 +489,7 @@ export const trVisionAndGoalComplete = async (event) => { emailTo: [user.email], debugMessage: `MAILER: Notifying ${user.email} that a POC completed work on TR ${event.id} | ${eId}`, templatePath: 'tr_poc_vision_goal_complete', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, data); @@ -514,6 +522,7 @@ export const trPocSessionComplete = async (event) => { emailTo: [user.email], debugMessage: `MAILER: 
Notifying ${user.email} that a POC completed work on TR ${event.id}`, templatePath: 'tr_poc_session_complete', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, data); @@ -550,6 +559,7 @@ export const trSessionCreated = async (event) => { ...event, displayId: eventId, }, + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, data); @@ -582,6 +592,7 @@ export const trSessionCompleted = async (event) => { emailTo: [user.email], debugMessage: `MAILER: Notifying ${user.email} that a session was completed for TR ${event.id}`, templatePath: 'tr_session_completed', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, data); })); @@ -619,6 +630,7 @@ export const trCollaboratorAdded = async ( emailTo: [collaborator.email], templatePath: 'tr_collaborator_added', debugMessage: `MAILER: Notifying ${collaborator.email} that they were added as a collaborator to TR ${report.id}`, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, data); @@ -651,6 +663,7 @@ export const trPocAdded = async ( emailTo: [poc.email], debugMessage: `MAILER: Notifying ${poc.email} that they were added as a collaborator to TR ${report.id}`, templatePath: 'tr_poc_added', + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, data); @@ -686,6 +699,7 @@ export const trPocEventComplete = async ( reportPath, debugMessage: `MAILER: Notifying ${user.email} that TR ${event.id} is complete`, templatePath: 'tr_event_complete', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, data); @@ -708,6 +722,7 @@ export const changesRequestedNotification = ( approver, authorWithSetting, collabsWithSettings, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.NEEDS_ACTION, data); } catch (err) { @@ -742,6 +757,7 @@ export async function 
collaboratorDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.COLLABORATOR_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.COLLABORATOR_DIGEST, data); return data; @@ -779,6 +795,7 @@ export async function changesRequestedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, data); @@ -817,6 +834,7 @@ export async function submittedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.SUBMITTED_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.SUBMITTED_DIGEST, data); @@ -856,6 +874,7 @@ export async function approvedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.APPROVED_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.APPROVED_DIGEST, data); @@ -918,6 +937,7 @@ export async function recipientApprovedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.RECIPIENT_APPROVED_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.RECIPIENT_APPROVED_DIGEST, data); @@ -992,51 +1012,136 @@ export const processNotificationQueue = () => { notificationQueue.on('completed', onCompletedNotification); increaseListeners(notificationQueue, 10); - notificationQueue.process(EMAIL_ACTIONS.NEEDS_ACTION, notifyChangesRequested); - notificationQueue.process(EMAIL_ACTIONS.SUBMITTED, notifyApproverAssigned); - notificationQueue.process(EMAIL_ACTIONS.APPROVED, notifyReportApproved); - notificationQueue.process(EMAIL_ACTIONS.COLLABORATOR_ADDED, notifyCollaboratorAssigned); - notificationQueue.process(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, notifyRecipientReportApproved); + notificationQueue.process( + EMAIL_ACTIONS.NEEDS_ACTION, + transactionQueueWrapper( + notifyChangesRequested, + EMAIL_ACTIONS.NEEDS_ACTION, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.SUBMITTED, + transactionQueueWrapper( + notifyApproverAssigned, 
EMAIL_ACTIONS.SUBMITTED, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.APPROVED, + transactionQueueWrapper( + notifyReportApproved, + EMAIL_ACTIONS.APPROVED, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.COLLABORATOR_ADDED, + transactionQueueWrapper( + notifyCollaboratorAssigned, + EMAIL_ACTIONS.COLLABORATOR_ADDED, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, + transactionQueueWrapper( + notifyRecipientReportApproved, + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, + ), + ); - notificationQueue.process(EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.SUBMITTED_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.APPROVED_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.COLLABORATOR_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, notifyDigest); + notificationQueue.process( + EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, + transactionQueueWrapper( + notifyDigest, + EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.SUBMITTED_DIGEST, + transactionQueueWrapper( + notifyDigest, + EMAIL_ACTIONS.SUBMITTED_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.APPROVED_DIGEST, + transactionQueueWrapper( + notifyDigest, + EMAIL_ACTIONS.APPROVED_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.COLLABORATOR_DIGEST, + transactionQueueWrapper( + notifyDigest, + EMAIL_ACTIONS.COLLABORATOR_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, + transactionQueueWrapper( + notifyDigest, + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, + ), + ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, + ), ); notificationQueue.process( 
EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, - sendTrainingReportNotification, + transactionQueueWrapper( + sendTrainingReportNotification, + EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, + ), ); }; diff --git a/src/lib/mailer/index.test.js b/src/lib/mailer/index.test.js index 9562612b43..2d5b528fb5 100644 --- a/src/lib/mailer/index.test.js +++ b/src/lib/mailer/index.test.js @@ -132,7 +132,6 @@ const submittedReport = { ...reportObject, activityRecipients: [{ grantId: 1 }], submissionStatus: REPORT_STATUSES.SUBMITTED, - // calculatedStatus: REPORT_STATUSES.SUBMITTED, numberOfParticipants: 1, deliveryMethod: 'method', duration: 0, @@ -170,9 +169,10 @@ describe('mailer tests', () => { process.env = oldEnv; await db.sequelize.close(); }); + describe('Changes requested by manager', () => { it('Tests that an email is sent', async () => { - 
process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyChangesRequested({ data: { report: mockReport, @@ -189,12 +189,12 @@ describe('mailer tests', () => { ]); const message = JSON.parse(email.message); expect(message.subject).toBe(`Activity Report ${mockReport.displayId}: Changes requested`); - expect(message.text).toContain(`${mockManager.name} requested changed to report ${mockReport.displayId}.`); + expect(message.text).toContain(`${mockManager.name} requested changes to report ${mockReport.displayId}.`); expect(message.text).toContain(mockApprover.note); expect(message.text).toContain(reportPath); }); it('Tests that an email is not sent if no recipients', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyChangesRequested({ data: { report: mockReport, @@ -206,19 +206,20 @@ describe('mailer tests', () => { expect(email).toBe(null); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyChangesRequested({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyChangesRequested({ data: { report: mockReport }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Report Approved', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyReportApproved({ data: { report: mockReport, - approver: mockApprover, authorWithSetting: mockReport.author, collabsWithSettings: [mockCollaborator1, mockCollaborator2], }, @@ -235,11 +236,10 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that an email is not sent if no recipients', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await 
notifyReportApproved({ data: { report: mockReport, - approver: mockApprover, authorWithSetting: null, collabsWithSettings: [], }, @@ -247,15 +247,17 @@ describe('mailer tests', () => { expect(email).toBe(null); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyReportApproved({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyReportApproved({ data: { report: mockReport }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Program Specialists: Recipient Report Approved', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyRecipientReportApproved({ data: { report: mockReport, @@ -272,7 +274,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that an email is not sent if no program specialists/recipients', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyRecipientReportApproved({ data: { report: mockReport, @@ -283,19 +285,21 @@ describe('mailer tests', () => { expect(email).toBe(null); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyRecipientReportApproved({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyRecipientReportApproved({ data: { report: mockReport, programSpecialists: [mockProgramSpecialist], recipients: [mockRecipient], }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Manager Approval Requested', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyApproverAssigned({ data: { 
report: mockReport, newApprover: mockApprover }, }, jsonTransport); @@ -309,15 +313,17 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - expect(notifyApproverAssigned({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyApproverAssigned({ data: { report: mockReport }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Add Collaborators', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyCollaboratorAssigned({ data: { report: mockReport, newCollaborator: mockNewCollaborator }, }, jsonTransport); @@ -331,16 +337,17 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - expect(notifyCollaboratorAssigned({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyCollaboratorAssigned({ data: { report: mockReport, newCollaborator: mockCollaborator1 }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); describe('sendTrainingReportNotification', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; process.env.CI = ''; const data = { emailTo: [mockNewCollaborator.email], @@ -366,7 +373,7 @@ describe('mailer tests', () => { expect(message.text).toContain('/asdf/'); }); it('Honors no send', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; process.env.CI = ''; const data = { emailTo: [`no-send_${mockNewCollaborator.email}`], @@ -385,7 +392,7 @@ describe('mailer tests', () => { expect(email).toBeNull(); 
}); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; + process.env.SEND_NOTIFICATIONS = 'false'; const data = { emailTo: [mockNewCollaborator.email], templatePath: 'tr_session_completed', @@ -397,14 +404,15 @@ describe('mailer tests', () => { displayId: 'mockReport-1', }, }; - await expect(sendTrainingReportNotification({ + const email = await sendTrainingReportNotification({ data, - }, jsonTransport)).resolves.toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); it('Tests that emails are not sent on CI', async () => { - process.env.SEND_NOTIFICATIONS = true; - process.env.CI = true; + process.env.SEND_NOTIFICATIONS = 'true'; + process.env.CI = 'true'; const data = { emailTo: [mockNewCollaborator.email], templatePath: 'tr_session_completed', @@ -416,15 +424,16 @@ describe('mailer tests', () => { displayId: 'mockReport-1', }, }; - await expect(sendTrainingReportNotification({ + const email = await sendTrainingReportNotification({ data, - }, jsonTransport)).resolves.toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); describe('Collaborators digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -451,7 +460,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -476,7 +485,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await 
notifyDigest({ data: { user: mockNewCollaborator, @@ -501,7 +510,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no new collaborator notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -523,21 +532,22 @@ describe('mailer tests', () => { }); it('tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyDigest({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyDigest({ data: { user: mockNewCollaborator, reports: [], type: EMAIL_ACTIONS.COLLABORATOR_DIGEST, freq: EMAIL_DIGEST_FREQ.DAILY, }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); describe('Changes requested digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -564,7 +574,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -589,7 +599,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -614,7 +624,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no changes requested 
notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -638,7 +648,7 @@ describe('mailer tests', () => { describe('Submitted digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -665,7 +675,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -690,7 +700,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -715,7 +725,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no submitted notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -739,7 +749,7 @@ describe('mailer tests', () => { describe('Approved digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -766,7 +776,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - 
process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -791,7 +801,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -816,7 +826,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no approved reports notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -840,7 +850,7 @@ describe('mailer tests', () => { describe('Program Specialist: Report approved digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [mockReport], @@ -865,7 +875,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [mockReport], @@ -890,7 +900,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [mockReport], @@ -915,7 +925,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no approved reports notifications', async () => { - 
process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [], diff --git a/src/lib/maintenance/common.js b/src/lib/maintenance/common.js index 3f4849eddd..5bcb5239c4 100644 --- a/src/lib/maintenance/common.js +++ b/src/lib/maintenance/common.js @@ -5,6 +5,8 @@ const { MaintenanceLog } = require('../../models'); const { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } = require('../../constants'); const { auditLogger, logger } = require('../../logger'); const { default: LockManager } = require('../lockManager'); +const { default: transactionQueueWrapper } = require('../../workers/transactionWrapper'); +const { default: referenceData } = require('../../workers/referenceData'); const maintenanceQueue = newQueue('maintenance'); const maintenanceQueueProcessors = {}; @@ -103,7 +105,13 @@ const processMaintenanceQueue = () => { // Process each category in the queue using its corresponding processor Object.entries(maintenanceQueueProcessors) - .map(([category, processor]) => maintenanceQueue.process(category, processor)); + .map(([category, processor]) => maintenanceQueue.process( + category, + transactionQueueWrapper( + processor, + category, + ), + )); }; /** @@ -124,7 +132,7 @@ const enqueueMaintenanceJob = async ( if (category in maintenanceQueueProcessors) { try { // Add the job to the maintenance queue - maintenanceQueue.add(category, data); + maintenanceQueue.add(category, { ...data, ...referenceData() }); } catch (err) { // Log any errors that occur when adding the job to the queue auditLogger.error(err); diff --git a/src/lib/maintenance/common.test.js b/src/lib/maintenance/common.test.js index d4623cc750..f3f0a87f5b 100644 --- a/src/lib/maintenance/common.test.js +++ b/src/lib/maintenance/common.test.js @@ -21,6 +21,7 @@ const { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } = require('../../constants'); const { MaintenanceLog } = require('../../models'); const { auditLogger, logger } = 
require('../../logger'); +const { default: transactionWrapper } = require('../../workers/transactionWrapper'); jest.mock('../../models', () => ({ MaintenanceLog: { @@ -113,13 +114,25 @@ describe('Maintenance Queue', () => { addQueueProcessor(category1, processor1); addQueueProcessor(category2, processor2); processMaintenanceQueue(); + expect(maintenanceQueue.process).toHaveBeenCalledTimes(3); - expect(maintenanceQueue.process).toHaveBeenCalledWith(category1, processor1); - expect(maintenanceQueue.process).toHaveBeenCalledWith(category2, processor2); expect(maintenanceQueue.process) - .toHaveBeenCalledWith( + .toHaveBeenNthCalledWith( + 1, MAINTENANCE_CATEGORY.MAINTENANCE, - maintenance, + expect.any(Function), + ); + expect(maintenanceQueue.process) + .toHaveBeenNthCalledWith( + 2, + category1, + expect.any(Function), + ); + expect(maintenanceQueue.process) + .toHaveBeenNthCalledWith( + 3, + category2, + expect.any(Function), ); }); }); @@ -129,7 +142,15 @@ describe('Maintenance Queue', () => { jest.clearAllMocks(); }); it('should add a job to the maintenance queue if a processor is defined for the given category', () => { - const data = { test: 'enqueueMaintenanceJob - should add a job to the maintenance queue if a processor is defined for the given category' }; + const data = { + test: 'enqueueMaintenanceJob - should add a job to the maintenance queue if a processor is defined for the given category', + referenceData: { + impersonationId: undefined, + sessionSig: undefined, + transactionId: undefined, + userId: undefined, + }, + }; const category = 'test-category'; const processor = jest.fn(); addQueueProcessor(category, processor); diff --git a/src/migrations/20240715000000-fix-old-session-regions.js b/src/migrations/20240715000000-fix-old-session-regions.js new file mode 100644 index 0000000000..16b9e24352 --- /dev/null +++ b/src/migrations/20240715000000-fix-old-session-regions.js @@ -0,0 +1,45 @@ +const { + prepMigration, +} = require('../lib/migration'); + 
+module.exports = { + up: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + await queryInterface.sequelize.query(/* sql */` + + -- One very old session lacks the regionId value + -- This finds and sets it + DROP TABLE IF EXISTS sr_updates; + CREATE TEMP TABLE sr_updates + AS + WITH updater AS ( + UPDATE "SessionReportPilots" srp + SET data = JSONB_SET(srp.data,'{regionId}',TO_JSONB(erp."regionId")) + FROM "EventReportPilots" erp + WHERE erp.id = srp."eventId" + AND srp.data->>'regionId' = '' + RETURNING + srp.id srpid, + erp."regionId" + ) SELECT * FROM updater + ; + + SELECT * FROM sr_updates; + -- Looks like: + ---------------------- + -- srpid | regionId + -- -------+---------- + -- 2 | 3 + `, { transaction }); + }, + ), + + down: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + // If we end up needing to revert this, it would be easier to use a separate + // migration using the txid (or a similar identifier) after it's already set + }, + ), +}; diff --git a/src/policies/generic.test.js b/src/policies/generic.test.js new file mode 100644 index 0000000000..2484353216 --- /dev/null +++ b/src/policies/generic.test.js @@ -0,0 +1,55 @@ +import Generic from './generic'; +import SCOPES from '../middleware/scopeConstants'; + +describe('Generic', () => { + const user = { + permissions: [ + { scopeId: SCOPES.READ_REPORTS, regionId: 1 }, + { scopeId: SCOPES.APPROVE_REPORTS, regionId: 2 }, + { scopeId: SCOPES.READ_WRITE_REPORTS, regionId: 3 }, + ], + }; + + test('should correctly initialize with user', () => { + const generic = new Generic(user); + expect(generic.user).toBe(user); + }); + + test('should return true if user can access a region', () => { + const generic = new Generic(user); + expect(generic.canAccessRegion(1)).toBe(true); + 
expect(generic.canAccessRegion(2)).toBe(true); + expect(generic.canAccessRegion(3)).toBe(true); + }); + + test('should return false if user cannot access a region', () => { + const generic = new Generic(user); + expect(generic.canAccessRegion(4)).toBe(false); + }); + + test('should filter regions based on user permissions', () => { + const generic = new Generic(user); + const regionList = [1, 2, 3, 4]; + const filteredRegions = generic.filterRegions(regionList); + expect(filteredRegions).toEqual([1, 2, 3]); + }); + + test('should return all accessible regions if region list is empty', () => { + const generic = new Generic(user); + const filteredRegions = generic.filterRegions([]); + expect(filteredRegions).toEqual([1, 2, 3]); + }); + + test('should return an empty array if user has no permissions', () => { + const emptyUser = { permissions: [] }; + const generic = new Generic(emptyUser); + const filteredRegions = generic.filterRegions([]); + expect(filteredRegions).toEqual([]); + }); + + test('should return all accessible regions', () => { + const generic = new Generic(user); + const accessibleRegions = generic.getAllAccessibleRegions(); + expect(accessibleRegions).toEqual([1, 2, 3]); + }); +}); diff --git a/src/policies/generic.ts b/src/policies/generic.ts new file mode 100644 index 0000000000..23c069098a --- /dev/null +++ b/src/policies/generic.ts @@ -0,0 +1,48 @@ +import SCOPES from '../middleware/scopeConstants'; + +interface Permission { + scopeId: number; + regionId: number; +} + +interface User { + permissions: Permission[]; + roles?: { name: string }[]; +} + +export default class Generic { + user: User; + + constructor(user: User) { + this.user = user; + } + + canAccessRegion(region: number): boolean { + // Check if the user has any scopeId for the given region + return this.user.permissions.some((permission) => ( + Object.values(SCOPES).includes(permission.scopeId) + && permission.regionId === region + )); + } + + filterRegions(regionList: number[]): 
number[] { + // If the list passed is empty, return all regions the user has rights for + if (regionList.length === 0) { + return this.getAllAccessibleRegions(); + } + + // Return the list with only the regions the user has a right for + return regionList.filter((region) => this.canAccessRegion(region)); + } + + getAllAccessibleRegions(): number[] { + // Return all regions the user has rights for + const accessibleRegions = new Set<number>(); + this.user.permissions.forEach((permission) => { + if (Object.values(SCOPES).includes(permission.scopeId)) { + accessibleRegions.add(permission.regionId); + } + }); + return Array.from(accessibleRegions); + } +} diff --git a/src/queries/class-goal-dataset.sql b/src/queries/class-goal-dataset.sql index 816ccc5ee6..a0af84f1da 100644 --- a/src/queries/class-goal-dataset.sql +++ b/src/queries/class-goal-dataset.sql @@ -1,19 +1,21 @@ /** -* This query collects all the goals. +* @name: Goals Report +* @description: This query collects all the goals. +* @defaultOutputName: goals_report * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. 
* The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean[] - true or false +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. 
+* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2022-07-01","2023-06-30"]',TRUE); */ SELECT @@ -53,7 +55,7 @@ AND FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.grantNumbers', true), ''),'[]')::json) AS value )) AND --- Filter for status if ssdi.goals is defined +-- Filter for goals if ssdi.goals is defined (NULLIF(current_setting('ssdi.goals', true), '') IS NULL OR g.name in ( SELECT value::text AS my_array @@ -76,9 +78,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR g."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for createdAt dates between two values if ssdi.createdbetween is defined diff --git a/src/queries/class-goal-use.sql b/src/queries/class-goal-use.sql index e531a9b4e8..2f51b193ad 100644 --- a/src/queries/class-goal-use.sql +++ b/src/queries/class-goal-use.sql @@ -1,21 +1,23 @@ /** -* This query collects all the Monitoring goals used on approved reports within the defined time range. +* @name: Monitoring Goals Report +* @description: This query collects all the Monitoring goals used on approved reports within the defined time range. +* @defaultOutputName: monitoring_goals_report * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. 
* The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within -* - ssdi.startDate - two dates defining a range for the startDate to be within -* - ssdi.endDate - two dates defining a range for the endDate to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean[] - true or false +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within +* - ssdi.startDate - date[] - two dates defining a range for the startDate to be within +* - ssdi.endDate - date[] - two dates defining a range for the endDate to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. 
+* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2023-10-01","2023-10-15"]',TRUE); */ SELECT @@ -78,9 +80,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR g."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for createdAt dates between two values if ssdi.createdbetween is defined diff --git a/src/queries/communication-logs.sql b/src/queries/communication-logs.sql index 907ebe7856..24df2445b8 100644 --- a/src/queries/communication-logs.sql +++ b/src/queries/communication-logs.sql @@ -1,15 +1,19 @@ /** +* @name: Communication Log Report +* @description: A comprehensive report of communication logs based on several criteria. +* @defaultOutputName: communication_log_report +* * This query collects all the communication logs based on several criteria. * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. 
* The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatim recipient names -* - ssdi.users - one or more verbatim user names -* - ssdi.role - one or more verbatim role names -* - ssdi.method - one or more verbatim method names -* - ssdi.communicationDate - two dates defining a range for the communicationDate to be within -* - ssdi.uei - one or more verbatim UEI values +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - text[] - one or more verbatim recipient names +* - ssdi.users - text[] - one or more verbatim user names +* - ssdi.role - text[] - one or more verbatim role names +* - ssdi.method - text[] - one or more verbatim method names +* - ssdi.communicationDate - date[] - two dates defining a range for the communicationDate to be within +* - ssdi.uei - text[] - one or more verbatim UEI values * * Zero or more SSDI flags can be set within the same transaction as the query is executed. 
* The following is an example of how to set a SSDI flag: @@ -19,17 +23,17 @@ SELECT r.name, r.uei, - u.name "user", - STRING_AGG(DISTINCT rr.name, ' ') "roles", + u.name AS "user", + STRING_AGG(DISTINCT rr.name, ' ') AS "roles", cl."createdAt", - COALESCE(cl.data ->> 'method', '') method, - cl.data ->> 'result' result, - COALESCE(cl.data ->> 'purpose', '') purpose, - COALESCE(cl.data ->> 'duration', '') duration, - COALESCE(cl.data ->> 'regionId', '') region, - to_date(cl.data ->> 'communicationDate', 'MM/DD/YYYY') "communicationDate", - COALESCE(cl.data ->> 'pocComplete', '') "pocComplete", - COALESCE(cl.data ->> 'notes', '') "notes", + COALESCE(cl.data ->> 'method', '') AS "method", + cl.data ->> 'result' AS "result", + COALESCE(cl.data ->> 'purpose', '') AS "purpose", + COALESCE(cl.data ->> 'duration', '') AS "duration", + COALESCE(cl.data ->> 'regionId', '') AS "region", + to_date(cl.data ->> 'communicationDate', 'MM/DD/YYYY') AS "communicationDate", + COALESCE(cl.data ->> 'pocComplete', '') AS "pocComplete", + COALESCE(cl.data ->> 'notes', '') AS "notes", COALESCE(( SELECT jsonb_agg(elem) FROM jsonb_array_elements(cl.data -> 'recipientNextSteps') elem @@ -107,4 +111,4 @@ AND cl.data -> 'pageState' ->> '1' = 'Complete' AND cl.data -> 'pageState' ->> '2' = 'Complete' AND cl.data -> 'pageState' ->> '3' = 'Complete' GROUP BY 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 -ORDER BY 1,11; +ORDER BY 1, 11; diff --git a/src/queries/fake-class-goal-use.sql b/src/queries/fake-class-goal-use.sql index 16106388f2..f46721aca5 100644 --- a/src/queries/fake-class-goal-use.sql +++ b/src/queries/fake-class-goal-use.sql @@ -1,21 +1,23 @@ /** -* This query collects all the fake Monitoring goals used on approved reports within the defined time range. +* @name: Fake Monitoring Goals Report +* @description: This query collects all the fake Monitoring goals used on approved reports within the defined time range. 
+* @defaultOutputName: fake_monitoring_goals_report * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within -* - ssdi.startDate - two dates defining a range for the startDate to be within -* - ssdi.endDate - two dates defining a range for the endDate to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean[] - true or false +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within +* - ssdi.startDate - date[] - two dates defining a range for the startDate to be within +* - ssdi.endDate - date[] - two dates defining a range for the endDate to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. 
+* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2023-10-01","2023-10-15"]',TRUE); */ SELECT @@ -63,6 +65,13 @@ AND FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.grantNumbers', true), ''),'[]')::json) AS value )) AND +-- Filter for goals if ssdi.goals is defined +(NULLIF(current_setting('ssdi.goals', true), '') IS NULL + OR g.name in ( + SELECT value::text AS my_array + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.goals', true), ''),'[]')::json) AS value + )) +AND -- Filter for status if ssdi.status is defined (NULLIF(current_setting('ssdi.status', true), '') IS NULL OR g.status in ( @@ -79,9 +88,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR g."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for createdAt dates between two values if ssdi.createdbetween is defined diff --git a/src/queries/fei-goals.sql b/src/queries/fei-goals.sql index 706376fa94..ef68fdad8d 100644 --- a/src/queries/fei-goals.sql +++ b/src/queries/fei-goals.sql @@ -1,20 +1,22 @@ /** -* This query collects all the FEI and near FEI goals based on several criteria. +* @name: FEI Goals Report +* @description: This query collects all the FEI and near FEI goals based on several criteria. +* @defaultOutputName: fei_goals_report * * The query results are filterable by the SSDI flags. 
All SSDI flags are passed as an array of values * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.response - one or more verbatium response values -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean[] - true or false +* - ssdi.response - string[] - one or more verbatim response values +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. 
+* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2023-10-01","2023-10-15"]',TRUE); */ WITH bad AS ( @@ -104,9 +106,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR b."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for response if ssdi.response is defined diff --git a/src/queries/monthly-delivery-report.sql b/src/queries/monthly-delivery-report.sql index fe299b794d..43a743218c 100644 --- a/src/queries/monthly-delivery-report.sql +++ b/src/queries/monthly-delivery-report.sql @@ -1,10 +1,14 @@ /** +* @name: Delivery report +* @description: A time boxed report of services delivered by user and role +* @defaultOutputName: delivery_report +* * This query collects all the Monitoring goals used on approved reports within the defined time range. * * The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.startDate - two dates defining a range for the startDate to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.startDate - date[] - two dates defining a range for the startDate to be within * * zero or more SSDI flags can be set within the same transaction as the query is executed. 
* The following is an example of how to set a SSDI flag: diff --git a/src/routes/activityReports/handlers.js b/src/routes/activityReports/handlers.js index a9b6ae6233..7b6189879d 100644 --- a/src/routes/activityReports/handlers.js +++ b/src/routes/activityReports/handlers.js @@ -722,6 +722,24 @@ export async function getActivityRecipients(req, res) { res.json(activityRecipients); } +export async function getActivityRecipientsForExistingReport(req, res) { + const { activityReportId } = req.params; + + const [report] = await activityReportAndRecipientsById(activityReportId); + const userId = await currentUserId(req, res); + const user = await userById(userId); + const authorization = new ActivityReport(user, report); + + if (!authorization.canGet()) { + res.sendStatus(403); + return; + } + + const targetRegion = parseInt(report.regionId, DECIMAL_BASE); + const activityRecipients = await possibleRecipients(targetRegion, activityReportId); + res.json(activityRecipients); +} + /** * Retrieve an activity report * diff --git a/src/routes/activityReports/index.js b/src/routes/activityReports/index.js index e787b72cd8..24329cbc91 100644 --- a/src/routes/activityReports/index.js +++ b/src/routes/activityReports/index.js @@ -9,6 +9,7 @@ import { getReports, getReportAlerts, getActivityRecipients, + getActivityRecipientsForExistingReport, getGoals, reviewReport, resetToDraft, @@ -40,6 +41,7 @@ router.post('/', transactionWrapper(createReport)); router.get('/approvers', transactionWrapper(getApprovers)); router.get('/groups', transactionWrapper(getGroups)); router.get('/activity-recipients', transactionWrapper(getActivityRecipients)); +router.get('/activity-recipients/:activityReportId', transactionWrapper(getActivityRecipientsForExistingReport)); router.get('/goals', transactionWrapper(getGoals)); router.post('/goals', transactionWrapper(createGoalsForReport)); router.post('/objectives', transactionWrapper(saveOtherEntityObjectivesForReport)); @@ -60,5 +62,6 
router.put('/:activityReportId/review', checkActivityReportIdParam, transactionW router.put('/:activityReportId/submit', checkActivityReportIdParam, transactionWrapper(submitReport)); router.put('/:activityReportId/unlock', checkActivityReportIdParam, transactionWrapper(unlockReport)); router.put('/:activityReportId/goals/edit', checkActivityReportIdParam, transactionWrapper(setGoalAsActivelyEdited)); +router.get('/:activityReportId/activity-recipients', transactionWrapper(getActivityRecipientsForExistingReport)); export default router; diff --git a/src/routes/admin/index.js b/src/routes/admin/index.js index 140dc96074..b62021cd3a 100644 --- a/src/routes/admin/index.js +++ b/src/routes/admin/index.js @@ -13,6 +13,7 @@ import ssRouter from './ss'; import trainingReportRouter from './trainingReport'; import legacyReportRouter from './legacyReports'; import courseRouter from './course'; +import ssdiRouter from './ssdi'; import userAdminAccessMiddleware from '../../middleware/userAdminAccessMiddleware'; import transactionWrapper from '../transactionWrapper'; @@ -35,5 +36,6 @@ router.use('/training-reports', trainingReportRouter); router.use('/legacy-reports', legacyReportRouter); router.use('/courses', courseRouter); router.use('/ss', ssRouter); +router.use('/ssdi', ssdiRouter); export default router; diff --git a/src/routes/admin/ssdi/handlers.test.js b/src/routes/admin/ssdi/handlers.test.js new file mode 100644 index 0000000000..a6bdf4bd68 --- /dev/null +++ b/src/routes/admin/ssdi/handlers.test.js @@ -0,0 +1,223 @@ +import request from 'supertest'; +import express from 'express'; +import { + listQueryFiles, + readFlagsAndQueryFromFile, + validateFlagValues, + setFlags, + sanitizeFilename, + generateFlagString, + executeQuery, +} from '../../../services/ssdi'; +import { currentUserId } from '../../../services/currentUser'; +import { userById } from '../../../services/users'; +import { listQueries, getFlags, runQuery } from './handlers'; +import Generic from 
'../../../policies/generic'; + +jest.mock('../../../services/ssdi', () => ({ + listQueryFiles: jest.fn(), + readFlagsAndQueryFromFile: jest.fn(), + validateFlagValues: jest.fn(), + setFlags: jest.fn(), + sanitizeFilename: jest.fn(), + generateFlagString: jest.fn(), + executeQuery: jest.fn(), +})); + +jest.mock('../../../services/currentUser', () => ({ + currentUserId: jest.fn(), +})); + +jest.mock('../../../services/users', () => ({ + userById: jest.fn(), +})); + +jest.mock('../../../policies/generic'); + +const app = express(); +app.use(express.json()); +app.get('/listQueries', listQueries); +app.get('/getFlags', getFlags); +app.post('/runQuery', runQuery); + +describe('API Endpoints', () => { + describe('listQueries', () => { + it('should list all available query files', async () => { + listQueryFiles.mockReturnValue([{ name: 'Test Query', description: 'Test Description' }]); + + const response = await request(app).get('/listQueries'); + expect(response.status).toBe(200); + expect(response.body).toEqual([{ name: 'Test Query', description: 'Test Description' }]); + }); + + it('should handle errors', async () => { + listQueryFiles.mockImplementation(() => { throw new Error('Error listing query files'); }); + + const response = await request(app).get('/listQueries'); + expect(response.status).toBe(500); + expect(response.text).toBe('Error listing query files'); + }); + }); + + describe('getFlags', () => { + it('should get flags from the script', async () => { + readFlagsAndQueryFromFile.mockReturnValue({ flags: { flag1: { type: 'integer[]', description: 'Test Flag' } } }); + + const response = await request(app).get('/getFlags').query({ path: 'src/queries/test/path' }); + expect(response.status).toBe(200); + expect(response.body).toEqual({ flag1: { type: 'integer[]', description: 'Test Flag' } }); + }); + + it('should return 400 if script path is not provided', async () => { + const response = await request(app).get('/getFlags'); + expect(response.status).toBe(400); 
+ expect(response.text).toBe('Script path is required'); + }); + + it('should return 400 for path traversal attempts', async () => { + const response = await request(app).get('/getFlags').query({ path: '../outside/path' }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: Path traversal detected' }); + }); + + it('should return 400 if script path is not within allowed directory', async () => { + const response = await request(app).get('/getFlags').query({ path: 'some/other/path' }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + }); + + it('should handle errors', async () => { + readFlagsAndQueryFromFile.mockImplementation(() => { throw new Error('Error reading flags'); }); + + const response = await request(app).get('/getFlags').query({ path: 'src/queries/test/path' }); + expect(response.status).toBe(500); + expect(response.text).toBe('Error reading flags'); + }); + }); + + describe('runQuery', () => { + const user = { + id: 1, + permissions: [ + { scopeId: 1, regionId: 1 }, + { scopeId: 2, regionId: 2 }, + { scopeId: 3, regionId: 3 }, + ], + }; + + beforeEach(() => { + readFlagsAndQueryFromFile.mockReturnValue({ + flags: { recipientIds: { type: 'integer[]', description: 'Test Flag' } }, + query: 'SELECT * FROM test', + defaultOutputName: 'test_output', + }); + validateFlagValues.mockImplementation(() => {}); + setFlags.mockResolvedValue([]); + executeQuery.mockResolvedValue([{ id: 1, name: 'Test' }]); + sanitizeFilename.mockReturnValue('test_output_recipientIds_1-2-3'); + generateFlagString.mockReturnValue('recipientIds_1-2-3'); + currentUserId.mockResolvedValue(user.id); + userById.mockResolvedValue(user); + Generic.mockImplementation(() => ({ + filterRegions: jest.fn((ids) => ids.filter((id) => id <= 3)), + getAllAccessibleRegions: jest.fn(() => [1, 2, 3]), + })); + }); + + it('should run the query and 
return JSON result', async () => { + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path' }) + .send({ recipientIds: [1, 2, 3, 4] }); + + expect(response.status).toBe(200); + expect(response.body).toEqual([{ id: 1, name: 'Test' }]); + }); + + it('should run the query and return CSV result', async () => { + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path', format: 'csv' }) + .send({ recipientIds: [1, 2, 3, 4] }); + + expect(response.status).toBe(200); + expect(response.headers['content-type']).toBe('text/csv; charset=utf-8'); + expect(response.headers['content-disposition']).toBe('attachment; filename="test_output_recipientIds_1-2-3.csv"'); + }); + + it('should return 400 if script path is not provided', async () => { + const response = await request(app).post('/runQuery').send({ recipientIds: [1, 2, 3] }); + expect(response.status).toBe(400); + expect(response.text).toBe('Script path is required'); + }); + + it('should return 400 for path traversal attempts', async () => { + const response = await request(app).post('/runQuery').query({ path: '../outside/path' }).send({ recipientIds: [1, 2, 3] }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: Path traversal detected' }); + }); + + it('should return 400 if script path is not within allowed directory', async () => { + const response = await request(app).post('/runQuery').query({ path: 'some/other/path' }).send({ recipientIds: [1, 2, 3] }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + }); + + it('should return 401 if regionIds is an empty set', async () => { + Generic.mockImplementation(() => ({ + filterRegions: jest.fn(() => []), + getAllAccessibleRegions: jest.fn(() => []), + })); + + const response = await request(app) + .post('/runQuery') + .query({ path: 
'src/queries/test/path' }) + .send({}); + + expect(response.status).toBe(401); + }); + + it('should handle errors', async () => { + readFlagsAndQueryFromFile.mockImplementation(() => { throw new Error('Error reading query'); }); + + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path' }) + .send({ recipientIds: [1, 2, 3] }); + + expect(response.status).toBe(500); + expect(response.text).toBe('Error executing query: Error reading query'); + }); + + it('should filter out non-integer regionIds', async () => { + const filterRegionsMock = jest.fn((val) => val); + Generic.mockImplementation(() => ({ + filterRegions: filterRegionsMock, + getAllAccessibleRegions: jest.fn(() => []), + })); + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path' }) + .send({ regionIds: [1, 'a', 2, 'b', 3] }); + + expect(response.status).toBe(200); + expect(filterRegionsMock).toHaveBeenCalledWith([1, 2, 3]); + }); + + it('should filter regionIds using policy', async () => { + const filterRegionsMock = jest.fn((val) => val); + Generic.mockImplementation(() => ({ + filterRegions: filterRegionsMock, + getAllAccessibleRegions: jest.fn(() => []), + })); + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path' }) + .send({ regionIds: [1, 2, 3, 4] }); + + expect(response.status).toBe(200); + expect(filterRegionsMock).toHaveBeenCalledWith([1, 2, 3, 4]); + expect(response.body).toEqual([{ id: 1, name: 'Test' }]); + }); + }); +}); diff --git a/src/routes/admin/ssdi/handlers.ts b/src/routes/admin/ssdi/handlers.ts new file mode 100644 index 0000000000..700f7d7f9c --- /dev/null +++ b/src/routes/admin/ssdi/handlers.ts @@ -0,0 +1,129 @@ +import stringify from 'csv-stringify/lib/sync'; +import { Request, Response } from 'express'; +import { currentUserId } from '../../../services/currentUser'; +import { userById } from '../../../services/users'; +import { + FlagValues, + 
listQueryFiles, + readFlagsAndQueryFromFile, + validateFlagValues, + setFlags, + sanitizeFilename, + generateFlagString, + executeQuery, +} from '../../../services/ssdi'; +import Generic from '../../../policies/generic'; + +// list all available query files with name and description +const listQueries = async (req: Request, res: Response) => { + try { + const queryFiles = listQueryFiles('./src/queries/'); + res.json(queryFiles); + } catch (error) { + res.status(500).send('Error listing query files'); + } +}; + +// Reads the flags and query from the file and sends the flags to the UI +const getFlags = async (req: Request, res: Response) => { + const scriptPath = req.query.path as string; + if (!scriptPath) { + res.status(400).send('Script path is required'); + return; + } + if (scriptPath.includes('../')) { + res.status(400).json({ error: 'Invalid script path: Path traversal detected' }); + return; + } + if (!scriptPath.startsWith('src/queries/')) { + res.status(400).json({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + return; + } + + try { + const { flags } = readFlagsAndQueryFromFile(`./${scriptPath}`); + res.json(flags); + } catch (error) { + res.status(500).send('Error reading flags'); + } +}; + +const filterAttributes = <T extends object>( + obj: T, + keysToRemove: (keyof T)[], +): Partial<T> => Object.fromEntries( + Object.entries(obj).filter(([key]) => !keysToRemove.includes(key as keyof T)), + ) as Partial<T>; + +// Reads the flags and runs the query after setting the flags +const runQuery = async (req: Request, res: Response) => { + const scriptPath = req.query.path as string; + const outputFormat = (req.query.format as string) || 'json'; + if (!scriptPath) { + res.status(400).send('Script path is required'); + return; + } + if (scriptPath.includes('../')) { + res.status(400).json({ error: 'Invalid script path: Path traversal detected' }); + return; + } + if (!scriptPath.startsWith('src/queries/')) { + res.status(400).json({ error: 'Invalid 
script path: all scripts are located within "src/queries/"' }); + return; + } + + try { + const { flags, query, defaultOutputName } = readFlagsAndQueryFromFile(scriptPath); + const flagValues: FlagValues = filterAttributes( + { + ...req.body, + ...req.query, + }, + ['path', 'format'], + ); + const userId = await currentUserId(req, res); + const user = await userById(userId); + const policy = new Generic(user); + + // Check if flagValues contains regionIds, filter the values with policy.filterRegions + // passing in the regionIds. If flagValues does not contain regionIds, use + // policy.getAllAccessibleRegions to define it. Before calling validateFlagValues, + // regionIds must be defined and not be an empty set + + if (flagValues.regionIds) { + flagValues.regionIds = flagValues.regionIds.map(Number).filter((num) => !Number.isNaN(num)); + flagValues.regionIds = policy.filterRegions(flagValues.regionIds); + } else { + flagValues.regionIds = policy.getAllAccessibleRegions(); + } + + if (!flagValues.regionIds || flagValues.regionIds.length === 0) { + res.sendStatus(401); + return; + } + + validateFlagValues(flags, flagValues); + await setFlags(flags, flagValues); + const result = await executeQuery(query); + + const sanitizedOutputName = sanitizeFilename(`${defaultOutputName}_${generateFlagString(flagValues)}`); + + if (outputFormat === 'csv') { + res.setHeader('Content-Type', 'text/csv'); + res.setHeader('Content-Disposition', `attachment; filename="${sanitizedOutputName}.csv"`); + const csvData = stringify(result, { header: true }); + res.attachment(`${sanitizedOutputName}.csv`); + res.send(`\ufeff${csvData}`); + } else { + res.json(result); + } + } catch (error) { + res.status(500).send(`Error executing query: ${error.message}`); + } +}; + +export { + listQueries, + getFlags, + runQuery, +}; diff --git a/src/routes/admin/ssdi/index.ts b/src/routes/admin/ssdi/index.ts new file mode 100644 index 0000000000..091e9d84a4 --- /dev/null +++ 
b/src/routes/admin/ssdi/index.ts @@ -0,0 +1,15 @@ +import express from 'express'; +import transactionWrapper from '../../transactionWrapper'; +import authMiddleware from '../../../middleware/authMiddleware'; +import { + listQueries, + getFlags, + runQuery, +} from './handlers'; + +const router = express.Router(); +router.get('/list-queries', authMiddleware, transactionWrapper(listQueries)); +router.get('/get-flags', authMiddleware, transactionWrapper(getFlags)); +router.post('/run-query', authMiddleware, transactionWrapper(runQuery)); + +export default router; diff --git a/src/routes/courses/handlers.ts b/src/routes/courses/handlers.ts index 5c97b894fd..4ebaccc83f 100644 --- a/src/routes/courses/handlers.ts +++ b/src/routes/courses/handlers.ts @@ -28,7 +28,7 @@ export async function allCourses(req: Request, res: Response) { const courses = await getAllCourses(); res.json(courses); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -38,7 +38,7 @@ export async function getCourseById(req: Request, res: Response) { const course = await getById(Number(id)); res.json(course); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -70,7 +70,7 @@ export async function updateCourseById(req: Request, res: Response) { res.json(newCourse); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -90,7 +90,7 @@ export async function createCourseByName(req: Request, res: Response) { res.json(course); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -119,7 +119,7 @@ export async function deleteCourseById(req: Request, res: Response) { } res.status(204).send(); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } diff --git 
a/src/routes/transactionWrapper.js b/src/routes/transactionWrapper.js index 8bd0b46b84..50afd9cfe6 100644 --- a/src/routes/transactionWrapper.js +++ b/src/routes/transactionWrapper.js @@ -1,3 +1,4 @@ +import httpContext from 'express-http-context'; import { sequelize } from '../models'; import { addAuditTransactionSettings, removeFromAuditedTransactions } from '../models/auditModelGenerator'; import handleErrors from '../lib/apiErrorHandler'; @@ -14,7 +15,8 @@ export default function transactionWrapper(originalFunction, context = '') { const startTime = Date.now(); try { // eslint-disable-next-line @typescript-eslint/return-await - return await sequelize.transaction(async () => { + return await sequelize.transaction(async (transaction) => { + httpContext.set('transactionId', transaction.id); try { await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); const result = await originalFunction(req, res, next); diff --git a/src/scopes/goals/index.js b/src/scopes/goals/index.js index aee1ecb0a2..796e43f2c1 100644 --- a/src/scopes/goals/index.js +++ b/src/scopes/goals/index.js @@ -6,7 +6,6 @@ import { withReasons, withoutReasons } from './reasons'; import { withRecipientName, withoutRecipientName } from './recipient'; import { withRecipientId } from './recipientId'; import { withRegion, withoutRegion } from './region'; -import { withRoles, withoutRoles } from './role'; import { containsGrantNumber, doesNotContainGrantNumber, withGrantNumber, withoutGrantNumber, } from './grantNumber'; @@ -74,10 +73,6 @@ export const topicToQuery = { in: (query) => withRegion(query), nin: (query) => withoutRegion(query), }, - role: { - in: (query) => withRoles(query), - nin: (query) => withoutRoles(query), - }, group: { in: (query, _options, userId) => withGroup(query, userId), nin: (query, _options, userId) => withoutGroup(query, userId), diff --git a/src/scopes/goals/role.js b/src/scopes/goals/role.js deleted file mode 100644 index 
62ae91d2e1..0000000000 --- a/src/scopes/goals/role.js +++ /dev/null @@ -1,30 +0,0 @@ -import { Op } from 'sequelize'; -import { filterAssociation } from './utils'; - -const roleFilter = ` -SELECT - DISTINCT "Goal"."id" -FROM "Objectives" "Objectives" -INNER JOIN "ObjectiveRoles" "ObjectiveRoles" -ON "Objectives"."id" = "ObjectiveRoles"."objectiveId" -INNER JOIN "Roles" "Roles" -ON "ObjectiveRoles"."roleId" = "Roles"."id" -INNER JOIN "Goals" "Goal" -ON "Objectives"."goalId" = "Goal"."id" -WHERE "Roles"."name"`; - -export function withRoles(roles) { - return { - [Op.or]: [ - filterAssociation(roleFilter, roles, false), - ], - }; -} - -export function withoutRoles(roles) { - return { - [Op.and]: [ - filterAssociation(roleFilter, roles, true), - ], - }; -} diff --git a/src/services/activityReports.js b/src/services/activityReports.js index 1e2dcfe398..6d35b1365b 100644 --- a/src/services/activityReports.js +++ b/src/services/activityReports.js @@ -14,6 +14,7 @@ import { ActivityReportCollaborator, ActivityReportFile, sequelize, + Sequelize, ActivityRecipient, File, Grant, @@ -1125,33 +1126,54 @@ export async function setStatus(report, status) { * @param {number} [regionId] - A region id to query against * @returns {*} Grants and Other entities */ -export async function possibleRecipients(regionId) { - const where = { status: 'Active', regionId }; - +export async function possibleRecipients(regionId, activityReportId = null) { const grants = await Recipient.findAll({ - attributes: ['id', 'name'], + attributes: [ + 'id', + 'name', + ], order: ['name'], - include: [{ - where, - model: Grant, - as: 'grants', - attributes: [['id', 'activityRecipientId'], 'name', 'number'], - include: [{ - model: Recipient, - as: 'recipient', - }, + include: [ { - model: Program, - as: 'programs', - attributes: ['programType'], + model: Grant, + as: 'grants', + attributes: ['number', ['id', 'activityRecipientId'], 'name'], + required: true, + include: [ + { + model: Program, + as: 
'programs', + attributes: ['programType'], + required: false, + }, + { + model: Recipient, + as: 'recipient', + required: true, + }, + { + model: ActivityRecipient, + as: 'activityRecipients', + attributes: [], + required: false, + }, + ], }, + ], + where: { + '$grants.regionId$': regionId, + [Op.or]: [ + { '$grants.status$': 'Active' }, + { '$grants->activityRecipients.activityReportId$': activityReportId }, ], - }], + }, }); + const otherEntities = await OtherEntity.findAll({ raw: true, attributes: [['id', 'activityRecipientId'], 'name'], }); + return { grants, otherEntities }; } diff --git a/src/services/requestErrors.js b/src/services/requestErrors.js index 8d823e3502..56dafd5696 100644 --- a/src/services/requestErrors.js +++ b/src/services/requestErrors.js @@ -1,5 +1,3 @@ -import models from '../models'; - export default async function createRequestError({ operation, uri, @@ -8,11 +6,13 @@ export default async function createRequestError({ responseBody = 'N/A', responseCode = 'N/A', }) { + // eslint-disable-next-line global-require + const { RequestErrors } = require('../models'); try { const requestErrorBody = { operation, uri, method, requestBody, responseBody, responseCode, }; - const requestError = await models.RequestErrors.create(requestErrorBody, { transaction: null }); + const requestError = await RequestErrors.create(requestErrorBody, { transaction: null }); return requestError.id; } catch (err) { throw new Error('Error creating RequestError entry'); @@ -20,12 +20,14 @@ export default async function createRequestError({ } export async function requestErrors({ filter = '{}', range = '[0,9]', sort = '["createdAt","DESC"]' } = {}) { + // eslint-disable-next-line global-require + const { RequestErrors } = require('../models'); const offset = JSON.parse(range)[0]; const limit = JSON.parse(range)[1]; const order = JSON.parse(sort); const where = JSON.parse(filter); - return models.RequestErrors.findAndCountAll({ + return RequestErrors.findAndCountAll({ 
where, order: [order], offset, @@ -34,20 +36,26 @@ export async function requestErrors({ filter = '{}', range = '[0,9]', sort = '[" } export async function requestErrorById(id) { - return models.RequestErrors.findOne({ + // eslint-disable-next-line global-require + const { RequestErrors } = require('../models'); + return RequestErrors.findOne({ where: { id }, }); } export async function requestErrorsByIds({ filter = '{}' } = {}) { - return models.RequestErrors.findAll({ + // eslint-disable-next-line global-require + const { RequestErrors } = require('../models'); + return RequestErrors.findAll({ where: JSON.parse(filter), attributes: ['id'], }); } export async function delRequestErrors({ filter = '{}' } = {}) { - return models.RequestErrors.destroy({ + // eslint-disable-next-line global-require + const { RequestErrors } = require('../models'); + return RequestErrors.destroy({ where: JSON.parse(filter), }); } diff --git a/src/services/resourceQueue.js b/src/services/resourceQueue.js index 1a00fdc107..5c957500cd 100644 --- a/src/services/resourceQueue.js +++ b/src/services/resourceQueue.js @@ -2,6 +2,8 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { RESOURCE_ACTIONS } from '../constants'; import { logger, auditLogger } from '../logger'; import { getResourceMetaDataJob } from '../lib/resource'; +import transactionQueueWrapper from '../workers/transactionWrapper'; +import referenceData from '../workers/referenceData'; const resourceQueue = newQueue('resource'); @@ -17,6 +19,7 @@ const addGetResourceMetadataToQueue = async (id, url) => { resourceId: id, resourceUrl: url, key: RESOURCE_ACTIONS.GET_METADATA, + ...referenceData(), }; return resourceQueue.add( RESOURCE_ACTIONS.GET_METADATA, @@ -47,7 +50,10 @@ const processResourceQueue = () => { // Get resource metadata. 
resourceQueue.process( RESOURCE_ACTIONS.GET_METADATA, - getResourceMetaDataJob, + transactionQueueWrapper( + getResourceMetaDataJob, + RESOURCE_ACTIONS.GET_METADATA, + ), ); }; diff --git a/src/services/s3Queue.js b/src/services/s3Queue.js index 05c02a233d..e28a4730b6 100644 --- a/src/services/s3Queue.js +++ b/src/services/s3Queue.js @@ -2,6 +2,8 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { S3_ACTIONS } from '../constants'; import { logger, auditLogger } from '../logger'; import { deleteFileFromS3Job } from '../lib/s3'; +import transactionQueueWrapper from '../workers/transactionWrapper'; +import referenceData from '../workers/referenceData'; const s3Queue = newQueue('s3'); @@ -11,6 +13,7 @@ const addDeleteFileToQueue = (id, key) => { fileId: id, fileKey: key, key: S3_ACTIONS.DELETE_FILE, + ...referenceData(), }; s3Queue.add(S3_ACTIONS.DELETE_FILE, data); return data; @@ -33,7 +36,10 @@ const processS3Queue = () => { // Delete S3 file. s3Queue.process( S3_ACTIONS.DELETE_FILE, - deleteFileFromS3Job, + transactionQueueWrapper( + deleteFileFromS3Job, + S3_ACTIONS.DELETE_FILE, + ), ); }; diff --git a/src/services/scanQueue.js b/src/services/scanQueue.js index ca4a4b7aac..3fa1575f5b 100644 --- a/src/services/scanQueue.js +++ b/src/services/scanQueue.js @@ -1,6 +1,8 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { logger, auditLogger } from '../logger'; import processFile from '../workers/files'; +import transactionQueueWrapper from '../workers/transactionWrapper'; +import referenceData from '../workers/referenceData'; const scanQueue = newQueue('scan'); const addToScanQueue = (fileKey) => { @@ -12,7 +14,10 @@ const addToScanQueue = (fileKey) => { }; return scanQueue.add( - fileKey, + { + ...fileKey, + ...referenceData(), + }, { attempts: retries, backoff: backOffOpts, @@ -35,7 +40,8 @@ const processScanQueue = () => { scanQueue.on('failed', onFailedScanQueue); scanQueue.on('completed', onCompletedScanQueue); 
increaseListeners(scanQueue); - scanQueue.process((job) => processFile(job.data.key)); + const process = (job) => processFile(job.data.key); + scanQueue.process(transactionQueueWrapper(process)); }; export { diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js new file mode 100644 index 0000000000..a1c0cfea7b --- /dev/null +++ b/src/services/ssdi.test.js @@ -0,0 +1,282 @@ +import fs from 'fs'; +import path from 'path'; +import db from '../models'; +import { + queryFileCache, + queryDataCache, + listQueryFiles, + readNameAndDescriptionFromFile, + readFlagsAndQueryFromFile, + validateFlagValues, + validateType, + setFlags, + sanitizeFilename, + generateFlagString, + executeQuery, +} from './ssdi'; + +// Mock fs and db +jest.mock('fs'); +jest.mock('../models', () => ({ + sequelize: { + query: jest.fn(), + QueryTypes: { SELECT: 'SELECT', RAW: 'RAW' }, + }, +})); + +// Clear the caches before each test +beforeEach(() => { + queryFileCache.clear(); + queryDataCache.clear(); + jest.clearAllMocks(); +}); + +describe('ssdi', () => { + describe('readNameAndDescriptionFromFile', () => { + it('should read name, description, and default output name from file', () => { + const fileContents = ` + @name: TestName + @description: Test description + @defaultOutputName: test_output + `; + fs.readFileSync.mockReturnValue(fileContents); + + const result = readNameAndDescriptionFromFile('test/path.sql'); + expect(result).toEqual({ + name: 'TestName', + description: 'Test description', + defaultOutputName: 'test_output', + }); + }); + + it('should use filename as default for name and default output name if not provided', () => { + const fileContents = ` + @description: Test description + `; + fs.readFileSync.mockReturnValue(fileContents); + + const result = readNameAndDescriptionFromFile('test/path.sql'); + expect(result).toEqual({ + name: 'path', + description: 'Test description', + defaultOutputName: 'path', + }); + }); + }); + + describe('listQueryFiles', () => { + 
it('should list all query files with name and description', () => { + fs.readdirSync.mockReturnValue(['file1.sql', 'file2.sql']); + fs.readFileSync.mockReturnValue(` + @name: TestName + @description: Test description + @defaultOutputName: test_output + `); + + const result = listQueryFiles('test/directory'); + expect(result).toEqual([ + { + name: 'TestName', + description: 'Test description', + filePath: 'test/directory/file1.sql', + defaultOutputName: 'test_output', + }, + { + name: 'TestName', + description: 'Test description', + filePath: 'test/directory/file2.sql', + defaultOutputName: 'test_output', + }, + ]); + }); + + it('should return cached query file data if available', () => { + fs.readdirSync.mockReturnValue(['file1.sql']); + const cachedQueryFile = { + name: 'CachedName', + description: 'Cached description', + filePath: 'test/directory/file1.sql', + defaultOutputName: 'cached_output', + }; + + // Set the cache with the cachedQueryFile data + queryFileCache.set('test/directory/file1.sql', cachedQueryFile); + + const result = listQueryFiles('test/directory'); + expect(result).toEqual([cachedQueryFile]); + + // Ensure fs.readFileSync is not called since data is from cache + expect(fs.readFileSync).not.toHaveBeenCalledWith('test/directory/file1.sql', 'utf8'); + }); + }); + + describe('readFlagsAndQueryFromFile', () => { + it('should read flags and query from file and remove comments', () => { + const fileContents = ` + /* + * - ssdi.flag1 - integer[] - Flag description + * @defaultOutputName: test_output + */ + SELECT * FROM table; -- comment + SELECT * FROM another_table; -- another comment + `; + fs.readFileSync.mockReturnValue(fileContents); + + const result = readFlagsAndQueryFromFile('test/path.sql'); + expect(result).toEqual({ + flags: { + flag1: { + type: 'integer[]', + name: 'flag1', + description: 'Flag description', + }, + }, + query: 'SELECT * FROM table;\nSELECT * FROM another_table;', + defaultOutputName: 'test_output', + }); + }); + + 
it('should return cached query data if available', () => { + const cachedQuery = { + flags: { + flag1: { + type: 'integer[]', + name: 'flag1', + description: 'Cached Flag description', + }, + }, + query: 'SELECT * FROM cached_table;', + defaultOutputName: 'cached_output', + }; + + // Set the cache with the cachedQuery data + queryDataCache.set('test/path.sql', cachedQuery); + + const result = readFlagsAndQueryFromFile('test/path.sql'); + expect(result).toEqual(cachedQuery); + + // Ensure fs.readFileSync is not called since data is from cache + expect(fs.readFileSync).not.toHaveBeenCalled(); + }); + }); + + describe('validateType', () => { + it('should validate integer[] type', () => { + expect(validateType('integer[]', [1, 2, 3])).toBe(true); + expect(validateType('integer[]', [1, '2', 3])).toBe(false); + }); + + it('should validate date[] type', () => { + expect(validateType('date[]', ['2021-01-01', '2021-02-01'])).toBe(true); + expect(validateType('date[]', ['2021-01-01', 'invalid-date'])).toBe(false); + }); + + it('should validate string[] type', () => { + expect(validateType('string[]', ['hello', 'world'])).toBe(true); + expect(validateType('string[]', ['hello', 123])).toBe(false); + }); + + it('should validate boolean[] type', () => { + expect(validateType('boolean[]', [true, false])).toBe(true); + expect(validateType('boolean[]', [true, 'false'])).toBe(false); + }); + + it('should throw an error for unknown type', () => { + expect(() => validateType('unknown[]', ['test'])).toThrow('Unknown type: unknown[]'); + }); + }); + + describe('validateFlagValues', () => { + const flags = { + flag1: { type: 'integer[]', name: 'flag1', description: 'Flag description' }, + }; + + it('should validate flag values correctly', () => { + expect(() => validateFlagValues(flags, { flag1: [1, 2, 3] })).not.toThrow(); + }); + + it('should throw error for invalid flag value', () => { + expect(() => validateFlagValues(flags, { flag1: [1, '2', 3] })).toThrow( + 'Invalid type for flag 
flag1: expected integer[]', + ); + }); + + it('should throw error for invalid flag', () => { + expect(() => validateFlagValues(flags, { invalidFlag: [1, 2, 3] })).toThrow( + 'Invalid flag: invalidFlag', + ); + }); + }); + + describe('setFlags', () => { + it('should set flags in the database', async () => { + const flags = { flag1: { type: 'integer[]', name: 'flag1', description: 'Flag description' } }; + const flagValues = { flag1: [1, 2, 3] }; + + db.sequelize.query.mockResolvedValue([{ success: true }]); + + const result = await setFlags(flags, flagValues); + expect(result).toEqual([[{ success: true }]]); + expect(db.sequelize.query).toHaveBeenCalledWith('SELECT set_config($1, $2, false)', { + bind: ['ssdi.flag1', JSON.stringify([1, 2, 3])], + type: db.sequelize.QueryTypes.SELECT, + }); + }); + }); + + describe('sanitizeFilename', () => { + it('should sanitize the filename', () => { + expect(sanitizeFilename('test@file#name!')).toBe('test_file_name_'); + }); + }); + + describe('generateFlagString', () => { + it('should generate a string representation of the flag values', () => { + const flagValues = { flag1: [1, 2, 3], flag2: 'value' }; + expect(generateFlagString(flagValues)).toBe('flag1_1-2-3_flag2_value'); + }); + }); + + describe('executeQuery', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should throw an error if the query is not a string', async () => { + const invalidQuery = 123; + + await expect(executeQuery(invalidQuery)).rejects.toThrow('The query must be a string'); + }); + + it('should set the transaction to READ ONLY', async () => { + const mockQuery = 'SELECT * FROM users;'; + db.sequelize.query.mockResolvedValueOnce([]); + + await executeQuery(mockQuery); + + expect(db.sequelize.query).toHaveBeenCalledWith('SET TRANSACTION READ ONLY;', { type: db.sequelize.QueryTypes.RAW }); + }); + + it('should return the result of the query', async () => { + const mockQuery = 'SELECT * FROM users;'; + const mockResult = [{ id: 1, name: 'John 
Doe' }];
+      db.sequelize.query
+        .mockResolvedValueOnce([]) // for SET TRANSACTION READ ONLY
+        .mockResolvedValueOnce(mockResult); // for the actual query
+
+      const result = await executeQuery(mockQuery);
+
+      expect(result).toEqual(mockResult);
+    });
+
+    it('should throw an error if the query fails', async () => {
+      const mockQuery = 'SELECT * FROM users;';
+      const mockError = new Error('Query execution failed');
+      db.sequelize.query
+        .mockResolvedValueOnce([]) // for SET TRANSACTION READ ONLY
+        .mockRejectedValueOnce(mockError); // for the actual query
+
+      await expect(executeQuery(mockQuery)).rejects.toThrow(`Query failed: ${mockError.message}`);
+    });
+  });
+});
diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts
new file mode 100644
index 0000000000..2f5509736a
--- /dev/null
+++ b/src/services/ssdi.ts
@@ -0,0 +1,218 @@
+import fs from 'fs';
+import path from 'path';
+import { QueryTypes } from 'sequelize';
+import db from '../models';
+
+interface Flag {
+  type: string;
+  name: string;
+  description: string;
+}
+
+interface Flags {
+  [key: string]: Flag;
+}
+
+interface FlagValues {
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  [key: string]: any;
+}
+
+interface QueryFile {
+  name: string;
+  description: string;
+  filePath: string;
+  defaultOutputName: string;
+}
+
+interface CachedQuery {
+  flags: Flags;
+  query: string;
+  defaultOutputName: string;
+}
+
+// Caches to store the parsed query files and their metadata
+const queryFileCache: Map<string, QueryFile> = new Map();
+const queryDataCache: Map<string, CachedQuery> = new Map();
+
+// Helper function to read name, description, and default output name from a file
+const readNameAndDescriptionFromFile = (
+  filePath: string,
+): {
+  name: string;
+  description: string;
+  defaultOutputName: string;
+} => {
+  const fileContents = fs.readFileSync(filePath, 'utf8');
+  const nameMatch = fileContents.match(/@name:\s*(.*)/);
+  const descriptionMatch = fileContents.match(/@description:\s*(.*)/);
+  const defaultOutputNameMatch = 
fileContents.match(/@defaultOutputName:\s*(.*)/); + const fileName = path.basename(filePath, path.extname(filePath)); + return { + name: nameMatch ? nameMatch[1].trim() : fileName, + description: descriptionMatch ? descriptionMatch[1].trim() : 'No description available', + defaultOutputName: defaultOutputNameMatch ? defaultOutputNameMatch[1].trim() : fileName, + }; +}; + +// Helper function to list all query files with name and description +const listQueryFiles = (directory: string): QueryFile[] => { + const files = fs.readdirSync(directory); + return files.map((file) => { + const filePath = path.join(directory, file); + if (queryFileCache.has(filePath)) { + return queryFileCache.get(filePath) as QueryFile; + } + const { name, description, defaultOutputName } = readNameAndDescriptionFromFile(filePath); + const queryFile: QueryFile = { + name, + description, + filePath, + defaultOutputName, + }; + queryFileCache.set(filePath, queryFile); + return queryFile; + }); +}; + +// Helper function to read flags and the query from the file +const readFlagsAndQueryFromFile = ( + filePath: string, +): CachedQuery => { + // Check if the query data is already cached + if (queryDataCache.has(filePath)) { + return queryDataCache.get(filePath) as CachedQuery; + } + + const fileContents = fs.readFileSync(filePath, 'utf8'); + const lines = fileContents.split('\n'); + const flags: Flags = {}; + const queryLines: string[] = []; + let inCommentBlock = false; + let defaultOutputName = 'output'; + + lines.forEach((line) => { + if (line.trim().startsWith('/*')) { + inCommentBlock = true; + } else if (line.trim().endsWith('*/')) { + inCommentBlock = false; + return; + } + + if (inCommentBlock) { + const flagMatch = line.match(/ - ssdi\.(\w+) - (\w+\[\]) - (.*)/); + const defaultOutputNameMatch = line.match(/@defaultOutputName:\s*(.*)/); + if (flagMatch) { + flags[flagMatch[1]] = { type: flagMatch[2], name: flagMatch[1], description: flagMatch[3] }; + } + if (defaultOutputNameMatch) { + 
defaultOutputName = defaultOutputNameMatch[1].trim();
+      }
+    } else {
+      queryLines.push(line.trim()); // Trim spaces from each query line
+    }
+  });
+
+  // Join the query lines and remove leading and trailing blank lines
+  const query = queryLines
+    .map((line) => line.replace(/--.*$/, '').trim())
+    .join('\n')
+    .replace(/^\s*\n|\s*\n$/g, '');
+
+  const cachedQuery: CachedQuery = { flags, query, defaultOutputName };
+  // Cache the parsed query data
+  queryDataCache.set(filePath, cachedQuery);
+
+  return cachedQuery;
+};
+
+// Function to validate the type of the flag values
+const validateType = (
+  expectedType: string,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  value: any,
+): boolean => {
+  switch (expectedType) {
+    case 'integer[]':
+      return Array.isArray(value) && value.every((v) => Number.isInteger(v));
+    case 'date[]':
+      return Array.isArray(value) && value.every((v) => !Number.isNaN(Date.parse(v)));
+    case 'string[]':
+      return Array.isArray(value) && value.every((v) => typeof v === 'string');
+    case 'boolean[]':
+      return Array.isArray(value) && value.every((v) => typeof v === 'boolean');
+    default:
+      throw new Error(`Unknown type: ${expectedType}`);
+  }
+};
+
+// Helper function to validate flag values
+const validateFlagValues = (flags: Flags, flagValues: FlagValues): void => {
+  Object.entries(flagValues).forEach(([key, value]) => {
+    if (!flags[key]) {
+      throw new Error(`Invalid flag: ${key}`);
+    }
+    const expectedType = flags[key].type;
+    if (!validateType(expectedType, value)) {
+      throw new Error(`Invalid type for flag ${key}: expected ${expectedType}`);
+    }
+  });
+};
+
+// Helper function to set flags in the database
+const setFlags = async (
+  flags: Flags,
+  flagValues: FlagValues,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+): Promise<any[]> => Promise.all(Object.entries(flagValues).map(async (
+  [key, value],
+) => db.sequelize.query(
+  'SELECT set_config($1, $2, false)',
+  {
+    bind: [`ssdi.${key}`, 
JSON.stringify(value)],
+    type: db.sequelize.QueryTypes.SELECT,
+  },
+)));
+
+// Helper function to sanitize filenames
+const sanitizeFilename = (filename: string): string => filename.replace(/[^a-zA-Z0-9-_]/g, '_');
+
+// Helper function to generate a string representation of the flag values
+const generateFlagString = (flagValues: FlagValues): string => Object.entries(flagValues)
+  .map(([key, value]) => `${key}_${Array.isArray(value) ? value.join('-') : value}`)
+  .join('_');
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+const executeQuery = async (query: string): Promise<any> => {
+  if (typeof query !== 'string') {
+    throw new Error('The query must be a string');
+  }
+
+  try {
+    // Set transaction to READ ONLY, this will fail the transaction if any tables are modified
+    await db.sequelize.query('SET TRANSACTION READ ONLY;', { type: QueryTypes.RAW });
+
+    const result = await db.sequelize.query(query, { type: QueryTypes.SELECT });
+    return result;
+  } catch (error) {
+    throw new Error(`Query failed: ${error.message}`);
+  }
+};
+
+export {
+  Flag,
+  Flags,
+  FlagValues,
+  QueryFile,
+  queryFileCache,
+  queryDataCache,
+  listQueryFiles,
+  readNameAndDescriptionFromFile,
+  readFlagsAndQueryFromFile,
+  validateFlagValues,
+  validateType,
+  setFlags,
+  sanitizeFilename,
+  generateFlagString,
+  executeQuery,
+};
diff --git a/src/worker.ts b/src/worker.ts
index 3dd7d1f37a..2456be9d12 100644
--- a/src/worker.ts
+++ b/src/worker.ts
@@ -6,6 +6,7 @@ if (process.env.NODE_ENV === 'production') {
 import {} from 'dotenv/config';
 
 import throng from 'throng';
+import httpContext from 'express-http-context';
 import {
   processScanQueue,
 } from './services/scanQueue';
@@ -25,22 +26,26 @@ import {
 // Number of workers to spawn
 const workers = process.env.WORKER_CONCURRENCY || 2;
 
-// Pull jobs off the redis queue and process them. 
+// Wrap your process functions to use httpContext async function start(context: { id: number }) { - // File Scanning Queue - processScanQueue(); + httpContext.ns.run(() => { + httpContext.set('workerId', context.id); - // S3 Queue. - processS3Queue(); + // File Scanning Queue + processScanQueue(); - // Resource Queue. - processResourceQueue(); + // S3 Queue. + processS3Queue(); - // Notifications Queue - processNotificationQueue(); + // Resource Queue. + processResourceQueue(); - // Maintenance Queue - processMaintenanceQueue(); + // Notifications Queue + processNotificationQueue(); + + // Maintenance Queue + processMaintenanceQueue(); + }); } // spawn workers and start them diff --git a/src/workers/referenceData.ts b/src/workers/referenceData.ts new file mode 100644 index 0000000000..ea21b58ee6 --- /dev/null +++ b/src/workers/referenceData.ts @@ -0,0 +1,28 @@ +import httpContext from 'express-http-context'; + +interface ReferenceData { + referenceData: { + userId: number | undefined; + impersonationId: number | undefined; + transactionId: string | undefined; + sessionSig: string | undefined; + } +} + +const referenceData = (): ReferenceData => { + const userId = httpContext.get('loggedUser') as number | undefined; + const impersonationId = httpContext.get('impersonationUserId') as number | undefined; + const transactionId = httpContext.get('transactionId') as string | undefined; + const sessionSig = httpContext.get('sessionSig') as string | undefined; + + return { + referenceData: { + userId, + impersonationId, + transactionId, + sessionSig, + }, + }; +}; + +export default referenceData; diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts new file mode 100644 index 0000000000..ab834217dd --- /dev/null +++ b/src/workers/transactionWrapper.ts @@ -0,0 +1,51 @@ +import httpContext from 'express-http-context'; +import { addAuditTransactionSettings, removeFromAuditedTransactions } from '../models/auditModelGenerator'; +import { sequelize 
} from '../models';
+import { handleWorkerErrors } from '../lib/apiErrorHandler';
+import { auditLogger } from '../logger';
+
+const namespace = 'WORKER:WRAPPER';
+const logContext = {
+  namespace,
+};
+
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+type Job = any; // Define the correct type for your job here
+
+const transactionQueueWrapper = (
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  originalFunction: (job: Job) => Promise<any>,
+  context = '',
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+) => async (job: Job): Promise<any> => {
+  const startTime = Date.now();
+  return httpContext.ns.runPromise(async () => {
+    httpContext.set('loggedUser', job.data.referenceData.userId);
+    httpContext.set('impersonationUserId', job.data.referenceData.impersonationId);
+    httpContext.set('sessionSig', job.id);
+    httpContext.set('auditDescriptor', originalFunction.name);
+    try {
+      // eslint-disable-next-line @typescript-eslint/return-await
+      return await sequelize.transaction(async (transaction) => {
+        httpContext.set('transactionId', transaction.id);
+        try {
+          // eslint-disable-next-line
+          await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name);
+          const result = await originalFunction(job);
+          const duration = Date.now() - startTime;
+          auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`);
+          removeFromAuditedTransactions();
+          return result;
+        } catch (err) {
+          auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${(err as Error).message}`);
+          throw err;
+        }
+      });
+    } catch (err) {
+      await handleWorkerErrors(job, err, logContext);
+      throw err;
+    }
+  });
+};
+
+export default transactionQueueWrapper;