Skip to content

Commit

Permalink
Merge branch 'main' into feature/CPF-492-only-treasury-uploads-filter
Browse files Browse the repository at this point in the history
  • Loading branch information
as1729 authored Nov 15, 2024
2 parents a146c6e + 199b883 commit 9a56334
Show file tree
Hide file tree
Showing 34 changed files with 199 additions and 154 deletions.
16 changes: 16 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Pre-commit hook configuration (https://pre-commit.com).
# Run `pre-commit install` once to activate these hooks locally.
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: check-yaml            # validate YAML file syntax
      - id: end-of-file-fixer     # ensure files end with exactly one newline
      - id: trailing-whitespace   # strip trailing whitespace
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.7.1
    hooks:
      # Run the linter.
      - id: ruff
        args: [--fix]
      # Run the formatter.
      - id: ruff-format
6 changes: 3 additions & 3 deletions .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs

Large diffs are not rendered by default.

16 changes: 8 additions & 8 deletions .yarn/releases/yarn-3.7.0.cjs

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion __mocks__/passage-user.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,4 @@ jest.mock('@passageidentity/passage-elements/passage-user', () => {
PassageUser: jest.fn(() => {}) ,
PassageUserInfo: jest.fn(() => {}) ,
};
});
});
Original file line number Diff line number Diff line change
@@ -1 +1 @@
CREATE UNIQUE INDEX "UploadValidation_unique_null_results_per_uploadid" ON "UploadValidation"("uploadId") WHERE results IS NULL;
CREATE UNIQUE INDEX "UploadValidation_unique_null_results_per_uploadid" ON "UploadValidation"("uploadId") WHERE results IS NULL;
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@ BEGIN;

CREATE OR REPLACE FUNCTION populate_organization_preferences()
RETURNS void AS $$
DECLARE
DECLARE
current_reporting_period_id INT;
BEGIN
BEGIN
-- Get one valid reporting period id --
SELECT id INTO current_reporting_period_id
FROM "ReportingPeriod"
SELECT id INTO current_reporting_period_id
FROM "ReportingPeriod"
WHERE "outputTemplateId" IS NOT NULL AND "inputTemplateId" IS NOT NULL
LIMIT 1;

Expand All @@ -27,4 +27,4 @@ $$ LANGUAGE plpgsql;

SELECT populate_organization_preferences();

COMMIT;
COMMIT;
2 changes: 1 addition & 1 deletion api/db/migrations/migration_lock.toml
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"
provider = "postgresql"
4 changes: 2 additions & 2 deletions api/db/schema.prisma
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ model UploadValidation {
upload Upload @relation(fields: [uploadId], references: [id], onDelete: Cascade)
results Json? @db.JsonB
passed Boolean
isManual Boolean @default(false)
isManual Boolean @default(false)
initiatedById Int
initiatedBy User @relation(fields: [initiatedById], references: [id])
createdAt DateTime @default(now()) @db.Timestamptz(6)
Expand Down Expand Up @@ -169,7 +169,7 @@ enum SubrecipientStatus {

model SubrecipientUpload {
id Int @id @default(autoincrement())
subrecipientId Int
subrecipientId Int
subrecipient Subrecipient @relation(fields: [subrecipientId], references: [id])
uploadId Int
upload Upload @relation(fields: [uploadId], references: [id])
Expand Down
142 changes: 82 additions & 60 deletions api/src/functions/processValidationJson/processValidationJson.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,14 +61,14 @@ export const handler = async (
/*
This allows us to easily test this function during local development by making a GET request as follows:
http://localhost:8911/processValidationJson?Records[0][s3][bucket][name]=cpf-reporter&Records[0][s3][object][key]=/uploads/1/2/3/14/test.json
{
"Records":[
{
{
"Records":[
{
"s3":{
"bucket":{
"bucket":{
"name":"cpf-reporter"
},
"object":{
"object":{
"key":"/uploads/1/2/3/4/test.json"
}
}
Expand Down Expand Up @@ -198,61 +198,7 @@ export const processRecord = async (

// If we passed validation, we will save the subrecipient info into our DB
if (passed && result.subrecipients?.length) {
const organizationId = extractOrganizationIdFromKey(key)
result.subrecipients.forEach((subrecipient) =>
saveSubrecipientInfo(
subrecipient,
key,
uploadId,
result.versionString,
organizationId
)
)

let reportingPeriod
try {
reportingPeriod = (
await db.upload.findUnique({
where: { id: uploadId },
include: { reportingPeriod: true },
})
).reportingPeriod
} catch (err) {
logger.error(`Could not find reporting period for upload ${uploadId}`)
throw new Error('Error determining reporting period for upload')
}

try {
const subrecipientKey = `treasuryreports/${organizationId}/${reportingPeriod.id}/subrecipients.json`
const startDate = new Date(
reportingPeriod.endDate.getFullYear(),
reportingPeriod.endDate.getMonth() + 1,
1
)
const endDate = new Date(
reportingPeriod.endDate.getFullYear(),
reportingPeriod.endDate.getMonth() + 2,
0
)
const subrecipientsWithUploads = await db.subrecipient.findMany({
where: {
createdAt: { lte: endDate, gte: startDate },
organizationId,
},
include: { subrecipientUploads: true },
})
const subrecipients = {
subrecipients: subrecipientsWithUploads,
}
await sendPutObjectToS3Bucket(
bucket,
subrecipientKey,
JSON.stringify(subrecipients)
)
} catch (err) {
logger.error(`Error saving subrecipients JSON file to S3: ${err}`)
throw new Error('Error saving subrecipient info to S3')
}
await handleSubrecipientUploads(result, key, uploadId, bucket)
}

// Delete the errors.json file from S3
Expand All @@ -272,6 +218,82 @@ export const processRecord = async (
}
}

/**
 * Persists subrecipient records extracted from a passing validation run, then
 * publishes a consolidated `subrecipients.json` snapshot for the upload's
 * reporting period to S3.
 *
 * @param result - validation output; expected to carry `subrecipients` and
 *   `versionString` (shape defined by the validator — intentionally left untyped here)
 * @param key - S3 object key of the validated upload (encodes the organization id)
 * @param uploadId - id of the Upload row the validation ran against
 * @param bucket - S3 bucket that receives the subrecipients snapshot
 * @throws Error when the upload's reporting period cannot be resolved, or when
 *   the snapshot cannot be written to S3
 */
async function handleSubrecipientUploads(
  result,
  key: string,
  uploadId: number,
  bucket: string
) {
  const organizationId = extractOrganizationIdFromKey(key)

  // Save sequentially so a failure surfaces before later writes are attempted.
  for (const subrecipient of result.subrecipients) {
    await saveSubrecipientInfo(
      subrecipient,
      key,
      uploadId,
      result.versionString,
      organizationId
    )
  }

  let reportingPeriod
  try {
    // Handle a missing upload explicitly rather than relying on the
    // TypeError thrown by dereferencing a null findUnique result.
    const upload = await db.upload.findUnique({
      where: { id: uploadId },
      include: { reportingPeriod: true },
    })
    if (!upload) {
      throw new Error(`Upload ${uploadId} not found`)
    }
    reportingPeriod = upload.reportingPeriod
  } catch (err) {
    logger.error(`Could not find reporting period for upload ${uploadId}`)
    throw new Error('Error determining reporting period for upload')
  }

  try {
    const subrecipientsWithUploads = await getNewlyCreatedSubrecipients(
      reportingPeriod,
      organizationId
    )
    const subrecipients = {
      subrecipients: subrecipientsWithUploads,
    }
    const subrecipientKey = `treasuryreports/${organizationId}/${reportingPeriod.id}/subrecipients.json`
    await sendPutObjectToS3Bucket(
      bucket,
      subrecipientKey,
      JSON.stringify(subrecipients)
    )
    logger.info(
      `Successfully saved subrecipients to S3: ${subrecipientsWithUploads.length}`
    )
  } catch (err) {
    logger.error(`Error saving subrecipients JSON file to S3: ${err}`)
    throw new Error('Error saving subrecipient info to S3')
  }
}

/**
 * Fetches subrecipients (with their uploads) created during the calendar month
 * immediately following the end of the given reporting period.
 *
 * Example: a period ending 2022-03-31 selects subrecipients created from
 * 2022-04-01T00:00 up to — but excluding — 2022-05-01T00:00.
 *
 * Fix: the previous upper bound was `lte` midnight on the last day of the
 * month (`new Date(y, m + 2, 0)`), which silently excluded records created at
 * any time *during* that final day, contradicting the documented window. An
 * exclusive `lt` bound at the first instant of the next month keeps the whole
 * last day.
 *
 * NOTE(review): these Date values are built in the server's local timezone
 * while `createdAt` is stored as a timestamptz — confirm the intended zone.
 *
 * @param reportingPeriod - reporting period row; must include `endDate`
 * @param organizationId - organization whose subrecipients are selected
 * @returns subrecipient rows including their `subrecipientUploads`
 */
async function getNewlyCreatedSubrecipients(
  reportingPeriod,
  organizationId: number
) {
  const year = reportingPeriod.endDate.getFullYear()
  const month = reportingPeriod.endDate.getMonth()

  // First instant of the month after the period ends.
  const startDate = new Date(year, month + 1, 1)
  // First instant of the month after that (exclusive upper bound).
  const endExclusive = new Date(year, month + 2, 1)

  return db.subrecipient.findMany({
    where: {
      createdAt: { gte: startDate, lt: endExclusive },
      organizationId,
    },
    include: { subrecipientUploads: true },
  })
}

async function saveSubrecipientInfo(
subrecipientInput: Subrecipient,
key: string,
Expand Down
2 changes: 1 addition & 1 deletion docker-compose.dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ services:
- TEST_DATABASE_URL=postgresql://redwood:redwood@db:5432/redwood_test
depends_on:
- db

# docker compose -f ./docker-compose.dev.yml run --rm -it python-console /bin/bash
python-console:
user: root
Expand Down
4 changes: 2 additions & 2 deletions docs/python-schema.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Python Schema for Excel CPF Templates

In order to validate user reporting templates, we define a schema in Python that matches the Excel template schema (tabs, columns, and field types).
This schema is defined and validated using Pydantic, a Python library for data validation (docs [here](https://docs.pydantic.dev/latest/)).
This schema is defined and validated using Pydantic, a Python library for data validation (docs [here](https://docs.pydantic.dev/latest/)).

## Note on terminology
For the purposes of this guide, we will use `file` to refer to code files, and `sheet` to refer to user-uploaded Excel reporting templates.
Expand All @@ -22,7 +22,7 @@ For example, in a `schema.py` file:
..., serialization_alias="Project Name", json_schema_extra={"column":"C"}
)
```
There should only be _one_ field per column per tab (e.g., the "Project" tab can only ever have a `Project_Name__c` field in column C).
There should only be _one_ field per column per tab (e.g., the "Project" tab can only ever have a `Project_Name__c` field in column C).

## Updating the schema

Expand Down
5 changes: 4 additions & 1 deletion python/src/functions/create_archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,10 @@ def handle(event: dict[str, Any], _context: Context):
create_archive(organization_id, reporting_period_id, boto3.client("s3"), logger)
except Exception:
logger.exception("Exception creating archive")
return {"statusCode": 500, "body": "Internal Server Error - unable to create archive"}
return {
"statusCode": 500,
"body": "Internal Server Error - unable to create archive",
}

return {
"statusCode": 200,
Expand Down
67 changes: 36 additions & 31 deletions python/src/functions/generate_presigned_url_and_send_email.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
Your treasury report can be downloaded here: {url}.
"""


class SendTreasuryEmailLambdaPayload(BaseModel):
organization: OrganizationObj
user: UserObj
Expand All @@ -34,7 +35,7 @@ def handle(event: SendTreasuryEmailLambdaPayload, context: Context):
contains a pre-signed URL to the following S3 object if it exists:
treasuryreports/{organization.id}/{organization.preferences.current_reporting_period_id}/report.zip
If the object does not exist then raise an exception.
Args:
event: S3 Lambda event of type `s3:ObjectCreated:*`
context: Lambda context
Expand All @@ -59,33 +60,37 @@ def handle(event: SendTreasuryEmailLambdaPayload, context: Context):


def generate_email(
user: UserObj,
logger: structlog.stdlib.BoundLogger,
presigned_url: str = "",
user: UserObj,
logger: structlog.stdlib.BoundLogger,
presigned_url: str = "",
) -> Tuple[Optional[str], Optional[str], Optional[str]]:
try:
with open("src/static/email_templates/formatted_body.html") as g:
email_body = chevron.render(g, {
"body_title": 'Hello,',
"body_detail": treasury_email_html.format(
url = presigned_url
),
})
with open("src/static/email_templates/base.html") as f:
email_html = chevron.render(f, {
"tool_name": "CPF",
"title": "CPF Treasury Report",
"preheader": False,
"webview_available": False,
"base_url_safe": "",
"usdr_logo_url": 'https://grants.usdigitalresponse.org/usdr_logo_transparent.png',
"presigned_url": presigned_url,
"notifications_url_safe": False,
"email_body": email_body,
email_body = chevron.render(
g,
{
"body_title": "Hello,",
"body_detail": treasury_email_html.format(url=presigned_url),
},
partials_dict = {
"email_body": email_body,
})
)
with open("src/static/email_templates/base.html") as f:
email_html = chevron.render(
f,
{
"tool_name": "CPF",
"title": "CPF Treasury Report",
"preheader": False,
"webview_available": False,
"base_url_safe": "",
"usdr_logo_url": "https://grants.usdigitalresponse.org/usdr_logo_transparent.png",
"presigned_url": presigned_url,
"notifications_url_safe": False,
"email_body": email_body,
},
partials_dict={
"email_body": email_body,
},
)
email_text = treasury_email_text.format(url=presigned_url)
subject = "USDR CPF Treasury Report"
return email_html, email_text, subject
Expand All @@ -95,8 +100,8 @@ def generate_email(


def process_event(
payload: SendTreasuryEmailLambdaPayload,
logger: structlog.stdlib.BoundLogger,
payload: SendTreasuryEmailLambdaPayload,
logger: structlog.stdlib.BoundLogger,
):
"""
This function is structured as followed:
Expand All @@ -110,20 +115,20 @@ def process_event(
s3_client = boto3.client("s3")
user = payload.user
organization = payload.organization

presigned_url = get_presigned_url(
s3_client=s3_client,
bucket=os.environ["REPORTING_DATA_BUCKET_NAME"],
key=f"treasuryreports/{organization.id}/{organization.preferences.current_reporting_period_id}/report.zip",
expiration_time=60 * 60, # 1 hour
)
if presigned_url is None:
raise Exception('Failed to generate signed-URL or file not found')
raise Exception("Failed to generate signed-URL or file not found")

email_html, email_text, subject = generate_email(
user = user,
presigned_url = presigned_url,
logger = logger,
user=user,
presigned_url=presigned_url,
logger=logger,
)
if not email_html:
return False
Expand Down
Loading

0 comments on commit 9a56334

Please sign in to comment.