diff --git a/docker-compose.ci.yml b/docker-compose.ci.yml index 32d256805..8a44dd3fb 100644 --- a/docker-compose.ci.yml +++ b/docker-compose.ci.yml @@ -3,6 +3,7 @@ version: "3.4" services: backend: volumes: + - ./packages/webapp-libs/webapp-emails/build/email-renderer/:/app/scripts/email/renderer:ro - ./packages/backend/cov:/app/cov - ./packages/backend/docs:/app/docs environment: @@ -14,7 +15,6 @@ services: workers: volumes: - - ./packages/webapp-libs/webapp-emails/build/email-renderer/:/app/packages/workers/emails/renderer - ./packages/workers/cov:/app/packages/workers/cov environment: - CI=true diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 8c9fcc735..dc883abeb 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -1,8 +1,8 @@ -version: "3.4" +version: '3.4' volumes: web_backend_db_data: - name: "${PROJECT_NAME}-web-backend-db-data" + name: '${PROJECT_NAME}-web-backend-db-data' external: true web_backend_staticfiles: {} @@ -17,9 +17,12 @@ services: backend: volumes: + - ./packages/webapp-libs/webapp-emails/build/email-renderer/:/app/scripts/runtime/email/renderer:ro - ./packages/backend/:/app - ./packages/backend/docs:/app/docs - /app/__pypackages__ + - /app/node_modules + - /app/cdk.out - web_backend_staticfiles:/app/static env_file: - ./packages/backend/.env @@ -31,10 +34,22 @@ services: - localstack - mailcatcher - workers + - flower + + celery_beat: + env_file: + - ./packages/backend/.env + + celery_default: + command: ["./scripts/runtime/run_local_celery_worker_default.sh"] + volumes: + - ./packages/backend/:/app + - /app/__pypackages__ + env_file: + - ./packages/backend/.env workers: volumes: - - ./packages/webapp-libs/webapp-emails/build/email-renderer/:/app/packages/workers/emails/renderer - ./packages/workers/:/app/packages/workers/ - /app/packages/workers/node_modules/ - /app/packages/workers/__pypackages__/ @@ -44,21 +59,31 @@ services: environment: - AWS_ENDPOINT_URL=http://localstack:4566 - ENV_STAGE=${ENV_STAGE:-} - depends_on: - db - mailcatcher ports: - - "3005:3005" + - '3005:3005' redis: volumes: - redis_cache:/data + flower: + image: '${PROJECT_NAME}/backend' + command: ['./scripts/runtime/run_local_celery_flower.sh'] + env_file: + - ./packages/backend/.env + ports: + - '5555:5555' + depends_on: + redis: + condition: service_healthy + localstack: image: localstack/localstack:2.3.0 ports: - - "4566:4566" + - '4566:4566' environment: - SERVICES=serverless,events,cloudformation,ses,secretsmanager - DEFAULT_REGION=eu-west-1 @@ -70,8 +95,8 @@ services: - AWS_SECRET_ACCESS_KEY=bar - HOST_TMP_FOLDER=/tmp volumes: - - "/tmp/localstack:/tmp/localstack" - - "/var/run/docker.sock:/var/run/docker.sock" + - '/tmp/localstack:/tmp/localstack' + - '/var/run/docker.sock:/var/run/docker.sock' privileged: true depends_on: - db @@ -81,6 +106,6 @@ services: mailcatcher: image: sj26/mailcatcher:v0.9.0 ports: - - "1080:1080" - - "1025:1025" + - '1080:1080' + - '1025:1025' restart: always diff --git a/docker-compose.yml b/docker-compose.yml index bb1377449..27ebea2a3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -21,6 +21,7 @@ services: build: context: ./packages/backend target: backend + image: "${PROJECT_NAME}/backend" command: ["./scripts/runtime/run_local.sh"] ports: - "5001:5001" @@ -42,6 +43,22 @@ services: stdin_open: true tty: true + celery_beat: + image: '${PROJECT_NAME}/backend' + command: ["./scripts/runtime/run_celery_beat.sh"] + restart: always + depends_on: + backend: + condition: service_started + + celery_default: + 
image: '${PROJECT_NAME}/backend' + command: ["./scripts/runtime/run_celery_worker_default.sh"] + restart: always + depends_on: + backend: + condition: service_started + workers: build: context: . diff --git a/packages/backend/.env.shared b/packages/backend/.env.shared index 7a762079b..84f4dc315 100644 --- a/packages/backend/.env.shared +++ b/packages/backend/.env.shared @@ -11,8 +11,8 @@ ENVIRONMENT_NAME=local ADMIN_EMAIL=admin@example.com ADMIN_DEFAULT_PASSWORD=password -TASKS_LOCAL_URL=http://workers:3005 -TASKS_BASE_HANDLER=common.tasks.TaskLocalInvoke +LAMBDA_TASKS_LOCAL_URL=http://workers:3005 +LAMBDA_TASKS_BASE_HANDLER=common.tasks.LambdaTaskLocalInvoke DB_CONNECTION={"dbname":"backend","username":"backend","password":"backend","host":"db","port":5432} REDIS_CONNECTION=redis://redis:6379 @@ -43,4 +43,13 @@ AWS_XRAY_SDK_ENABLED=False OTP_AUTH_ISSUER_NAME=example.com -OPENAI_API_KEY= \ No newline at end of file +OPENAI_API_KEY= + +EMAIL_BACKEND=django.core.mail.backends.smtp.EmailBackend +EMAIL_HOST=mailcatcher +EMAIL_PORT=1025 +EMAIL_FROM_ADDRESS=from@example.org +EMAIL_REPLY_ADDRESS=from@example.org + +VITE_EMAIL_ASSETS_URL=http://localhost:3000/email-assets +VITE_WEB_APP_URL=http://localhost:3000 diff --git a/packages/backend/.gitignore b/packages/backend/.gitignore index 642032eb0..7eca62a2a 100644 --- a/packages/backend/.gitignore +++ b/packages/backend/.gitignore @@ -10,4 +10,6 @@ docs/ # Coverage .coverage -coverage.xml \ No newline at end of file +coverage.xml + +scripts/runtime/email/renderer/* diff --git a/packages/backend/.test.env b/packages/backend/.test.env index c150bd4d7..e5aaf6aa0 100644 --- a/packages/backend/.test.env +++ b/packages/backend/.test.env @@ -15,7 +15,7 @@ REDIS_CONNECTION=redis://redis:6379 WORKERS_EVENT_BUS_NAME=local-workers -TASKS_BASE_HANDLER=common.tasks.Task +LAMBDA_TASKS_BASE_HANDLER=common.tasks.LambdaTask AWS_DEFAULT_REGION=eu-west-1 PARENT_HOST=example.org @@ -31,4 +31,10 @@ AWS_ENDPOINT_URL= OTP_AUTH_ISSUER_NAME=example.com -OPENAI_API_KEY=sk-example \ No newline at end of file +OPENAI_API_KEY=sk-example + +EMAIL_BACKEND=django.core.mail.backends.locmem.EmailBackend +EMAIL_FROM_ADDRESS=from@example.org +EMAIL_REPLY_ADDRESS=from@example.org +VITE_EMAIL_ASSETS_URL=http://localhost:3000/email-assets +VITE_WEB_APP_URL=http://localhost:3000 diff --git a/packages/backend/Dockerfile b/packages/backend/Dockerfile index 58ae536ef..0058df544 100644 --- a/packages/backend/Dockerfile +++ b/packages/backend/Dockerfile @@ -15,12 +15,15 @@ ENV PYTHONUNBUFFERED 1 ENV PIP_NO_CACHE_DIR off -RUN apt-get update && apt-get install -y gcc postgresql-client ca-certificates jq \ +RUN apt-get update && apt-get install -y gcc postgresql-client ca-certificates jq curl \ && update-ca-certificates \ && pip install --upgrade pip \ && pip install --no-cache-dir setuptools pdm~=2.5.2 awscli==1.32.24 +RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \ + && apt-get --no-install-recommends install -y nodejs + COPY --from=chamber /chamber /bin/chamber WORKDIR /pkgs diff --git a/packages/backend/apps/content/tasks.py b/packages/backend/apps/content/tasks.py index 5daf857d2..2eea5124f 100644 --- a/packages/backend/apps/content/tasks.py +++ b/packages/backend/apps/content/tasks.py @@ -5,11 +5,11 @@ logger = logging.getLogger(__name__) -module_name, package = settings.TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) -Task = getattr(importlib.import_module(module_name), package) +module_name, package = settings.LAMBDA_TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) +LambdaTask = 
getattr(importlib.import_module(module_name), package) -class ContentfulSync(Task): +class ContentfulSync(LambdaTask): def __init__(self, name: str): super().__init__(name=name, source='backend.contentfulSync') diff --git a/packages/backend/apps/finances/tests/test_webhooks.py b/packages/backend/apps/finances/tests/test_webhooks.py index d6436e9eb..895bc3ece 100644 --- a/packages/backend/apps/finances/tests/test_webhooks.py +++ b/packages/backend/apps/finances/tests/test_webhooks.py @@ -2,6 +2,7 @@ import calleee import pytest +from unittest.mock import patch from djstripe import models as djstripe_models from djstripe.enums import RefundStatus, RefundFailureReason @@ -77,7 +78,8 @@ def test_subscription_schedule_from_subscription( class TestSendSubscriptionErrorEmail: - def test_send_email_on_invoice_payment_failed(self, webhook_event_factory, subscription, task_apply): + @patch('common.emails.send_email') + def test_send_email_on_invoice_payment_failed(self, send_email, webhook_event_factory, subscription): webhook_event = webhook_event_factory( type='invoice.payment_failed', data={ @@ -91,9 +93,12 @@ def test_send_email_on_invoice_payment_failed(self, webhook_event_factory, subsc webhook_event.invoke_webhook_handlers() - task_apply.assert_email_sent(notifications.SubscriptionErrorEmail, subscription.customer.subscriber.email) + send_email.apply_async.assert_called_with( + (subscription.customer.subscriber.email, notifications.SubscriptionErrorEmail.name, None) + ) - def test_send_email_on_invoice_payment_required(self, webhook_event_factory, subscription, task_apply): + @patch('common.emails.send_email') + def test_send_email_on_invoice_payment_required(self, send_email, webhook_event_factory, subscription): webhook_event = webhook_event_factory( type='invoice.payment_action_required', data={ @@ -107,11 +112,14 @@ def test_send_email_on_invoice_payment_required(self, webhook_event_factory, sub webhook_event.invoke_webhook_handlers() - task_apply.assert_email_sent(notifications.SubscriptionErrorEmail, subscription.customer.subscriber.email) + send_email.apply_async.assert_called_with( + (subscription.customer.subscriber.email, notifications.SubscriptionErrorEmail.name, None) + ) class TestSendTrialExpiresSoonEmail: - def test_previously_trialing_subscription_is_canceled(self, webhook_event_factory, customer, task_apply): + @patch('common.emails.send_email') + def test_previously_trialing_subscription_is_canceled(self, send_email, webhook_event_factory, customer): webhook_event = webhook_event_factory( type='customer.subscription.trial_will_end', data={'object': {'object': 'subscription', 'customer': customer.id, 'trial_end': 1617103425}}, @@ -119,8 +127,12 @@ def test_previously_trialing_subscription_is_canceled(self, webhook_event_factor webhook_event.invoke_webhook_handlers() - task_apply.assert_email_sent( - notifications.TrialExpiresSoonEmail, customer.subscriber.email, {'expiry_date': '2021-03-30T11:23:45Z'} + send_email.apply_async.assert_called_with( + ( + customer.subscriber.email, + notifications.TrialExpiresSoonEmail.name, + {'expiry_date': '2021-03-30T11:23:45Z'}, + ) ) diff --git a/packages/backend/apps/users/tasks.py b/packages/backend/apps/users/tasks.py index 3315d0666..263d8f2ea 100644 --- a/packages/backend/apps/users/tasks.py +++ b/packages/backend/apps/users/tasks.py @@ -2,10 +2,10 @@ from django.conf import settings -module_name, package = settings.TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) -Task = getattr(importlib.import_module(module_name), package) +module_name, 
package = settings.LAMBDA_TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) +LambdaTask = getattr(importlib.import_module(module_name), package) -class ExportUserData(Task): +class ExportUserData(LambdaTask): def __init__(self): super().__init__(name="EXPORT_USER_DATA", source='backend.export_user') diff --git a/packages/backend/common/emails.py b/packages/backend/common/emails.py index 499af9e84..3a2e2a98d 100644 --- a/packages/backend/common/emails.py +++ b/packages/backend/common/emails.py @@ -1,26 +1,11 @@ -import importlib +import json +import os +import subprocess +from celery import shared_task, states +from celery.exceptions import Ignore from django.conf import settings - -module_name, package = settings.TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) -Task = getattr(importlib.import_module(module_name), package) - - -class SendEmail(Task): - def __init__(self, name: str): - super().__init__(name=name, source='backend.email') - - def apply(self, to: str, data, due_date=None): - if data is None: - data = {} - - super().apply( - { - "to": to, - **data, - }, - due_date, - ) +from django.core.mail import EmailMessage class BaseEmail: @@ -60,5 +45,59 @@ def send(self, due_date=None): serializer.is_valid(raise_exception=True) send_data = serializer.data - email_task = SendEmail(self.name) - email_task.apply(to=self.to, data=send_data, due_date=due_date) + # TODO: Handle due_date + send_email.apply_async((self.to, self.name, send_data)) + + +@shared_task(bind=True) +def send_email(self, to: str | list[str], email_type: str, email_data: dict): + render_script = ''' + const { renderEmail } = require('./email'); + console.log(JSON.stringify(renderEmail('%s', %s))); + process.exit(0); + ''' % ( + email_type, + json.dumps(email_data), + ) + + try: + node_process = subprocess.run( + ["node"], + shell=True, + input=bytes(render_script, 'utf-8'), + capture_output=True, + check=True, + cwd='/app/scripts/runtime', + # Environmental variables are mapped manually to avoid secret values from being exposed to email renderer + # script that is usually maintained by non-backend developers + env={ + 'DEBUG': str(settings.DEBUG), + 'VITE_EMAIL_ASSETS_URL': os.environ.get('VITE_EMAIL_ASSETS_URL', ''), + 'VITE_WEB_APP_URL': os.environ.get('VITE_WEB_APP_URL', ''), + }, + ) + except subprocess.CalledProcessError as e: + self.update_state( + state=states.FAILURE, + meta={ + 'return_code': e.returncode, + 'cmd': e.cmd, + 'output': e.output, + 'stderr': e.stderr, + }, + ) + raise Ignore() + + if isinstance(to, str): + to = (to,) + + rendered_email = json.loads(node_process.stdout) + email = EmailMessage( + rendered_email['subject'], + rendered_email['html'], + settings.EMAIL_FROM_ADDRESS, + to, + reply_to=settings.EMAIL_REPLY_ADDRESS, + ) + email.content_subtype = 'html' + return {'sent_emails_count': email.send()} diff --git a/packages/backend/common/tasks.py b/packages/backend/common/tasks.py index e4b6cf831..70411a9be 100644 --- a/packages/backend/common/tasks.py +++ b/packages/backend/common/tasks.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -class Task: +class LambdaTask: def __init__(self, name: str, source: str, event_bus_name=settings.WORKERS_EVENT_BUS_NAME): self.name = name self.source = source @@ -32,13 +32,15 @@ def make_entry(data: dict, source: str, detail_type: str, event_bus_name: str): } def get_entry(self, data: dict): - return Task.make_entry(data=data, source=self.source, detail_type=self.name, event_bus_name=self.event_bus_name) + return LambdaTask.make_entry( + data=data, 
source=self.source, detail_type=self.name, event_bus_name=self.event_bus_name + ) def apply(self, data: dict, due_date: datetime = None): task_entry = self.get_entry(data) if due_date is not None: - task_entry = Task.make_entry( + task_entry = LambdaTask.make_entry( data={ 'entry': task_entry, 'due_date': due_date.isoformat(), @@ -48,7 +50,7 @@ def apply(self, data: dict, due_date: datetime = None): detail_type='backend.scheduler', ) - Task._apply(entry=task_entry) + LambdaTask._apply(entry=task_entry) @classmethod def _apply(cls, entry): @@ -56,20 +58,20 @@ def _apply(cls, entry): client.put_events(Entries=[entry]) -class TaskLocalInvoke(Task): +class LambdaTaskLocalInvoke(LambdaTask): def apply(self, data: dict, due_date: datetime = None): if due_date is None: due_date = datetime.now(tz=timezone.utc) entry = self.get_entry(data) response = requests.post( - settings.TASKS_LOCAL_URL, json={**entry, 'Time': datetime.now().isoformat()}, timeout=10 + settings.LAMBDA_TASKS_LOCAL_URL, json={**entry, 'Time': datetime.now().isoformat()}, timeout=10 ) logger.info(f"Invoking local task: {entry=} at {due_date.isoformat()}") logger.info(f"Invoke local response status code: {response.status_code}") -class TaskPrinter(Task): +class LambdaTaskPrinter(LambdaTask): def apply(self, data: dict, due_date=None): if due_date is None: due_date = datetime.now() diff --git a/packages/backend/common/tests/fixtures.py b/packages/backend/common/tests/fixtures.py index eb85e60b6..2df696698 100644 --- a/packages/backend/common/tests/fixtures.py +++ b/packages/backend/common/tests/fixtures.py @@ -5,8 +5,8 @@ @pytest.fixture(autouse=True) -def task_apply(mocker): - spy = mocker.patch("common.tasks.Task._apply") +def lambda_task_apply(mocker): + spy = mocker.patch("common.tasks.LambdaTask._apply") def assert_task_applied( source: str, @@ -34,17 +34,6 @@ def assert_task_applied( raise AssertionError("Task apply not found") - def assert_email_sent(email_class, to, data=None): - if data is None: - data = {} - - assert_task_applied( - source="backend.email", - detail_type=email_class.name, - data={"type": email_class.name, "to": to, **data}, - ) - spy.assert_task_applied = assert_task_applied - spy.assert_email_sent = assert_email_sent return spy diff --git a/packages/backend/common/tests/test_emails.py b/packages/backend/common/tests/test_emails.py new file mode 100644 index 000000000..9c1fd7db6 --- /dev/null +++ b/packages/backend/common/tests/test_emails.py @@ -0,0 +1,24 @@ +from unittest.mock import MagicMock + +import pytest +from django.conf import settings + +from django.core import mail +from ..emails import send_email + +pytestmark = pytest.mark.django_db + + +class TestSendEmail: + def test_send_example_email(self): + to = 'test@example.org' + email_type = 'ACCOUNT_ACTIVATION' + email_data = {'token': 'test-token', 'user_id': 'test-user'} + send_email(to, email_type, email_data) + + assert len(mail.outbox) == 1 + + sent_email = mail.outbox[0] + assert sent_email.to == [to] + assert sent_email.from_email == settings.EMAIL_FROM_ADDRESS + assert f"http://localhost:3000/en/auth/confirm/{email_data['user_id']}/{email_data['token']}" in sent_email.body diff --git a/packages/backend/common/tests/test_tasks.py b/packages/backend/common/tests/test_tasks.py index c631d346e..81ac57c32 100644 --- a/packages/backend/common/tests/test_tasks.py +++ b/packages/backend/common/tests/test_tasks.py @@ -4,29 +4,29 @@ import pytest from config import settings -from ..tasks import Task +from ..tasks import LambdaTask pytestmark = 
pytest.mark.django_db class TestScheduledTasks: - def test_apply_task_entry(self, task_apply): - task = Task(name='test-task', source='test-source', event_bus_name=settings.WORKERS_EVENT_BUS_NAME) + def test_apply_task_entry(self, lambda_task_apply): + task = LambdaTask(name='test-task', source='test-source', event_bus_name=settings.WORKERS_EVENT_BUS_NAME) task_data = {'test_param': 'test-value'} task.apply(task_data) - task_apply.assert_task_applied(source='test-source', detail_type='test-task', data=task_data) + lambda_task_apply.assert_task_applied(source='test-source', detail_type='test-task', data=task_data) - def test_schedule_task_entry(self, task_apply): - task = Task(name='test-task', source='test-source', event_bus_name=settings.WORKERS_EVENT_BUS_NAME) + def test_schedule_task_entry(self, lambda_task_apply): + task = LambdaTask(name='test-task', source='test-source', event_bus_name=settings.WORKERS_EVENT_BUS_NAME) task_data = {'test_param': 'test-value'} due_date = datetime.datetime.now() event_scheduler_type = 'backend.scheduler' task.apply(task_data, due_date=due_date) - called_entry_kwarg = task_apply.call_args.kwargs['entry'] + called_entry_kwarg = lambda_task_apply.call_args.kwargs['entry'] called_entry_kwarg_data = json.loads(called_entry_kwarg['Detail']) called_wrapped_entry = called_entry_kwarg_data['entry'] called_wrapped_entry_data = json.loads(called_entry_kwarg_data['entry']['Detail']) diff --git a/packages/backend/config/__init__.py b/packages/backend/config/__init__.py index e69de29bb..15d7c5085 100644 --- a/packages/backend/config/__init__.py +++ b/packages/backend/config/__init__.py @@ -0,0 +1,5 @@ +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. +from .celery import app as celery_app + +__all__ = ('celery_app',) diff --git a/packages/backend/config/celery.py b/packages/backend/config/celery.py new file mode 100644 index 000000000..ce53b74d2 --- /dev/null +++ b/packages/backend/config/celery.py @@ -0,0 +1,17 @@ +import os + +from celery import Celery + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') + +app = Celery('tasks') + +app.config_from_object('django.conf:settings', namespace='CELERY') + +# Load task modules from all registered Django apps. 
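+# autodiscover_tasks() imports the "tasks" module of every app listed in INSTALLED_APPS.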
+app.autodiscover_tasks() + + +@app.task(bind=True, ignore_result=True) +def debug_task(self): + print(f'Request: {self.request!r}') # noqa diff --git a/packages/backend/config/settings.py b/packages/backend/config/settings.py index fa2413bed..a55f01340 100644 --- a/packages/backend/config/settings.py +++ b/packages/backend/config/settings.py @@ -44,6 +44,8 @@ THIRD_PARTY_APPS = [ "django_extensions", + 'django_celery_results', + 'django_celery_beat', "djstripe", "django_hosts", "drf_yasg", @@ -53,7 +55,6 @@ "whitenoise", "graphene_django", 'channels', - # 'channels_postgres', "aws_xray_sdk.ext.django", ] @@ -306,8 +307,8 @@ AWS_ENDPOINT_URL = env("AWS_ENDPOINT_URL", default=None) -TASKS_BASE_HANDLER = env("TASKS_BASE_HANDLER", default="common.tasks.Task") -TASKS_LOCAL_URL = env("TASKS_LOCAL_URL", default=None) +LAMBDA_TASKS_BASE_HANDLER = env("LAMBDA_TASKS_BASE_HANDLER", default="common.tasks.LambdaTask") +LAMBDA_TASKS_LOCAL_URL = env("LAMBDA_TASKS_LOCAL_URL", default=None) STRIPE_LIVE_SECRET_KEY = env("STRIPE_LIVE_SECRET_KEY", default="sk_") STRIPE_TEST_SECRET_KEY = env("STRIPE_TEST_SECRET_KEY", default="sk_test_") @@ -373,3 +374,17 @@ def tenant_request_callback(request): USER_DOCUMENTS_NUMBER_LIMIT = env.int("USER_DOCUMENTS_NUMBER_LIMIT", default=10) TENANT_INVITATION_TIMEOUT = env("TENANT_INVITATION_TIMEOUT", default=60 * 60 * 24 * 14) + +CELERY_RESULT_BACKEND = 'django-db' +CELERY_BROKER_URL = f'{env("REDIS_CONNECTION")}/0' +CELERY_BROKER_TRANSPORT_OPTIONS = { + 'visibility_timeout': 3600, +} + +EMAIL_BACKEND = env("EMAIL_BACKEND", default="django_ses.SESBackend") +EMAIL_HOST = env("EMAIL_HOST", default=None) +EMAIL_PORT = env("EMAIL_PORT", default=None) +EMAIL_HOST_USER = env("EMAIL_HOST_USER", default=None) +EMAIL_HOST_PASSWORD = env("EMAIL_HOST_PASSWORD", default=None) +EMAIL_FROM_ADDRESS = env("EMAIL_FROM_ADDRESS", default=None) +EMAIL_REPLY_ADDRESS = env.list("EMAIL_REPLY_ADDRESS", default=(EMAIL_FROM_ADDRESS,)) diff --git a/packages/backend/conftest.py b/packages/backend/conftest.py index 97a97866e..60b786053 100644 --- a/packages/backend/conftest.py +++ b/packages/backend/conftest.py @@ -13,6 +13,7 @@ from storages.backends.s3boto3 import S3Boto3Storage pytest_plugins = [ + 'celery.contrib.pytest', 'tests.aws_fixtures', 'common.tests.fixtures', 'apps.users.tests.fixtures', diff --git a/packages/backend/infra/main.ts b/packages/backend/infra/main.ts index 9f0af6eb4..013f89b7b 100644 --- a/packages/backend/infra/main.ts +++ b/packages/backend/infra/main.ts @@ -1,11 +1,9 @@ import { App } from 'aws-cdk-lib'; -import { - loadEnvSettings, - getEnvStackName, -} from '@sb/infra-core'; +import { getEnvStackName, loadEnvSettings } from '@sb/infra-core'; import { ApiStack } from './stacks/api'; import { MigrationsStack } from './stacks/migrations'; +import { CeleryStack } from './stacks/celeryWorkers'; (async () => { const envSettings = await loadEnvSettings(); @@ -18,4 +16,8 @@ import { MigrationsStack } from './stacks/migrations'; new MigrationsStack(app, getEnvStackName('MigrationsStack', envSettings), { envSettings, }); + + new CeleryStack(app, getEnvStackName('CeleryStack', envSettings), { + envSettings, + }); })(); diff --git a/packages/backend/infra/stacks/api/stack.ts b/packages/backend/infra/stacks/api/stack.ts index 09786a8aa..6d9178480 100644 --- a/packages/backend/infra/stacks/api/stack.ts +++ b/packages/backend/infra/stacks/api/stack.ts @@ -1,27 +1,18 @@ import { App, Duration, Fn, Stack, StackProps } from 'aws-cdk-lib'; import * as ecs from 'aws-cdk-lib/aws-ecs'; -import * 
as iam from 'aws-cdk-lib/aws-iam'; -import * as sm from 'aws-cdk-lib/aws-secretsmanager'; import * as elb2 from 'aws-cdk-lib/aws-elasticloadbalancingv2'; -import * as s3 from 'aws-cdk-lib/aws-s3'; -import * as events from 'aws-cdk-lib/aws-events'; import { + ApplicationMultipleTargetGroupsFargateService, EnvConstructProps, getHostedZone, - ApplicationMultipleTargetGroupsFargateService, - EnvironmentSettings, } from '@sb/infra-core'; -import { - MainKmsKey, - MainDatabase, - MainECSCluster, - MainRedisCluster, - EnvComponentsStack, - FargateServiceResources, -} from '@sb/infra-shared'; +import { FargateServiceResources, MainECSCluster } from '@sb/infra-shared'; import { Monitoring } from './monitoring'; import { getApiServiceName } from './names'; +import { createBackendTaskRole } from '../lib/backendTaskRole'; +import { getBackendEnvironment } from '../lib/environment'; +import { getBackendSecrets } from '../lib/secrets'; export interface ApiStackProps extends StackProps, EnvConstructProps {} @@ -54,11 +45,10 @@ export class ApiStack extends Stack { props: ApiStackProps, ) { const { envSettings } = props; - const taskRole = this.createTaskRole(props); + const taskRole = createBackendTaskRole(this, 'ApiTaskRole', { + envSettings, + }); - const dbSecretArn = Fn.importValue( - MainDatabase.getDatabaseSecretArnOutputExportName(envSettings), - ); const domainZone = getHostedZone(this, envSettings); const allowedHosts = [ @@ -85,11 +75,9 @@ export class ApiStack extends Stack { securityGroup: resources.publicLoadBalancerSecurityGroup, }, ); - const stack = Stack.of(this); - const webSocketApiId = Fn.importValue( - EnvComponentsStack.getWebSocketApiIdOutputExportName(props.envSettings), - ); - return new ApplicationMultipleTargetGroupsFargateService( + + const healthCheckPath = '/lbcheck'; + return new ApplicationMultipleTargetGroupsFargateService( this, 'ApiService', { @@ -113,55 +101,12 @@ export class ApiStack extends Stack { resources.backendRepository, envSettings.version, ), - environment: { - PROJECT_NAME: envSettings.projectName, - ENVIRONMENT_NAME: envSettings.envStage, - CHAMBER_SERVICE_NAME: this.getChamberServiceName(envSettings), - CHAMBER_KMS_KEY_ALIAS: MainKmsKey.getKeyAlias(envSettings), - DJANGO_ALLOWED_HOSTS: allowedHosts, - CSRF_TRUSTED_ORIGINS: csrfTrustedOrigins, - OTP_AUTH_ISSUER_NAME: envSettings.domains.webApp, - WORKERS_EVENT_BUS_NAME: EnvComponentsStack.getWorkersEventBusName( - props.envSettings, - ), - WEB_SOCKET_API_ENDPOINT_URL: `https://${webSocketApiId}.execute-api.${stack.region}.amazonaws.com/${props.envSettings.envStage}`, - AWS_STORAGE_BUCKET_NAME: - EnvComponentsStack.getFileUploadsBucketName(props.envSettings), - AWS_S3_CUSTOM_DOMAIN: props.envSettings.domains.cdn, - DB_PROXY_ENDPOINT: Fn.importValue( - MainDatabase.getDatabaseProxyEndpointOutputExportName( - props.envSettings, - ), - ), - AWS_CLOUDFRONT_KEY_ID: Fn.importValue( - EnvComponentsStack.getCdnSigningPublicKeyIdExportName( - props.envSettings, - ), - ), - REDIS_CONNECTION: Fn.join('', [ - 'redis://', - Fn.importValue( - MainRedisCluster.getMainRedisClusterAddressExportName( - props.envSettings, - ), - ), - ':6379', - ]), - }, - secrets: { - DB_CONNECTION: ecs.Secret.fromSecretsManager( - sm.Secret.fromSecretCompleteArn(this, 'DbSecret', dbSecretArn), - ), - AWS_CLOUDFRONT_KEY: ecs.Secret.fromSecretsManager( - sm.Secret.fromSecretNameV2( - this, - 'CloudfrontPrivateKey', - `${EnvComponentsStack.getCDNSigningKeyName( - props.envSettings, - )}/private`, - ), - ), - }, + environment: 
getBackendEnvironment(this, { + envSettings, + allowedHosts, + csrfTrustedOrigins, + }), + secrets: getBackendSecrets(this, { envSettings }), }, { containerName: 'xray-daemon', @@ -190,6 +135,7 @@ export class ApiStack extends Stack { containerPort: 80, priority: 2, hostHeader: envSettings.domains.api, + healthCheckPath, }, ...(envSettings.domains.webApp ? [ @@ -198,6 +144,7 @@ export class ApiStack extends Stack { containerPort: 80, priority: 3, hostHeader: envSettings.domains.webApp, + healthCheckPath, }, ] : []), @@ -208,6 +155,7 @@ export class ApiStack extends Stack { containerPort: 80, priority: 4, hostHeader: envSettings.domains.www, + healthCheckPath, }, ] : []), @@ -218,6 +166,7 @@ export class ApiStack extends Stack { containerPort: 80, priority: 5, hostHeader: envSettings.domains.adminPanel, + healthCheckPath, }, ] : []), @@ -225,87 +174,4 @@ export class ApiStack extends Stack { }, ); } - - protected createTaskRole({ - envSettings, - }: { - envSettings: EnvironmentSettings; - }): iam.Role { - const stack = Stack.of(this); - const chamberServiceName = this.getChamberServiceName(envSettings); - - const taskRole = new iam.Role(this, 'ApiTaskRole', { - assumedBy: new iam.ServicePrincipal('ecs-tasks'), - }); - - const fileUploadsBucket = s3.Bucket.fromBucketName( - this, - 'FileUploadsBucket', - EnvComponentsStack.getFileUploadsBucketName(envSettings), - ); - fileUploadsBucket.grantReadWrite(taskRole); - fileUploadsBucket.grantPutAcl(taskRole); - - const eventBus = events.EventBus.fromEventBusName( - this, - 'WorkersEventBus', - EnvComponentsStack.getWorkersEventBusName(envSettings), - ); - eventBus.grantPutEventsTo(taskRole); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: [ - 'cloudformation:DescribeStacks', - 'apigateway:*', - 'execute-api:*', - 'xray:*', - ], - resources: ['*'], - }), - ); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: ['kms:Get*', 'kms:Describe*', 'kms:List*', 'kms:Decrypt'], - resources: [ - Fn.importValue(MainKmsKey.getMainKmsOutputExportName(envSettings)), - ], - }), - ); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: ['ssm:DescribeParameters'], - resources: ['*'], - }), - ); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: ['ssm:GetParameters*'], - resources: [ - `arn:aws:ssm:${stack.region}:${stack.account}:parameter/${chamberServiceName}/*`, - ], - }), - ); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: [ - 'ssmmessages:CreateControlChannel', - 'ssmmessages:CreateDataChannel', - 'ssmmessages:OpenControlChannel', - 'ssmmessages:OpenDataChannel', - ], - resources: ['*'], - }), - ); - - return taskRole; - } - - protected getChamberServiceName(envSettings: EnvironmentSettings) { - return `env-${envSettings.projectEnvName}-backend`; - } } diff --git a/packages/backend/infra/stacks/celeryWorkers/index.ts b/packages/backend/infra/stacks/celeryWorkers/index.ts new file mode 100644 index 000000000..d2395744d --- /dev/null +++ b/packages/backend/infra/stacks/celeryWorkers/index.ts @@ -0,0 +1 @@ +export { CeleryStack } from './stack'; diff --git a/packages/backend/infra/stacks/celeryWorkers/names.ts b/packages/backend/infra/stacks/celeryWorkers/names.ts new file mode 100644 index 000000000..95eee975b --- /dev/null +++ b/packages/backend/infra/stacks/celeryWorkers/names.ts @@ -0,0 +1,17 @@ +import { EnvironmentSettings } from '@sb/infra-core'; + +export function getCeleryWorkersServiceName(envSettings: EnvironmentSettings) { + return `${envSettings.projectEnvName}-celery-workers`; 
+} + +export function getCeleryBeatServiceName(envSettings: EnvironmentSettings) { + return `${envSettings.projectEnvName}-celery-beat`; +} + +export function getFlowerServiceName(envSettings: EnvironmentSettings) { + return `${envSettings.projectEnvName}-flower`; +} + +export function getCeleryWorkersFamily(envSettings: EnvironmentSettings) { + return `${envSettings.projectEnvName}-celery-workers`; +} diff --git a/packages/backend/infra/stacks/celeryWorkers/stack.ts b/packages/backend/infra/stacks/celeryWorkers/stack.ts new file mode 100644 index 000000000..392fc4762 --- /dev/null +++ b/packages/backend/infra/stacks/celeryWorkers/stack.ts @@ -0,0 +1,248 @@ +import { App, Duration, Fn, Stack, StackProps } from 'aws-cdk-lib'; +import * as ecs from 'aws-cdk-lib/aws-ecs'; +import { AwsLogDriver } from 'aws-cdk-lib/aws-ecs'; +import * as iam from 'aws-cdk-lib/aws-iam'; +import { + ApplicationMultipleTargetGroupsFargateService, + EnvConstructProps, + getHostedZone, +} from '@sb/infra-core'; +import { FargateServiceResources, MainECSCluster } from '@sb/infra-shared'; + +import { + getCeleryBeatServiceName, + getCeleryWorkersFamily, + getCeleryWorkersServiceName, + getFlowerServiceName, +} from './names'; +import { getBackendSecrets } from '../lib/secrets'; +import { getBackendEnvironment } from '../lib/environment'; +import { SubnetType } from 'aws-cdk-lib/aws-ec2'; +import { createBackendTaskRole } from '../lib/backendTaskRole'; +import { LogGroup } from 'aws-cdk-lib/aws-logs'; +import * as elb2 from 'aws-cdk-lib/aws-elasticloadbalancingv2'; +import { ApiStackProps } from '../api/stack'; + +export interface CeleryStackProps extends StackProps, EnvConstructProps {} + +export class CeleryStack extends Stack { + taskRole: iam.Role; + workersService: ecs.FargateService; + beatService: ecs.FargateService; + backendSecrets: Record; + flowerService: ApplicationMultipleTargetGroupsFargateService; + + constructor(scope: App, id: string, props: CeleryStackProps) { + super(scope, id, props); + + const resources = new FargateServiceResources(this, 'ApiResources', props); + + this.taskRole = createBackendTaskRole(this, 'CeleryWorkerTaskRole', { + envSettings: props.envSettings, + }); + this.backendSecrets = getBackendSecrets(this, { + envSettings: props.envSettings, + }); + this.workersService = this.createDefaultWorkerFargateService( + resources, + props, + ); + this.beatService = this.createBeatFargateService(resources, props); + this.flowerService = this.createFlowerService(resources, props); + } + + private createDefaultWorkerFargateService( + resources: FargateServiceResources, + props: CeleryStackProps, + ) { + const { envSettings } = props; + const taskDefinition = new ecs.FargateTaskDefinition( + this, + 'CeleryWorkersTaskDefinition', + { + cpu: 256, + memoryLimitMiB: 512, + family: getCeleryWorkersFamily(props.envSettings), + taskRole: this.taskRole, + }, + ); + + taskDefinition.addContainer('WorkerContainer', { + image: ecs.ContainerImage.fromEcrRepository( + resources.backendRepository, + envSettings.version, + ), + containerName: 'worker', + command: [ + 'sh', + '-c', + '/bin/chamber exec $CHAMBER_SERVICE_NAME -- ./scripts/runtime/run_celery_worker_default.sh', + ], + environment: getBackendEnvironment(this, { + envSettings, + }), + secrets: this.backendSecrets, + logging: this.createAWSLogDriver( + `${envSettings.projectEnvName}-celery-worker-default`, + ), + }); + + return new ecs.FargateService(this, 'DefaultWorkersService', { + serviceName: getCeleryWorkersServiceName(props.envSettings), + 
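+ // Runs on FARGATE_SPOT capacity (see capacityProviderStrategies below) to keep worker cost down.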
cluster: resources.mainCluster, + securityGroups: [resources.fargateContainerSecurityGroup], + vpcSubnets: resources.mainCluster.vpc.selectSubnets({ + subnetType: SubnetType.PRIVATE_WITH_EGRESS, + }), + desiredCount: 1, + capacityProviderStrategies: [ + { + capacityProvider: 'FARGATE_SPOT', + weight: 1, + }, + ], + taskDefinition: taskDefinition, + enableExecuteCommand: true, + }); + } + + private createBeatFargateService( + resources: FargateServiceResources, + props: CeleryStackProps, + ) { + const { envSettings } = props; + const taskDefinition = new ecs.FargateTaskDefinition( + this, + 'CeleryBeatTaskDefinition', + { + cpu: 256, + memoryLimitMiB: 512, + family: getCeleryWorkersFamily(props.envSettings), + taskRole: this.taskRole, + }, + ); + + taskDefinition.addContainer('BeatContainer', { + image: ecs.ContainerImage.fromEcrRepository( + resources.backendRepository, + envSettings.version, + ), + containerName: 'beat', + command: [ + 'sh', + '-c', + '/bin/chamber exec $CHAMBER_SERVICE_NAME -- ./scripts/runtime/run_celery_beat.sh', + ], + environment: getBackendEnvironment(this, { + envSettings, + }), + secrets: this.backendSecrets, + logging: this.createAWSLogDriver( + `${envSettings.projectEnvName}-celery-beat-default`, + ), + }); + + return new ecs.FargateService(this, 'BeatService', { + serviceName: getCeleryBeatServiceName(props.envSettings), + cluster: resources.mainCluster, + securityGroups: [resources.fargateContainerSecurityGroup], + vpcSubnets: resources.mainCluster.vpc.selectSubnets({ + subnetType: SubnetType.PRIVATE_WITH_EGRESS, + }), + desiredCount: 1, + capacityProviderStrategies: [ + { + capacityProvider: 'FARGATE_SPOT', + weight: 1, + }, + ], + taskDefinition: taskDefinition, + enableExecuteCommand: true, + }); + } + + private createFlowerService( + resources: FargateServiceResources, + props: ApiStackProps, + ) { + const { envSettings } = props; + const domainZone = getHostedZone(this, envSettings); + const httpsListener = + elb2.ApplicationListener.fromApplicationListenerAttributes( + this, + 'HttpsListener', + { + listenerArn: Fn.importValue( + MainECSCluster.getLoadBalancerHttpsListenerArnOutputExportName( + props.envSettings, + ), + ), + securityGroup: resources.publicLoadBalancerSecurityGroup, + }, + ); + + return new ApplicationMultipleTargetGroupsFargateService( + this, + 'FlowerService', + { + securityGroup: resources.fargateContainerSecurityGroup, + serviceName: getFlowerServiceName(props.envSettings), + healthCheckGracePeriod: Duration.minutes(2), + cluster: resources.mainCluster, + cpu: 256, + memoryLimitMiB: 512, + desiredCount: 1, + taskRole: this.taskRole, + capacityProviderStrategies: [ + { + capacityProvider: 'FARGATE_SPOT', + weight: 1, + }, + ], + taskImageOptions: [ + { + containerName: 'flower', + command: [ + 'sh', + '-c', + '/bin/chamber exec $CHAMBER_SERVICE_NAME -- ./scripts/runtime/run_celery_flower.sh', + ], + image: ecs.ContainerImage.fromEcrRepository( + resources.backendRepository, + envSettings.version, + ), + environment: getBackendEnvironment(this, { + envSettings, + }), + secrets: this.backendSecrets, + }, + ], + loadBalancers: [ + { + domainZone, + domainName: envSettings.domains.flower, + loadBalancer: resources.publicLoadBalancer, + listeners: [httpsListener], + }, + ], + targetGroups: [ + { + protocol: ecs.Protocol.TCP, + containerPort: 80, + priority: 6, + hostHeader: envSettings.domains.flower, + healthCheckPath: '/healthcheck', + }, + ], + }, + ); + } + + protected createAWSLogDriver(prefix: string): AwsLogDriver { + const logGroup 
= new LogGroup(this, `${prefix}-LogGroup`); + return new AwsLogDriver({ + streamPrefix: prefix, + logGroup: logGroup, + }); + } +} diff --git a/packages/backend/infra/stacks/lib/backendTaskRole.ts b/packages/backend/infra/stacks/lib/backendTaskRole.ts new file mode 100644 index 000000000..2d94dde0b --- /dev/null +++ b/packages/backend/infra/stacks/lib/backendTaskRole.ts @@ -0,0 +1,91 @@ +import { Construct } from 'constructs'; +import { Fn, Stack } from 'aws-cdk-lib'; +import { EnvironmentSettings } from '@sb/infra-core'; +import * as iam from 'aws-cdk-lib/aws-iam'; +import { getBackendChamberServiceName } from './names'; +import * as s3 from 'aws-cdk-lib/aws-s3'; +import * as events from 'aws-cdk-lib/aws-events'; +import { EnvComponentsStack, MainKmsKey } from '@sb/infra-shared'; + +export function createBackendTaskRole( + scope: Construct, + id: string, + { + envSettings, + }: { + envSettings: EnvironmentSettings; + }, +): iam.Role { + const stack = Stack.of(scope); + const chamberServiceName = getBackendChamberServiceName(envSettings); + + const taskRole = new iam.Role(scope, id, { + assumedBy: new iam.ServicePrincipal('ecs-tasks'), + }); + + const fileUploadsBucket = s3.Bucket.fromBucketName( + scope, + 'FileUploadsBucket', + EnvComponentsStack.getFileUploadsBucketName(envSettings), + ); + fileUploadsBucket.grantReadWrite(taskRole); + fileUploadsBucket.grantPutAcl(taskRole); + + const eventBus = events.EventBus.fromEventBusName( + scope, + 'WorkersEventBus', + EnvComponentsStack.getWorkersEventBusName(envSettings), + ); + eventBus.grantPutEventsTo(taskRole); + + taskRole.addToPolicy( + new iam.PolicyStatement({ + actions: [ + 'cloudformation:DescribeStacks', + 'apigateway:*', + 'execute-api:*', + 'xray:*', + ], + resources: ['*'], + }), + ); + + taskRole.addToPolicy( + new iam.PolicyStatement({ + actions: ['kms:Get*', 'kms:Describe*', 'kms:List*', 'kms:Decrypt'], + resources: [ + Fn.importValue(MainKmsKey.getMainKmsOutputExportName(envSettings)), + ], + }), + ); + + taskRole.addToPolicy( + new iam.PolicyStatement({ + actions: ['ssm:DescribeParameters'], + resources: ['*'], + }), + ); + + taskRole.addToPolicy( + new iam.PolicyStatement({ + actions: ['ssm:GetParameters*'], + resources: [ + `arn:aws:ssm:${stack.region}:${stack.account}:parameter/${chamberServiceName}/*`, + ], + }), + ); + + taskRole.addToPolicy( + new iam.PolicyStatement({ + actions: [ + 'ssmmessages:CreateControlChannel', + 'ssmmessages:CreateDataChannel', + 'ssmmessages:OpenControlChannel', + 'ssmmessages:OpenDataChannel', + ], + resources: ['*'], + }), + ); + + return taskRole; +} diff --git a/packages/backend/infra/stacks/lib/environment.ts b/packages/backend/infra/stacks/lib/environment.ts new file mode 100644 index 000000000..7feace03f --- /dev/null +++ b/packages/backend/infra/stacks/lib/environment.ts @@ -0,0 +1,55 @@ +import { EnvironmentSettings } from '@sb/infra-core'; +import { + EnvComponentsStack, + MainDatabase, + MainKmsKey, + MainRedisCluster, +} from '@sb/infra-shared'; +import { Fn } from 'aws-cdk-lib'; +import { Construct } from 'constructs'; +import { getBackendChamberServiceName } from './names'; + +type GetBackendEnvironmentOptions = { + envSettings: EnvironmentSettings; + allowedHosts?: string; + csrfTrustedOrigins?: string; +}; + +export function getBackendEnvironment( + scope: Construct, + { + envSettings, + allowedHosts, + csrfTrustedOrigins, + }: GetBackendEnvironmentOptions, +) { + return { + PROJECT_NAME: envSettings.projectName, + ENVIRONMENT_NAME: envSettings.envStage, + 
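+ // CHAMBER_SERVICE_NAME points chamber at the SSM Parameter Store path holding the backend configuration.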
CHAMBER_SERVICE_NAME: getBackendChamberServiceName(envSettings), + CHAMBER_KMS_KEY_ALIAS: MainKmsKey.getKeyAlias(envSettings), + ...(allowedHosts ? { DJANGO_ALLOWED_HOSTS: allowedHosts } : {}), + ...(csrfTrustedOrigins ? { CSRF_TRUSTED_ORIGINS: csrfTrustedOrigins } : {}), + OTP_AUTH_ISSUER_NAME: envSettings.domains.webApp, + WORKERS_EVENT_BUS_NAME: + EnvComponentsStack.getWorkersEventBusName(envSettings), + AWS_STORAGE_BUCKET_NAME: + EnvComponentsStack.getFileUploadsBucketName(envSettings), + AWS_S3_CUSTOM_DOMAIN: envSettings.domains.cdn, + DB_PROXY_ENDPOINT: Fn.importValue( + MainDatabase.getDatabaseProxyEndpointOutputExportName(envSettings), + ), + AWS_CLOUDFRONT_KEY_ID: Fn.importValue( + EnvComponentsStack.getCdnSigningPublicKeyIdExportName(envSettings), + ), + REDIS_CONNECTION: Fn.join('', [ + 'redis://', + Fn.importValue( + MainRedisCluster.getMainRedisClusterAddressExportName(envSettings), + ), + ':6379', + ]), + VITE_WEB_APP_URL: `https://${envSettings.domains.webApp}`, + VITE_EMAIL_ASSETS_URL: `https://${envSettings.domains.webApp}/email-assets`, + }; +} diff --git a/packages/backend/infra/stacks/lib/names.ts b/packages/backend/infra/stacks/lib/names.ts new file mode 100644 index 000000000..b2d938c0d --- /dev/null +++ b/packages/backend/infra/stacks/lib/names.ts @@ -0,0 +1,5 @@ +import { EnvironmentSettings } from '@sb/infra-core'; + +export function getBackendChamberServiceName(envSettings: EnvironmentSettings) { + return `env-${envSettings.projectEnvName}-backend`; +} diff --git a/packages/backend/infra/stacks/lib/secrets.ts b/packages/backend/infra/stacks/lib/secrets.ts new file mode 100644 index 000000000..2fef2cef7 --- /dev/null +++ b/packages/backend/infra/stacks/lib/secrets.ts @@ -0,0 +1,28 @@ +import { Construct } from 'constructs'; +import * as ecs from 'aws-cdk-lib/aws-ecs'; +import * as sm from 'aws-cdk-lib/aws-secretsmanager'; +import { EnvComponentsStack, MainDatabase } from '@sb/infra-shared'; +import { Fn } from 'aws-cdk-lib'; +import { EnvironmentSettings } from '@sb/infra-core'; + +export function getBackendSecrets( + scope: Construct, + { envSettings }: { envSettings: EnvironmentSettings }, +) { + const dbSecretArn = Fn.importValue( + MainDatabase.getDatabaseSecretArnOutputExportName(envSettings), + ); + + return { + DB_CONNECTION: ecs.Secret.fromSecretsManager( + sm.Secret.fromSecretCompleteArn(scope, 'DbSecret', dbSecretArn), + ), + AWS_CLOUDFRONT_KEY: ecs.Secret.fromSecretsManager( + sm.Secret.fromSecretNameV2( + scope, + 'CloudfrontPrivateKey', + `${EnvComponentsStack.getCDNSigningKeyName(envSettings)}/private`, + ), + ), + }; +} diff --git a/packages/backend/infra/stacks/migrations/stack.ts b/packages/backend/infra/stacks/migrations/stack.ts index da6a86f1a..9942a453e 100644 --- a/packages/backend/infra/stacks/migrations/stack.ts +++ b/packages/backend/infra/stacks/migrations/stack.ts @@ -1,18 +1,14 @@ -import { App, Duration, Fn, Stack, StackProps } from 'aws-cdk-lib'; +import { App, Duration, Stack, StackProps } from 'aws-cdk-lib'; import * as ecs from 'aws-cdk-lib/aws-ecs'; import * as sfTasks from 'aws-cdk-lib/aws-stepfunctions-tasks'; import * as sf from 'aws-cdk-lib/aws-stepfunctions'; -import * as sm from 'aws-cdk-lib/aws-secretsmanager'; -import * as iam from 'aws-cdk-lib/aws-iam'; import * as logs from 'aws-cdk-lib/aws-logs'; import { EnvConstructProps, EnvironmentSettings } from '@sb/infra-core'; -import { - FargateServiceResources, - MainDatabase, - MainKmsKey, - MainRedisCluster, -} from '@sb/infra-shared'; +import { FargateServiceResources } from 
'@sb/infra-shared'; +import { getBackendSecrets } from '../lib/secrets'; +import { getBackendEnvironment } from '../lib/environment'; +import { createBackendTaskRole } from '../lib/backendTaskRole'; export interface MigrationsStackProps extends StackProps, EnvConstructProps {} @@ -28,10 +24,9 @@ export class MigrationsStack extends Stack { ); const containerName = 'migrations'; - const dbSecretArn = Fn.importValue( - MainDatabase.getDatabaseSecretArnOutputExportName(envSettings), - ); - const taskRole = this.createTaskRole(props); + const taskRole = createBackendTaskRole(this, 'MigrationsTaskRole', { + envSettings, + }); const migrationsTaskDefinition = new ecs.FargateTaskDefinition( this, @@ -49,29 +44,10 @@ export class MigrationsStack extends Stack { envSettings.version, ), logging: this.createAWSLogDriver(this.node.id, props.envSettings), - environment: { - CHAMBER_SERVICE_NAME: this.getChamberServiceName(envSettings), - CHAMBER_KMS_KEY_ALIAS: MainKmsKey.getKeyAlias(envSettings), - DB_PROXY_ENDPOINT: Fn.importValue( - MainDatabase.getDatabaseProxyEndpointOutputExportName( - props.envSettings, - ), - ), - REDIS_CONNECTION: Fn.join('', [ - 'redis://', - Fn.importValue( - MainRedisCluster.getMainRedisClusterAddressExportName( - props.envSettings, - ), - ), - ':6379', - ]), - }, - secrets: { - DB_CONNECTION: ecs.Secret.fromSecretsManager( - sm.Secret.fromSecretCompleteArn(this, 'DbSecret', dbSecretArn), - ), - }, + environment: getBackendEnvironment(this, { + envSettings, + }), + secrets: getBackendSecrets(this, { envSettings }), }); new sf.StateMachine(this, 'MigrationsStateMachine', { @@ -104,46 +80,4 @@ export class MigrationsStack extends Stack { }); return new ecs.AwsLogDriver({ streamPrefix: prefix, logGroup }); } - - protected createTaskRole(props: MigrationsStackProps): iam.Role { - const stack = Stack.of(this); - const chamberServiceName = this.getChamberServiceName(props.envSettings); - - const taskRole = new iam.Role(this, 'MigrationsTaskRole', { - assumedBy: new iam.ServicePrincipal('ecs-tasks'), - }); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: ['kms:Get*', 'kms:Describe*', 'kms:List*', 'kms:Decrypt'], - resources: [ - Fn.importValue( - MainKmsKey.getMainKmsOutputExportName(props.envSettings), - ), - ], - }), - ); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: ['ssm:DescribeParameters'], - resources: ['*'], - }), - ); - - taskRole.addToPolicy( - new iam.PolicyStatement({ - actions: ['ssm:GetParameters*'], - resources: [ - `arn:aws:ssm:${stack.region}:${stack.account}:parameter/${chamberServiceName}/*`, - ], - }), - ); - - return taskRole; - } - - protected getChamberServiceName(envSettings: EnvironmentSettings) { - return `env-${envSettings.projectEnvName}-backend`; - } } diff --git a/packages/backend/package.json b/packages/backend/package.json index d25bc50a8..74adb6096 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -9,6 +9,7 @@ "@sb/infra-core": "workspace:*", "@sb/infra-shared": "workspace:*", "@sb/tools": "workspace:*", + "@sb/webapp-emails": "workspace:*", "fs-extra": "^11.2.0" } } diff --git a/packages/backend/project.json b/packages/backend/project.json index e6e0c566e..d4211c268 100644 --- a/packages/backend/project.json +++ b/packages/backend/project.json @@ -10,6 +10,21 @@ "cwd": "packages/backend" } }, + "build-email-renderer": { + "executor": "nx:run-commands", + "options": { + "cwd": "packages/backend", + "color": true, + "commands": ["node scripts/copyEmailRenderer.js"], + "parallel": 
false + }, + "dependsOn": [ + { + "projects": ["webapp-emails"], + "target": "build" + } + ] + }, "compose-build-image": { "executor": "nx:run-commands", "options": { @@ -17,7 +32,7 @@ "commands": ["docker-compose build backend"], "parallel": false }, - "dependsOn": ["setup"] + "dependsOn": ["setup", "build-email-renderer"] }, "test": { "executor": "nx:run-commands", @@ -43,7 +58,8 @@ "color": true, "commands": ["node scripts/build.js"], "parallel": false - } + }, + "dependsOn": ["build-email-renderer"] }, "build-docs": { "executor": "nx:run-commands", @@ -80,6 +96,18 @@ "parallel": false } }, + "deploy:celery": { + "executor": "nx:run-commands", + "options": { + "cwd": "packages/backend", + "color": true, + "commands": [ + "pnpm cdk deploy *CeleryStack", + "pnpm nx run tools:upload-service-version celery" + ], + "parallel": false + } + }, "deploy": { "executor": "nx:run-commands", "options": { @@ -88,17 +116,36 @@ "commands": [ "pnpm cdk deploy *MigrationsStack", "pnpm nx run trigger-migrations-job", - "pnpm cdk deploy *ApiStack" + "pnpm cdk deploy *ApiStack", + "pnpm cdk deploy *CeleryStack" ], "parallel": false } }, - "diff": { + "diff:api": { + "executor": "nx:run-commands", + "options": { + "cwd": "packages/backend", + "color": true, + "commands": ["cdk diff *ApiStack"], + "parallel": false + } + }, + "diff:migrations": { + "executor": "nx:run-commands", + "options": { + "cwd": "packages/backend", + "color": true, + "commands": ["cdk diff *MigrationsStack"], + "parallel": false + } + }, + "diff:celery": { "executor": "nx:run-commands", "options": { "cwd": "packages/backend", "color": true, - "commands": ["cdk diff *ApiStack", "cdk diff *MigrationsStack"], + "commands": ["cdk diff *CeleryStack"], "parallel": false } }, diff --git a/packages/backend/pyproject.toml b/packages/backend/pyproject.toml index 656108df4..c58cebf02 100644 --- a/packages/backend/pyproject.toml +++ b/packages/backend/pyproject.toml @@ -120,6 +120,12 @@ dependencies = [ "channels-redis>=4.2.0", "django-channels-graphql-ws @ https://github.com/apptension/DjangoChannelsGraphqlWs/archive/refs/tags/v1.0.0rc7.tar.gz", "gunicorn==22.0.0", + "celery>=5.3.6", + "django-celery-results>=2.5.1", + "django-celery-beat>=2.6.0", + "flower>=2.0.1", + "watchdog[watchmedo]>=2.3.1", + "django-ses>=3.5.2", ] requires-python = "~=3.11.1" license = {text = "MIT"} diff --git a/packages/backend/scripts/copyEmailRenderer.js b/packages/backend/scripts/copyEmailRenderer.js new file mode 100644 index 000000000..4b63f8669 --- /dev/null +++ b/packages/backend/scripts/copyEmailRenderer.js @@ -0,0 +1,11 @@ +const fs = require('fs-extra'); +const path = require('path'); + +(async () => { + const emailRendererScriptPath = path.resolve( + __dirname, + '../../webapp-libs/webapp-emails/build/email-renderer/index.umd.js', + ); + const emailRendererScriptDestinationPath = path.resolve(__dirname, 'runtime/email/renderer/index.umd.js') + await fs.copy(emailRendererScriptPath, emailRendererScriptDestinationPath); +})(); diff --git a/packages/backend/scripts/runtime/email/index.js b/packages/backend/scripts/runtime/email/index.js new file mode 100644 index 000000000..59afb338d --- /dev/null +++ b/packages/backend/scripts/runtime/email/index.js @@ -0,0 +1,31 @@ +let _renderEmail; + +try { + const { renderEmail } = require('./renderer/index.umd'); + _renderEmail = renderEmail; +} catch { + _renderEmail = (emailType, emailData) => { + const errorMsg = + 'Email renderer script is missing. 
Make sure `pnpm nx run webapp-emails:build` is run earlier and ' + + 'packages/webapp-libs/webapp-emails/build/email-renderer/index.umd.js is copied into packages/backend/scripts/runtime/email/renderer .'; + if (process.env.DEBUG !== 'True') { + throw Error(errorMsg); + } + + return { + subject: emailType, + html: ` + <html> + <body>
+ <h1>Error: Missing renderer script</h1> + <p>${errorMsg}</p> + <p>Email Type: ${emailType}</p> + <p>Data: ${JSON.stringify(emailData)}</p>
+ </body> + </html> +`, + }; + }; +} + +module.exports = { renderEmail: _renderEmail }; diff --git a/packages/backend/scripts/runtime/run_celery_beat.sh b/packages/backend/scripts/runtime/run_celery_beat.sh new file mode 100644 index 000000000..9c1860b80 --- /dev/null +++ b/packages/backend/scripts/runtime/run_celery_beat.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +echo "Starting celery beat service..." + +pdm run celery -A config beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler diff --git a/packages/backend/scripts/runtime/run_celery_flower.sh b/packages/backend/scripts/runtime/run_celery_flower.sh new file mode 100644 index 000000000..b878a5433 --- /dev/null +++ b/packages/backend/scripts/runtime/run_celery_flower.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +echo "Starting celery flower service..." + +pdm run celery -A config flower --address='0.0.0.0' --port=80 diff --git a/packages/backend/scripts/runtime/run_celery_worker_default.sh b/packages/backend/scripts/runtime/run_celery_worker_default.sh new file mode 100644 index 000000000..12db0e0c4 --- /dev/null +++ b/packages/backend/scripts/runtime/run_celery_worker_default.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +echo "Starting celery worker – default queue..." + +pdm run celery -A config worker -l info diff --git a/packages/backend/scripts/runtime/run_local_celery_flower.sh b/packages/backend/scripts/runtime/run_local_celery_flower.sh new file mode 100644 index 000000000..18ea1de5d --- /dev/null +++ b/packages/backend/scripts/runtime/run_local_celery_flower.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +echo "Starting celery flower service..." + +pdm run celery -A config flower diff --git a/packages/backend/scripts/runtime/run_local_celery_worker_default.sh b/packages/backend/scripts/runtime/run_local_celery_worker_default.sh new file mode 100755 index 000000000..42a36225d --- /dev/null +++ b/packages/backend/scripts/runtime/run_local_celery_worker_default.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +set -e + +pdm run watchmedo auto-restart \ --directory=/app \ --pattern=*.py \ --recursive \ -- pdm run celery -A config worker -l info diff --git a/packages/backend/sonar-project.properties b/packages/backend/sonar-project.properties index b56a12e6d..5a0fc86e9 100644 --- a/packages/backend/sonar-project.properties +++ b/packages/backend/sonar-project.properties @@ -2,4 +2,4 @@ sonar.organization=${env.SONAR_ORGANIZATION} sonar.projectKey=${env.SONAR_BACKEND_PROJECT_KEY} sonar.python.version=3.11 sonar.python.coverage.reportPaths=cov/coverage.xml -sonar.exclusions=**/migrations/*, **/tests/**/*, config/** +sonar.exclusions=**/migrations/*, **/tests/**/*, config/**, infra/**, scripts/** diff --git a/packages/infra/infra-core/src/lib/env-config.ts b/packages/infra/infra-core/src/lib/env-config.ts index b901222ab..620f065ac 100644 --- a/packages/infra/infra-core/src/lib/env-config.ts +++ b/packages/infra/infra-core/src/lib/env-config.ts @@ -7,6 +7,7 @@ declare const process: { SB_DOMAIN_DOCS: string; SB_DOMAIN_WWW: string; SB_DOMAIN_ADMIN_PANEL: string; + SB_DOMAIN_FLOWER: string; SB_CLOUDFRONT_CERTIFICATE_ARN: string; SB_CERTIFICATE_DOMAIN: string; SB_LOAD_BALANCER_CERTIFICATE_ARN: string; @@ -37,6 +38,7 @@ interface EnvConfigFileDomains { docs: string; www: string; cdn: string; + flower: string; } interface ToolsDomains { @@ -128,33 +130,38 @@ async function readConfig(): Promise { }; } -async function readEnvConfig(): Promise { +async function readEnvConfig(envStage: string): Promise { if (!process.env.SB_DOMAIN_API) { throw new
Error('SB_DOMAIN_API env variable has to be defined'); } + const hostedZoneName = process.env.SB_HOSTED_ZONE_NAME ?? ''; + const certDomain = process.env.SB_CERTIFICATE_DOMAIN; + const defaultDomain = certDomain ?? `${envStage}.${hostedZoneName}`; + return { webAppConfig: { envVariables: {}, }, basicAuth: process.env.SB_BASIC_AUTH, domains: { - api: process.env.SB_DOMAIN_API, - webApp: process.env.SB_DOMAIN_WEB_APP ?? '', - cdn: process.env.SB_DOMAIN_CDN ?? '', - docs: process.env.SB_DOMAIN_DOCS ?? '', - www: process.env.SB_DOMAIN_WWW ?? '', - adminPanel: process.env.SB_DOMAIN_ADMIN_PANEL ?? '', + api: process.env.SB_DOMAIN_API ?? `api.${defaultDomain}`, + webApp: process.env.SB_DOMAIN_WEB_APP ?? `app.${defaultDomain}`, + cdn: process.env.SB_DOMAIN_CDN ?? `cdn.${defaultDomain}`, + docs: process.env.SB_DOMAIN_DOCS ?? `docs.${defaultDomain}`, + www: process.env.SB_DOMAIN_WWW ?? `www.${defaultDomain}`, + adminPanel: process.env.SB_DOMAIN_ADMIN_PANEL ?? `admin.${defaultDomain}`, + flower: process.env.SB_DOMAIN_FLOWER ?? `flower.${defaultDomain}`, }, certificates: { cloudfrontCertificateArn: process.env.SB_CLOUDFRONT_CERTIFICATE_ARN ?? '', - domain: process.env.SB_CERTIFICATE_DOMAIN ?? '', + domain: certDomain ?? '', loadBalancerCertificateArn: process.env.SB_LOAD_BALANCER_CERTIFICATE_ARN ?? '', }, hostedZone: { id: process.env.SB_HOSTED_ZONE_ID ?? '', - name: process.env.SB_HOSTED_ZONE_NAME ?? '', + name: hostedZoneName, }, deployBranches: process.env.SB_DEPLOY_BRANCHES?.split(',') ?? [], }; @@ -174,7 +181,7 @@ export async function loadEnvSettings(): Promise { } const config = await readConfig(); - const envConfig = await readEnvConfig(); + const envConfig = await readEnvConfig(envStage); return { envStage, diff --git a/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateService.ts b/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateService.ts index 55d866495..8456ec6db 100644 --- a/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateService.ts +++ b/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateService.ts @@ -3,6 +3,7 @@ import { FargateService, FargateTaskDefinition, AwsLogDriver, + CapacityProviderStrategy, } from 'aws-cdk-lib/aws-ecs'; import { ApplicationTargetGroup } from 'aws-cdk-lib/aws-elasticloadbalancingv2'; import { FeatureFlags } from 'aws-cdk-lib'; @@ -112,6 +113,8 @@ export interface ApplicationMultipleTargetGroupsFargateServiceProps * @default - Automatically generated name. */ readonly family?: string; + + readonly capacityProviderStrategies?: CapacityProviderStrategy[]; } /** @@ -149,7 +152,7 @@ export class ApplicationMultipleTargetGroupsFargateService extends ApplicationMu constructor( scope: Construct, id: string, - props: ApplicationMultipleTargetGroupsFargateServiceProps + props: ApplicationMultipleTargetGroupsFargateServiceProps, ) { super(scope, id, props); @@ -158,7 +161,7 @@ export class ApplicationMultipleTargetGroupsFargateService extends ApplicationMu if (props.taskDefinition && props.taskImageOptions) { throw new Error( - 'You must specify only one of TaskDefinition or TaskImageOptions.' 
+ 'You must specify only one of TaskDefinition or TaskImageOptions.', ); } else if (props.taskDefinition) { this.taskDefinition = props.taskDefinition; @@ -210,12 +213,12 @@ export class ApplicationMultipleTargetGroupsFargateService extends ApplicationMu if (props.targetGroups) { this.addPortMappingForTargets( this.taskDefinition.defaultContainer, - props.targetGroups + props.targetGroups, ); this.targetGroup = this.registerECSTargets( this.service, this.taskDefinition.defaultContainer, - props.targetGroups + props.targetGroups, ); } } @@ -230,10 +233,10 @@ export class ApplicationMultipleTargetGroupsFargateService extends ApplicationMu } private createFargateService( - props: ApplicationMultipleTargetGroupsFargateServiceProps + props: ApplicationMultipleTargetGroupsFargateServiceProps, ): FargateService { const desiredCount = FeatureFlags.of(this).isEnabled( - cxapi.ECS_REMOVE_DEFAULT_DESIRED_COUNT + cxapi.ECS_REMOVE_DEFAULT_DESIRED_COUNT, ) ? this.internalDesiredCount : this.desiredCount; @@ -254,6 +257,7 @@ export class ApplicationMultipleTargetGroupsFargateService extends ApplicationMu }), platformVersion: props.platformVersion, enableExecuteCommand: true, + capacityProviderStrategies: props.capacityProviderStrategies, }); } } diff --git a/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateServiceBase.ts b/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateServiceBase.ts index 86e5515e9..2c601343a 100644 --- a/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateServiceBase.ts +++ b/packages/infra/infra-core/src/lib/patterns/applicationMultipleTargetGroupsFargateServiceBase.ts @@ -257,6 +257,9 @@ export interface ApplicationTargetProps { * @default No path condition */ readonly pathPattern?: string; + + + readonly healthCheckPath: string; } /** @@ -443,6 +446,7 @@ export abstract class ApplicationMultipleTargetGroupsServiceBase extends Constru interface GroupedTarget { target: { protocol?: Protocol; containerPort: number }; hosts: ApplicationTargetProps[]; + healthCheckPath: string; } const groupedTargets: { [id: string]: GroupedTarget } = {}; @@ -455,6 +459,7 @@ export abstract class ApplicationMultipleTargetGroupsServiceBase extends Constru containerPort: targetProps.containerPort, }, hosts: [], + healthCheckPath: targetProps.healthCheckPath, }; } groupedTargets[key].hosts.push(targetProps); @@ -467,7 +472,7 @@ export abstract class ApplicationMultipleTargetGroupsServiceBase extends Constru vpc: service.cluster.vpc, port: groupedTarget.target.containerPort, healthCheck: { - path: '/lbcheck', + path: groupedTarget.healthCheckPath, protocol: ELBProtocol.HTTP, interval: Duration.seconds(6), timeout: Duration.seconds(5), diff --git a/packages/infra/infra-shared/src/stacks/main/mainEcsCluster.ts b/packages/infra/infra-shared/src/stacks/main/mainEcsCluster.ts index 29b0353b5..73d87d6ea 100644 --- a/packages/infra/infra-shared/src/stacks/main/mainEcsCluster.ts +++ b/packages/infra/infra-shared/src/stacks/main/mainEcsCluster.ts @@ -66,6 +66,7 @@ export class MainECSCluster extends Construct { return new ecs.Cluster(this, 'Cluster', { vpc: props.vpc, clusterName: MainECSCluster.getClusterName(props.envSettings), + enableFargateCapacityProviders: true, }); } diff --git a/packages/internal/cli/src/commands/backend/deploy/celery.ts b/packages/internal/cli/src/commands/backend/deploy/celery.ts new file mode 100644 index 000000000..e23540863 --- /dev/null +++ 
b/packages/internal/cli/src/commands/backend/deploy/celery.ts @@ -0,0 +1,42 @@ +import { Flags } from '@oclif/core'; +import { color } from '@oclif/color'; + +import { initConfig } from '../../../config/init'; +import { runCommand } from '../../../lib/runCommand'; +import { BaseCommand } from '../../../baseCommand'; + +export default class BackendDeployCelery extends BaseCommand< + typeof BackendDeployCelery +> { + static description = + 'Deploys Celery Workers to AWS using previously built backend artifact'; + + static examples = [`$ <%= config.bin %> <%= command.id %>`]; + + static flags = { + diff: Flags.boolean({ + default: false, + description: + 'Perform a dry run and list all changes that would be applied in AWS account', + required: false, + }), + }; + + async run(): Promise<void> { + const { flags } = await this.parse(BackendDeployCelery); + const { envStage, version, awsRegion, awsAccountId } = await initConfig( + this, + { requireAws: true }, + ); + + this.log(`Deploying Celery: + envStage: ${color.green(envStage)} + version: ${color.green(version)} + AWS account: ${color.green(awsAccountId)} + AWS region: ${color.green(awsRegion)} +`); + + const verb = flags.diff ? 'diff' : 'deploy'; + await runCommand('pnpm', ['nx', 'run', `backend:${verb}:celery`]); + } +} diff --git a/packages/internal/cli/src/commands/emails/build.ts b/packages/internal/cli/src/commands/emails/build.ts index af851d703..7d9e07def 100644 --- a/packages/internal/cli/src/commands/emails/build.ts +++ b/packages/internal/cli/src/commands/emails/build.ts @@ -2,8 +2,6 @@ import { color } from '@oclif/color'; import { initConfig } from '../../config/init'; import { runCommand } from '../../lib/runCommand'; -import { ENV_STAGE_LOCAL } from '../../config/env'; -import { assertChamberInstalled, loadChamberEnv } from '../../lib/chamber'; import { BaseCommand } from '../../baseCommand'; export default class EmailsBuild extends BaseCommand<typeof EmailsBuild> { static examples = [`$ <%= config.bin %> <%= command.id %>`]; async run(): Promise<void> { - const { envStage, version, awsRegion, awsAccountId, projectEnvName } = - await initConfig(this, { requireAws: 'allow-local' }); - - if (envStage !== ENV_STAGE_LOCAL) { - await assertChamberInstalled(); - await loadChamberEnv(this, { - serviceName: `env-${projectEnvName}-workers`, - }); - } + const { envStage, version, awsRegion, awsAccountId } = await initConfig( + this, + { requireAws: 'allow-local' }, + ); this.log(`Building emails: envStage: ${color.green(envStage)} diff --git a/packages/internal/cli/src/commands/workers/build.ts b/packages/internal/cli/src/commands/workers/build.ts index 2217ed655..208d3c79f 100644 --- a/packages/internal/cli/src/commands/workers/build.ts +++ b/packages/internal/cli/src/commands/workers/build.ts @@ -17,7 +17,7 @@ export default class WorkersBuild extends BaseCommand<typeof WorkersBuild> { ); await dockerHubLogin(); - this.log(`Deploying backend: + this.log(`Building workers: envStage: ${color.green(envStage)} version: ${color.green(version)} AWS account: ${color.green(awsAccountId)} diff --git a/packages/internal/cli/src/config/aws.ts b/packages/internal/cli/src/config/aws.ts index 16c340eb5..de4fa0c26 100644 --- a/packages/internal/cli/src/config/aws.ts +++ b/packages/internal/cli/src/config/aws.ts @@ -8,7 +8,7 @@ import * as childProcess from 'child_process'; import { promisify } from 'util'; import * as dotenv from 'dotenv'; -import { IS_CI, validateStageEnv } from './env'; +import { validateStageEnv } from './env';
import { isAwsVaultInstalled } from '../lib/awsVault'; import { assertChamberInstalled, loadChamberEnv } from '../lib/chamber'; import { runCommand } from '../lib/runCommand'; @@ -133,8 +133,7 @@ export const initAWS = async ( ); const awsRegion = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION; - - if (awsAccountId && awsRegion && IS_CI) { + if (awsAccountId && awsRegion) { await loginToECR(context, { awsAccountId, awsRegion, diff --git a/packages/internal/core/project.json b/packages/internal/core/project.json index dbaf11f1e..aa8d17adf 100644 --- a/packages/internal/core/project.json +++ b/packages/internal/core/project.json @@ -36,7 +36,7 @@ "commands": [ "nx run-many --target=setup", "nx run-many --target=compose-build-image --projects=backend,workers", - "docker-compose up --force-recreate -d backend workers" + "docker-compose up --force-recreate -d backend workers celery_default celery_beat" ] }, "dependsOn": ["setup"] diff --git a/packages/internal/docs/docs/introduction/features/workers.mdx b/packages/internal/docs/docs/introduction/features/workers.mdx index 7f0fda6f2..029261417 100644 --- a/packages/internal/docs/docs/introduction/features/workers.mdx +++ b/packages/internal/docs/docs/introduction/features/workers.mdx @@ -16,9 +16,11 @@ There are several use cases for async jobs in web applications. Some common exam ### Async Workers in the SaaS Boilerplate -The SaaS Boilerplate includes “async workers” feature that uses AWS Lambda functions and the Serverless Framework to -manage asynchronous jobs. The Serverless Framework provides a set of tools and abstractions that simplify the process of -deploying and managing serverless applications. +The SaaS Boilerplate includes two types of “async workers” to manage asynchronous jobs: +- Celery workers, the default option. Celery is a task queue whose major selling point is its great compatibility with +Django. +- AWS Lambda functions, with the help of the Serverless Framework. The Serverless Framework provides a set of tools and +abstractions that simplify the process of deploying and managing serverless applications. Tasks can be scheduled or started from the backend application part. This makes it easy to schedule tasks to run at specific times, such as sending out email notifications at a specific time of day. diff --git a/packages/internal/docs/docs/introduction/stack-description.mdx b/packages/internal/docs/docs/introduction/stack-description.mdx index 41964fe6b..931e2fd2c 100644 --- a/packages/internal/docs/docs/introduction/stack-description.mdx +++ b/packages/internal/docs/docs/introduction/stack-description.mdx @@ -26,14 +26,18 @@ lightning-fast hot module replacement (HMR), enhancing the developer experience. serves as the primary language for the back-end development. - **Django**: Django is a high-level Python web framework. It provides a robust set of tools and conventions for building web applications. -- **Django** REST Framework: Django REST Framework is a powerful and flexible toolkit for building Web APIs. It +- **Django REST Framework**: Django REST Framework is a powerful and flexible toolkit for building Web APIs. It simplifies the process of creating RESTful APIs by providing a set of serializers, views, and authentication mechanisms. -- **Graphene** Django: Graphene Django is an integration library that connects Django and GraphQL. It allows you to +- **Graphene Django**: Graphene Django is an integration library that connects Django and GraphQL.
It allows you to +define GraphQL schemas and resolvers based on your Django models. - **dj-stripe**: dj-stripe is a Django library that provides integration with Stripe, a popular payment processing platform. It simplifies the implementation of subscription billing and payment handling. -- **Postgres**: Postgres is a robust and feature-rich open-source relational database. It is used as the default +- **PostgreSQL**: PostgreSQL is a robust and feature-rich open-source relational database. It is used as the default database for the <ProjectName />, offering stability, scalability, and advanced data management capabilities. +- **Celery**: Celery is a simple, flexible, and reliable distributed system to process vast amounts of messages, while +providing operations with the tools required to maintain such a system. +- **Redis**: Redis is an open source (BSD licensed), in-memory data structure store, used as a database, cache, and message +broker. ## Infrastructure - **NX**: NX is a powerful development toolkit that helps you build scalable and efficient applications. It provides diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_backend.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_backend.mdx index 3d2d61b47..97c5415aa 100644 --- a/packages/internal/docs/docs/shared/partials/env-vars/_backend.mdx +++ b/packages/internal/docs/docs/shared/partials/env-vars/_backend.mdx @@ -1,12 +1,15 @@ -**Required** variables: - -| Name | Description | Example | -|--------------------------|-------------------------------------------------------------------------------|-------------------------------------| -| `DJANGO_DEBUG` | [docs](https://docs.djangoproject.com/en/4.2/ref/settings/#std:setting-DEBUG) | `True` | -| `DJANGO_SECRET_KEY` | [docs](https://docs.djangoproject.com/en/4.2/ref/settings/#secret-key) | `Zs639zRcb5!9om2@tW2H6XG#Znj^TB^I` | -| `HASHID_FIELD_SALT` | [docs](https://github.com/nshafer/django-hashid-field#hashid_field_salt) | `t5$^r\*xsMRXn1xjzhRSl8I5Hb3BUW$4U` | -| `ADMIN_EMAIL` | Will be used to create first super admin user | `admin@exmaple.com` | -| `ADMIN_DEFAULT_PASSWORD` | Will be used to create first super admin user | `AvPZpabgj9Z8` | +##### Required backend vars + +You will not be able to deploy the backend service without setting the following variables: + +| Name | Description | Example | +| ------------------------ | ------------------------------------------------------------------------------ | ----------------------------------- | +| `DJANGO_DEBUG` | [docs](https://docs.djangoproject.com/en/4.2/ref/settings/#std:setting-DEBUG) | `True` | +| `DJANGO_SECRET_KEY` | [docs](https://docs.djangoproject.com/en/4.2/ref/settings/#secret-key) | `Zs639zRcb5!9om2@tW2H6XG#Znj^TB^I` | +| `HASHID_FIELD_SALT` | [docs](https://github.com/nshafer/django-hashid-field#hashid_field_salt) | `t5$^r\*xsMRXn1xjzhRSl8I5Hb3BUW$4U` | +| `ADMIN_EMAIL` | Will be used to create the first super admin user | `admin@example.com` | +| `ADMIN_DEFAULT_PASSWORD` | Will be used to create the first super admin user | `AvPZpabgj9Z8` | +| `FLOWER_BASIC_AUTH` | Will be used to enable HTTP Basic authentication for Flower (Celery monitoring).
Alternatively, you can configure OAuth2 by setting `FLOWER_AUTH_PROVIDER`, `FLOWER_AUTH`, `FLOWER_OAUTH2_KEY`, `FLOWER_OAUTH2_SECRET`, `FLOWER_OAUTH2_REDIRECT_URI` env vars. Check out the [official Flower docs](https://flower.readthedocs.io/en/latest/auth.html#google-oauth) for more information | `user1:password1,user2:password2` | import OptionalVars from './_backend_optional.mdx'; diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_backend_email.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_backend_email.mdx new file mode 100644 index 000000000..7c1e14233 --- /dev/null +++ b/packages/internal/docs/docs/shared/partials/env-vars/_backend_email.mdx @@ -0,0 +1,21 @@ +##### Backend vars required for sending e-mail messages + +If you want the e-mail sending feature to work, you need to set at least the following variables: + +| Name | Description | Example | +| -------------------- | ------------------------------------------------------------ | ------------------ | +| `EMAIL_FROM_ADDRESS` | An e-mail address visible to end-users as the "from" field | `from@example.org` | + +##### Optional vars for sending e-mail messages + +You can set the following variables to further configure the e-mail sending feature: + +| Name | Description | Example | +| ----------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------ | +| `VITE_EMAIL_ASSETS_URL` | Absolute URL to email assets. By default it's `${SB_DOMAIN_WEB_APP}/email-assets` | `https://app.demo.saas.apptoku.com/email-assets` | +| `EMAIL_BACKEND` | The backend to use for sending emails. For the list of available backends see [Email backends](https://docs.djangoproject.com/en/5.0/topics/email/#topic-email-backends). | `django_ses.SESBackend` | +| `EMAIL_HOST` | Address of the SMTP host (make sure you set `EMAIL_BACKEND=django.core.mail.backends.smtp.EmailBackend` to use it) | `mailcatcher` | +| `EMAIL_PORT` | Port of the SMTP host | `1025` | +| `EMAIL_HOST_USER` | Username used for SMTP authentication | `user` | +| `EMAIL_HOST_PASSWORD` | Password used for SMTP authentication | `password` | +| `EMAIL_REPLY_ADDRESS` | An e-mail address used when a user replies to an e-mail dispatched by the system | `reply@example.org` |
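Assuming the variables above are exposed through Django's standard `EMAIL_*` settings (plus the custom `EMAIL_FROM_ADDRESS`), a minimal sketch of dispatching a message through whichever backend is configured (Mailcatcher locally, an SMTP or SES backend after deployment); the recipient address is a placeholder:

```python
# Sketch only: send through the configured EMAIL_BACKEND.
from django.conf import settings
from django.core.mail import send_mail

send_mail(
    subject="Welcome!",
    message="Plain-text body.",
    from_email=settings.EMAIL_FROM_ADDRESS,  # assumes the env var is mirrored as a setting
    recipient_list=["user@example.org"],     # placeholder recipient
    html_message="<p>HTML body.</p>",
)
```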
diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_backend_optional.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_backend_optional.mdx index 56cb2a8b8..035fd1d17 100644 --- a/packages/internal/docs/docs/shared/partials/env-vars/_backend_optional.mdx +++ b/packages/internal/docs/docs/shared/partials/env-vars/_backend_optional.mdx @@ -1,16 +1,14 @@ -**Required** variables for Stripe usage: +import BackendEmail from './_backend_email.mdx'; +import BackendStripe from './_backend_stripe.mdx'; -| Name | Description | Example | -|---------------------------|-----------------------------------------------------------------------------------------------------------|-------------------------| -| `STRIPE_TEST_SECRET_KEY` | Use this one for Stripe test mode. [docs](https://stripe.com/docs/keys) | `sk_test_4dEFRCLCgCb` | -| `STRIPE_LIVE_SECRET_KEY` | Use this one for Stripe live mode. [docs](https://stripe.com/docs/keys) | `sk_4dEFRCLCgCbMy6O4FX` | -| `DJSTRIPE_WEBHOOK_SECRET` | [docs](https://stripe.com/docs/webhooks/best-practices#endpoint-secrets) | `whsec_12345` | -| `STRIPE_LIVE_MODE` | (bool) Set `true` or `false` to toggle between `live` and `test` Stripe mode. By default set to `False` | `true` | +<BackendEmail /> -**Optional** variables: + +<BackendStripe /> +##### Other optional backend vars: | Name | Description | Example | -|--------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------| +| ------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------- | | `SENTRY_DSN` | [Sentry](https://sentry.io/) client key | `https://(...)@(...).ingest.sentry.io/(...)` | | `SOCIAL_AUTH_ALLOWED_REDIRECT_HOSTS` | Allowed hosts for OAuth redirection. Check [Python Social Auth](https://python-social-auth.readthedocs.io/en/latest/configuration/settings.html#processing-redirects-and-urlopen) package documentation for more details | `app.demo.saas.apptoku.com` | | `SOCIAL_AUTH_FACEBOOK_KEY` | Client key for Facebook OAuth integration | | @@ -19,3 +17,4 @@ | `SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET` | Client secret for Google OAuth integration | | | `SUBSCRIPTION_TRIAL_PERIOD_DAYS` | Number of days for subscription trial. By default set to `7` | `1` | | `OPENAI_API_KEY` | OpenAI API for generative AI module | | +| `VITE_WEB_APP_URL` | Absolute URL to the web application. By default it's the value kept in the global environment setting `SB_DOMAIN_WEB_APP` | `https://app.demo.saas.apptoku.com` | diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_backend_stripe.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_backend_stripe.mdx new file mode 100644 index 000000000..74835f89b --- /dev/null +++ b/packages/internal/docs/docs/shared/partials/env-vars/_backend_stripe.mdx @@ -0,0 +1,10 @@ +##### Backend vars required when you want to use Stripe + +If you plan to use the Stripe integration, you have to define the following variables: + +| Name | Description | Example | +| ------------------------- | ----------------------------------------------------------------------------------------------------------- | ----------------------- | +| `STRIPE_TEST_SECRET_KEY` | Use this one for Stripe test mode. [docs](https://stripe.com/docs/keys) | `sk_test_4dEFRCLCgCb` | +| `STRIPE_LIVE_SECRET_KEY` | Use this one for Stripe live mode. [docs](https://stripe.com/docs/keys) | `sk_4dEFRCLCgCbMy6O4FX` | +| `DJSTRIPE_WEBHOOK_SECRET` | [docs](https://stripe.com/docs/webhooks/best-practices#endpoint-secrets) | `whsec_12345` | +| `STRIPE_LIVE_MODE` | (bool) Set `true` or `false` to toggle between `live` and `test` Stripe mode.
By default set to `False` | `true` | diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_webapp.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_webapp.mdx index 9c93a2da1..31b2a6ce1 100644 --- a/packages/internal/docs/docs/shared/partials/env-vars/_webapp.mdx +++ b/packages/internal/docs/docs/shared/partials/env-vars/_webapp.mdx @@ -1,11 +1,3 @@ -**Required** variables: - -| Name | Description | Example | -|------------------------|----------------------------------------------------------------------------------|--------------------------------------------------| -| VITE_BASE_API_URL | Path to access backend API | `/api` | -| VITE_WEB_APP_URL | Absolute URL to application | `https://app.demo.saas.apptoku.com` | -| VITE_EMAIL_ASSETS_URL | Absolute URL to email assets. By default it's `${VITE_WEB_APP_URL}/email-assets` | `https://app.demo.saas.apptoku.com/email-assets` | - import OptionalVars from './_webapp_optional.mdx'; <>{props.optional ? <OptionalVars /> : null}</> diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_webapp_contentful.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_webapp_contentful.mdx new file mode 100644 index 000000000..d805c6d59 --- /dev/null +++ b/packages/internal/docs/docs/shared/partials/env-vars/_webapp_contentful.mdx @@ -0,0 +1,7 @@ +##### Web app vars required when you want to use Contentful + +| Name | Description | +| --------------------- | --------------------------- | +| VITE_CONTENTFUL_SPACE | Contentful Space ID | +| VITE_CONTENTFUL_TOKEN | Contentful API access token | +| VITE_CONTENTFUL_ENV | Contentful environment name | diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_webapp_optional.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_webapp_optional.mdx index 4ed374c1e..92733f995 100644 --- a/packages/internal/docs/docs/shared/partials/env-vars/_webapp_optional.mdx +++ b/packages/internal/docs/docs/shared/partials/env-vars/_webapp_optional.mdx @@ -1,10 +1,12 @@ -**Optional** variables (in order to use contentful or stripe services): +import WebappContentful from './_webapp_contentful.mdx'; -| Name | Description | -|-----------------------------------|-----------------------------------------| -| VITE_CONTENTFUL_SPACE | Contentful Space ID | -| VITE_CONTENTFUL_TOKEN | Contentful API access token | -| VITE_CONTENTFUL_ENV | Contentful environment name | -| VITE_STRIPE_PUBLISHABLE_KEY | Stripe Publishable key | -| VITE_SENTRY_DSN | [Sentry](https://sentry.io/) client key | -| VITE_GOOGLE_ANALYTICS_TRACKING_ID | Google Analytics 4 GTM ID | + +<WebappContentful /> +##### Other optional web app variables + +| Name | Description | Example | +| --------------------------------- | ------------------------------------------------------------------------------ | ------- | +| VITE_BASE_API_URL | Path to access backend API. Set if you want to override the default behaviour | `/api` | +| VITE_STRIPE_PUBLISHABLE_KEY | Stripe Publishable key | | +| VITE_SENTRY_DSN | [Sentry](https://sentry.io/) client key | | +| VITE_GOOGLE_ANALYTICS_TRACKING_ID | Google Analytics 4 GTM ID | | diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_webapp_stripe.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_webapp_stripe.mdx new file mode 100644 index 000000000..240704d90 --- /dev/null +++ b/packages/internal/docs/docs/shared/partials/env-vars/_webapp_stripe.mdx @@ -0,0 +1,9 @@ +import WebappContentful from './_webapp_contentful.mdx'; + +<WebappContentful /> + +##### Web app vars required when you want to use Stripe + +| Name | Description | +| --------------------------- | ---------------------- | +| VITE_STRIPE_PUBLISHABLE_KEY | Stripe Publishable key | diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_workers.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_workers.mdx index 66c59b9d8..29788ea50 100644 --- a/packages/internal/docs/docs/shared/partials/env-vars/_workers.mdx +++ b/packages/internal/docs/docs/shared/partials/env-vars/_workers.mdx @@ -1,12 +1,11 @@ -**Required** variables: - -| Name | Description | Example | -|------------------|---------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------| -| FROM_EMAIL | Email used in `From` email field | `admin@exmaple.com` | -| HASHID_SALT | [docs](https://github.com/nshafer/django-hashid-field#hashid_field_salt) | `t5$^r\*xsMRXn1xjzhRSl8I5Hb3BUW$4U` | -| JWT_SECRET | Secret used to decode JWT used in subscriptions. The value needs to be the same as `DJANGO_SECRET_KEY` backend environment variable | | -| WEB_APP_URL | | `https://app.demo.saas.apptoku.com` | -| EMAIL_ASSETS_URL | Absolute URL to email assets. By default it's `${WEB_APP_URL}/email-assets` | `https://app.demo.saas.apptoku.com/email-assets` | +##### Required workers vars + +You will not be able to deploy the workers service without setting the following variables: + +| Name | Description | Example | +| ----------- | --------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------- | +| HASHID_SALT | [docs](https://github.com/nshafer/django-hashid-field#hashid_field_salt) | `t5$^r\*xsMRXn1xjzhRSl8I5Hb3BUW$4U` | +| JWT_SECRET | Secret used to decode JWT used in subscriptions.
The value needs to be the same as `DJANGO_SECRET_KEY` backend environment variable | | import OptionalVars from './_workers_optional.mdx'; diff --git a/packages/internal/docs/docs/shared/partials/env-vars/_workers_optional.mdx b/packages/internal/docs/docs/shared/partials/env-vars/_workers_optional.mdx index fd3bf9774..54aa438bb 100644 --- a/packages/internal/docs/docs/shared/partials/env-vars/_workers_optional.mdx +++ b/packages/internal/docs/docs/shared/partials/env-vars/_workers_optional.mdx @@ -1,7 +1,9 @@ -**Optional** variables (in order to use contentful or sentry services): +##### Other optional workers vars + +The following vars should be set in order to use Contentful or Sentry services: | Name | Description | -|-------------------------|-----------------------------------------| +| ----------------------- | --------------------------------------- | | CONTENTFUL_ACCESS_TOKEN | Contentful API access token | | CONTENTFUL_ENVIRONMENT | Contentful environment name | | CONTENTFUL_SPACE_ID | Contentful Space ID | diff --git a/packages/internal/docs/docs/working-with-sb/async-workers/create-workers-module.mdx b/packages/internal/docs/docs/working-with-sb/async-workers/create-workers-module.mdx index c4d9ff68b..1a13f3090 100644 --- a/packages/internal/docs/docs/working-with-sb/async-workers/create-workers-module.mdx +++ b/packages/internal/docs/docs/working-with-sb/async-workers/create-workers-module.mdx @@ -1,5 +1,5 @@ --- -title: How to create a new workers module? +title: How to create a new Lambda workers module? description: Guiding you through the process of creating a new workers module --- import ProjectName from '../../shared/components/ProjectName.component'; diff --git a/packages/internal/docs/docs/working-with-sb/async-workers/debug-async-job-celery.mdx b/packages/internal/docs/docs/working-with-sb/async-workers/debug-async-job-celery.mdx new file mode 100644 index 000000000..3dbc1e869 --- /dev/null +++ b/packages/internal/docs/docs/working-with-sb/async-workers/debug-async-job-celery.mdx @@ -0,0 +1,40 @@ +--- +title: How to debug tasks run in Celery? +description: Debug tasks run in a Celery worker +--- + +import ProjectName from '../../shared/components/ProjectName.component'; + +### Monitoring + +To monitor the Celery cluster you can use **Flower**, which is configured in <ProjectName /> by default. +Flower is an open-source web application for monitoring and managing Celery clusters. It provides real-time information +about the status of Celery workers and tasks. + +:::info +If you want to learn more about Flower configuration, visit the +[official documentation](https://flower.readthedocs.io/en/latest/#) +::: + +#### Localhost + +To access Flower on localhost, open [http://localhost:5555/](http://localhost:5555/) in your browser. + +#### After deployment + +To access Flower in a deployed environment, open `http://flower.<your-domain>` in your browser. + +### Browsing task results + +By default, in <ProjectName />, `django-db` is configured as the Celery result backend. This means all task results are +serialized to the PostgreSQL database and you are able to browse through those results in Django Admin. + +#### Localhost + +To access the list, open [http://admin.localhost:5001/django_celery_results/taskresult/](http://admin.localhost:5001/django_celery_results/taskresult/) +in your browser. + +#### After deployment + +To access the list in a deployed environment, open `https://admin.<your-domain>/django_celery_results/taskresult/` +in your browser.
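The `django-db` result backend described above can also be queried programmatically, which is handy when debugging outside Django Admin; a small sketch using the `django_celery_results` model (filter values are illustrative):

```python
# Sketch only: inspect Celery task results persisted by the django-db backend.
from django_celery_results.models import TaskResult

# Ten most recent failures, newest first (status values follow celery.states).
for result in TaskResult.objects.filter(status="FAILURE").order_by("-date_done")[:10]:
    print(result.task_name, result.task_id, result.result)
```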
diff --git a/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job-celery.mdx b/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job-celery.mdx new file mode 100644 index 000000000..7e091810f --- /dev/null +++ b/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job-celery.mdx @@ -0,0 +1,29 @@ +--- +title: How to run an async job in Celery from backend? +description: Trigger an execution of an asynchronous job in the Celery task queue from the backend of your SaaS application +--- + +import ProjectName from '../../shared/components/ProjectName.component'; + +In the <ProjectName />, the Celery task queue is preconfigured and enabled by default, using Redis as the message broker. + +:::info +If you're not familiar with Celery, here's the official documentation: ["What is Celery?"](https://docs.celeryq.dev/en/stable/) +::: + +#### Invoking tasks + +Running Celery asynchronous tasks from the backend is pretty straightforward, and it's best that you refer to the +official docs, but here's a very basic example: + +```python showLineNumbers +from celery import shared_task + +@shared_task() +def example_task(arg1: str, arg2: dict): + pass +``` + +```python showLineNumbers +example_task.apply_async(('arg1-value', { 'arg2-field': 'arg2-field-value' })) +```
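Two equivalent invocation forms for the `example_task` defined in the snippet above, in case the positional `apply_async` call looks opaque (both are standard Celery API):

```python
# Shortcut form: delay(*args, **kwargs) is equivalent to apply_async(args=args).
example_task.delay('arg1-value', {'arg2-field': 'arg2-field-value'})

# Keyword form with a 30-second delay before execution.
example_task.apply_async(
    args=('arg1-value', {'arg2-field': 'arg2-field-value'}),
    countdown=30,
)
```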
diff --git a/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job.mdx b/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job-lambda.mdx similarity index 52% rename from packages/internal/docs/docs/working-with-sb/async-workers/run-async-job.mdx rename to packages/internal/docs/docs/working-with-sb/async-workers/run-async-job-lambda.mdx index 018e11e19..6c931ee2f 100644 --- a/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job.mdx +++ b/packages/internal/docs/docs/working-with-sb/async-workers/run-async-job-lambda.mdx @@ -1,14 +1,14 @@ --- -title: How to run an async job from backend? -description: Run an asynchronous job from the backend of your SaaS application +title: How to run an async job in AWS Lambda from backend? +description: Trigger an execution of an asynchronous job in AWS Lambda from the backend of your SaaS application --- import ProjectName from '../../shared/components/ProjectName.component'; -In the <ProjectName />, asynchronous jobs are represented as tasks. -These tasks are defined as classes that inherit from a `Task` class provided by the <ProjectName />. -The `Task` class provides a framework for defining tasks that can be executed asynchronously. +In the <ProjectName />, asynchronous jobs, executed in AWS Lambda, are represented as tasks. +These tasks are defined as classes that inherit from a `LambdaTask` class provided by the <ProjectName />. +The `LambdaTask` class provides a framework for defining tasks that can be executed asynchronously. -When a task is executed, it is passed to an AWS Lambda function as an event. +When a task is executed, it is passed to an AWS Lambda function as an event through Amazon EventBridge. The AWS Lambda function then processes the event and executes the task asynchronously. :::info If you're not familiar with AWS Lambda, here's the official documentation: ["What is AWS Lambda?"](https://docs.aws.amazon.com/lambda/latest/dg/welcome.html) ::: #### Invoking tasks -Running asynchronous tasks from the backend is made easy with the `Task` class. -The `Task` class is a utility class that allows you to define and run asynchronous tasks in a clean and organized way. +Running asynchronous tasks from the backend is made easy with the `LambdaTask` class. It is a utility class that allows +you to define and run asynchronous tasks in a clean and organized way. ```python showLineNumbers import importlib from django.conf import settings -module_name, package = settings.TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) -Task = getattr(importlib.import_module(module_name), package) +module_name, package = settings.LAMBDA_TASKS_BASE_HANDLER.rsplit(".", maxsplit=1) +LambdaTask = getattr(importlib.import_module(module_name), package) -class ExampleTask(Task): +class ExampleTask(LambdaTask): def __init__(self): super().__init__(name="example_task", source='backend.example_task') ``` :::info -`Task` uses the `boto3` library to interact with the Amazon EventBridge service. -When a particular task is invoked, it generates an event entry that is sent to the EventBridge service using the `put_events` method of the `boto3` client. -[Learn more about Amazon EventBridge](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-what-is.html). +`LambdaTask` uses the `boto3` library to interact with the Amazon EventBridge service. +When a particular task is invoked, it generates an event entry that is sent to the EventBridge service using the +`put_events` method of the `boto3` client. [Learn more about Amazon EventBridge](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-what-is.html). ::: The `importlib` module provides tools for dynamically importing Python modules at runtime. The code then uses the imported modules to dynamically import a task handler module specified in the Django project's settings. -The `TASKS_BASE_HANDLER` setting is a string that specifies the module and class name of the base task handler. +The `LAMBDA_TASKS_BASE_HANDLER` setting is a string that specifies the module and class name of the base task handler. -The `getattr()` function is used to get the `Task` class from the dynamically imported module. +The `getattr()` function is used to get the `LambdaTask` class from the dynamically imported module. -The `ExampleTask` class is then defined as a subclass of `Task`. +The `ExampleTask` class is then defined as a subclass of `LambdaTask`. The `__init__` method of `ExampleTask` is used to set up the task. It sets up the basic properties of the task, such as its name and source. The `name` and `source` parameters can be used to identify the task to run within the serverless application. :::tip -Make sure to check ["How to create a new workers module?"](./create-workers-module) to learn how to register and configure your task handlers. +Make sure to check ["How to create a new Lambda workers module?"](./create-workers-module) to learn how to register and configure your Lambda +task handlers. ::: Next, to invoke this task you can simply do: @@ -64,4 +65,4 @@ task.apply(data=data) ``` The `apply()` method is responsible for executing the task. -It takes the `data` parameter and performs the necessary actions to complete the task. \ No newline at end of file +It takes the `data` parameter and performs the necessary actions to complete the task.
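Since `LambdaTask` ultimately calls `put_events`, the underlying boto3 call looks roughly like the sketch below; the bus name and payload shape are illustrative, not the boilerplate's exact wire format:

```python
import json

import boto3

# Rough sketch of the EventBridge call that LambdaTask wraps.
events = boto3.client("events")
events.put_events(
    Entries=[
        {
            "Source": "backend.example_task",         # matches the task's `source`
            "DetailType": "example_task",             # matches the task's `name`
            "Detail": json.dumps({"data": {"key": "value"}}),
            "EventBusName": "myproject-env-workers",  # illustrative bus name
        }
    ]
)
```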
diff --git a/packages/internal/docs/docs/working-with-sb/emails/send-email.mdx b/packages/internal/docs/docs/working-with-sb/emails/send-email.mdx index 70cac1692..770228b47 100644 --- a/packages/internal/docs/docs/working-with-sb/emails/send-email.mdx +++ b/packages/internal/docs/docs/working-with-sb/emails/send-email.mdx @@ -1,23 +1,32 @@ --- title: How to send an email from backend? -description: Send emails programmatically using Django and async workers +description: Send emails programmatically using Django and Celery async workers --- In the SaaS Boilerplate, you can send emails from the backend by creating a new class that subclasses the `Email` class. The `Email` class is a base class that provides common functionality for sending emails. :::caution -This guide assumes that you are already familiar with async workers and Django REST Framework (DRF) serializers. +This guide assumes that you are already familiar with Celery async workers and Django REST Framework (DRF) serializers. If you are not familiar with these concepts, we recommend that you read the following articles: + - ["Working with serializers"](../graphql/backend/working-with-serializers) -- ["How to create a new workers module?"](../async-workers/create-workers-module) -- ["How to run an async job from backend?"](../async-workers/run-async-job) +- ["How to run a Celery async job from backend?"](../async-workers/run-async-job-celery) ::: +## Configure env variables + +First, you need to configure the environment variables of the [backend service](../../api-reference/env#backend-service) to be able to dispatch any emails. + +import BackendEmailEnvVars from '../../shared/partials/env-vars/_backend_email.mdx'; + +<BackendEmailEnvVars /> + ## Email Serializer In the SaaS Boilerplate, email data is serialized using the Django REST Framework serializers. -Serializers define how data should be converted to and from JSON format, making it easy to transmit data over the [Amazon EventBridge API](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-what-is.html). +Serializers define how data should be converted to and from JSON format, making it easy to transmit data over the Redis +broker to the Celery task runner. Here's an example of an email serializer used in the SaaS Boilerplate: @@ -96,4 +105,4 @@ class UserSignupSerializer(serializers.ModelSerializer): ).send() return {'id': user.id, 'email': user.email, 'access': str(refresh.access_token), 'refresh': str(refresh)} -``` \ No newline at end of file +``` diff --git a/packages/internal/docs/sidebars.js b/packages/internal/docs/sidebars.js index 3d6bb76c4..beaf277a0 100644 --- a/packages/internal/docs/sidebars.js +++ b/packages/internal/docs/sidebars.js @@ -517,7 +517,9 @@ module.exports = { slug: '/working-with-sb/async-workers', }, items: [ - 'working-with-sb/async-workers/run-async-job', + 'working-with-sb/async-workers/run-async-job-celery', + 'working-with-sb/async-workers/debug-async-job-celery', + 'working-with-sb/async-workers/run-async-job-lambda', 'working-with-sb/async-workers/create-workers-module', ], }, diff --git a/packages/webapp-libs/webapp-core/src/config/env.ts b/packages/webapp-libs/webapp-core/src/config/env.ts index 951fcac82..97ed4bc4b 100644 --- a/packages/webapp-libs/webapp-core/src/config/env.ts +++ b/packages/webapp-libs/webapp-core/src/config/env.ts @@ -1,7 +1,7 @@ // @ts-nocheck export const ENV = { - BASE_API_URL: process.env.VITE_BASE_API_URL ?? '', + BASE_API_URL: process.env.VITE_BASE_API_URL ?? '/api', ENVIRONMENT_NAME: process.env.VITE_ENVIRONMENT_NAME, SENTRY_DSN: process.env.VITE_SENTRY_DSN, WEB_APP_URL: process.env.VITE_WEB_APP_URL ??
'', diff --git a/packages/webapp/.env.shared b/packages/webapp/.env.shared index 0d964a8a5..cc86caf00 100644 --- a/packages/webapp/.env.shared +++ b/packages/webapp/.env.shared @@ -1,11 +1,7 @@ VITE_ENVIRONMENT_NAME=local -VITE_BASE_API_URL=/api VITE_CONTENTFUL_SPACE= VITE_CONTENTFUL_TOKEN= VITE_CONTENTFUL_ENV=develop -VITE_EMAIL_ASSETS_URL=/email-assets -VITE_WEB_APP_URL=http://localhost:3000 - VITE_STRIPE_PUBLISHABLE_KEY= diff --git a/packages/workers/.coveragerc b/packages/workers/.coveragerc index a47da1ccc..15ff1034a 100644 --- a/packages/workers/.coveragerc +++ b/packages/workers/.coveragerc @@ -6,4 +6,4 @@ branch = true source = . [report] -include = content/*, dao/*, emails/*, scheduler/*, settings/*, userauth/*, utils/*, websockets/* \ No newline at end of file +include = content/*, dao/*, scheduler/*, settings/*, userauth/*, utils/* \ No newline at end of file diff --git a/packages/workers/.env.test b/packages/workers/.env.test index 85d168af0..299f959c2 100644 --- a/packages/workers/.env.test +++ b/packages/workers/.env.test @@ -3,8 +3,6 @@ AWS_SECRET_ACCESS_KEY=secretkey AWS_DEFAULT_REGION=eu-west-1 AWS_REGION=eu-west-1 -FROM_EMAIL=from@example.com - DB_CONNECTION={"engine":"postgresql","conndbname":"backend","dbname":"workers_test","username":"backend","password":"backend","host":"db","port":5432} WEB_APP_URL=http://localhost diff --git a/packages/workers/conftest.py b/packages/workers/conftest.py index e83cfd2f4..82c9e1e34 100644 --- a/packages/workers/conftest.py +++ b/packages/workers/conftest.py @@ -10,12 +10,9 @@ from dao.db.models import Base from dao.db.session import db_session as db_session_ctx from userauth import factories as ua_factories -from websockets import factories as ws_factories from demo import factories as demo_factories register(ua_factories.UserFactory) -register(ws_factories.WebSocketConnectionFactory) -register(ws_factories.GraphQLSubscriptionFactory) register(demo_factories.CrudDemoItemFactory) register(demo_factories.DocumentDemoItemFactory) diff --git a/packages/workers/emails/handlers.js b/packages/workers/emails/handlers.js deleted file mode 100644 index 60eec3b33..000000000 --- a/packages/workers/emails/handlers.js +++ /dev/null @@ -1,44 +0,0 @@ -const AWS = require('@aws-sdk/client-ses'); -const nodemailer = require('nodemailer'); - -const { renderEmail } = require('./renderer/index.umd.js'); - -const config = { - awsEndpoint: process.env.AWS_ENDPOINT_URL, - fromEmail: process.env.FROM_EMAIL, -}; - -const sesClient = new AWS.SES({ - endpoint: config.awsEndpoint, -}); - -exports.sendEmail = async function (event) { - const { to, type } = event.detail || {}; - const { subject, html } = renderEmail(type, { - ...event.detail, - }); - - return sesClient.sendEmail({ - Source: config.fromEmail, - Destination: { ToAddresses: [to] }, - ReplyToAddresses: [config.fromEmail], - Message: { - Subject: { Charset: 'UTF-8', Data: subject }, - Body: { Html: { Charset: 'UTF-8', Data: html } }, - }, - }); -}; - -exports.sendEmailLocal = async function (event) { - const { to, type } = event.detail || {}; - const { subject, html } = renderEmail(type, { - ...event.detail, - webAppUrl: config.webAppUrl, - }); - - const transport = nodemailer.createTransport({ - host: 'mailcatcher', - port: 1025, - }); - transport.sendMail({ to, subject, html, from: config.fromEmail }); -}; diff --git a/packages/workers/package.json b/packages/workers/package.json index f2e0df190..c8da470f5 100755 --- a/packages/workers/package.json +++ b/packages/workers/package.json @@ -23,7 +23,7 @@ 
"@sb/tools": "workspace:*", "@sb/webapp-emails": "workspace:*", "serverless": "^3.38.0", - "serverless-esbuild": "^1.48.0", + "serverless-esbuild": "^1.52.1", "serverless-localstack": "^1.1.2", "serverless-step-functions": "^3.18.0" }, diff --git a/packages/workers/project.json b/packages/workers/project.json index a4a1e0c78..c5d7f7c81 100644 --- a/packages/workers/project.json +++ b/packages/workers/project.json @@ -46,10 +46,7 @@ ], "parallel": false }, - "dependsOn": [ - { "projects": ["webapp-emails"], "target": "build" }, - "compose-build-image" - ] + "dependsOn": ["compose-build-image"] }, "lint": { "executor": "nx:run-commands", @@ -76,10 +73,7 @@ "color": true, "command": "docker-compose run --rm --entrypoint /bin/bash workers /app/packages/workers/scripts/runtime/run_build.sh" }, - "dependsOn": [ - { "projects": ["webapp-emails"], "target": "build" }, - "compose-build-image" - ] + "dependsOn": ["compose-build-image"] } }, "tags": ["service"] diff --git a/packages/workers/serverless.yml b/packages/workers/serverless.yml index f69a9b6ac..bbf1aea97 100644 --- a/packages/workers/serverless.yml +++ b/packages/workers/serverless.yml @@ -37,18 +37,6 @@ functions: memorySize: 128 environment: ${self:custom.conf.ExecuteScheduledTask.environment} - SendEmail: - handler: ${self:custom.conf.SendEmail.handler} - timeout: 6 - memorySize: 128 - environment: ${self:custom.conf.SendEmail.environment} - events: - - eventBridge: - eventBus: "${self:custom.conf.eventBusArn}" - pattern: - source: - - backend.email - ExportUsers: handler: userauth.handlers.user_data_export runtime: python3.11 @@ -78,39 +66,8 @@ functions: source: - backend.contentfulSync - WebSocketsConnectHandler: - handler: websockets.handlers.connect.handle - runtime: python3.11 - timeout: 30 - memorySize: 256 - environment: ${self:custom.conf.WebSocketsHandler.environment} - vpc: ${self:custom.conf.vpc} - events: - - websocket: $connect - - WebSocketsMessageHandler: - handler: websockets.handlers.message.handle - runtime: python3.11 - timeout: 30 - memorySize: 256 - environment: ${self:custom.conf.WebSocketsHandler.environment} - vpc: ${self:custom.conf.vpc} - events: - - websocket: $default - - WebSocketsDisconnectHandler: - handler: websockets.handlers.disconnect.handle - runtime: python3.11 - timeout: 30 - memorySize: 256 - environment: ${self:custom.conf.WebSocketsHandler.environment} - vpc: ${self:custom.conf.vpc} - events: - - websocket: $disconnect - plugins: - serverless-step-functions - - serverless-esbuild - serverless-localstack custom: @@ -122,8 +79,6 @@ custom: ssmService: env-${env:PROJECT_NAME}-${self:provider.stage}-workers - esbuild: ${self:custom.conf.esbuild} - conf: ${file(./${self:custom.confFile.${self:provider.stage}})} confFile: local: workers.conf.local.yml diff --git a/packages/workers/settings/base.py b/packages/workers/settings/base.py index 9185b1a6d..3725b9815 100644 --- a/packages/workers/settings/base.py +++ b/packages/workers/settings/base.py @@ -8,7 +8,6 @@ AWS_ENDPOINT_URL = env('AWS_ENDPOINT_URL', None) AWS_DEFAULT_REGION = env('AWS_DEFAULT_REGION', None) SMTP_HOST = env('SMTP_HOST', None) -EMAIL_ENABLED = env.bool('EMAIL_ENABLED', default=True) secrets_manager_client = boto3.client('secretsmanager', endpoint_url=AWS_ENDPOINT_URL) @@ -37,8 +36,6 @@ def fetch_db_secret(db_secret_arn): DB_CONNECTION["host"] = DB_PROXY_ENDPOINT DB_CONNECTION["engine"] = "postgresql" -FROM_EMAIL = env('FROM_EMAIL', None) - WEB_APP_URL = env('WEB_APP_URL', None) SENTRY_DSN = env('SENTRY_DSN', None) diff --git 
a/packages/workers/websockets/__init__.py b/packages/workers/websockets/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/packages/workers/websockets/apigateway.py b/packages/workers/websockets/apigateway.py deleted file mode 100644 index ecfdf159b..000000000 --- a/packages/workers/websockets/apigateway.py +++ /dev/null @@ -1,22 +0,0 @@ -import json -import logging - -import boto3 -from dao.db.session import db_session -from .connection import purge_connection - -import settings - - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - - -def post_to_connection(domain_name, connection_id, body): - client = boto3.client('apigatewaymanagementapi', endpoint_url=f"https://{domain_name}/{settings.ENVIRONMENT_NAME}") - logger.info(f"Posting to connection: {json.dumps(body)}") - try: - client.post_to_connection(Data=json.dumps(body).encode(), ConnectionId=connection_id) - except client.exceptions.GoneException: - with db_session() as session: - purge_connection(connection_id, session) diff --git a/packages/workers/websockets/connection.py b/packages/workers/websockets/connection.py deleted file mode 100644 index 551740dd0..000000000 --- a/packages/workers/websockets/connection.py +++ /dev/null @@ -1,8 +0,0 @@ -from . import models - - -def purge_connection(connection_id, session): - connection = session.query(models.WebSocketConnection).filter_by(connection_id=connection_id).first() - if connection: - session.query(models.GraphQLSubscription).filter_by(connection=connection).delete() - session.delete(connection) diff --git a/packages/workers/websockets/factories.py b/packages/workers/websockets/factories.py deleted file mode 100644 index 524afa900..000000000 --- a/packages/workers/websockets/factories.py +++ /dev/null @@ -1,18 +0,0 @@ -import factory - -from dao.db import session -from . import models - - -class WebSocketConnectionFactory(factory.alchemy.SQLAlchemyModelFactory): - class Meta: - model = models.WebSocketConnection - sqlalchemy_session = session.Session - - -class GraphQLSubscriptionFactory(factory.alchemy.SQLAlchemyModelFactory): - class Meta: - model = models.GraphQLSubscription - sqlalchemy_session = session.Session - - connection = factory.SubFactory(WebSocketConnectionFactory) diff --git a/packages/workers/websockets/handlers/__init__.py b/packages/workers/websockets/handlers/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/packages/workers/websockets/handlers/connect.py b/packages/workers/websockets/handlers/connect.py deleted file mode 100644 index d17e4ba73..000000000 --- a/packages/workers/websockets/handlers/connect.py +++ /dev/null @@ -1,55 +0,0 @@ -import json -import logging - -import jwt -from hashids import Hashids - -import settings -from dao.db.session import db_session -from userauth.models import User -from utils import monitoring -from .. 
import models, utils - -monitoring.init() - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - -hashids = Hashids(salt=settings.HASHID_SALT, min_length=7) - - -def get_token_from_cookie(cookie): - cookies = cookie.split("; ") - token = next((c[6:] for c in cookies if c.startswith("token=")), None) - return token - - -def get_user_id_from_token(token): - decoded_jwt = jwt.decode(token, settings.JWT_SECRET, algorithms="HS256") - user_hashid = decoded_jwt.get("user_id") - return hashids.decode(user_hashid)[0] - - -def handle(event, context): - logger.info(json.dumps(event)) - - connection_id = event["requestContext"]["connectionId"] - cookie_header = event["headers"].get("Cookie", "") - - token = get_token_from_cookie(cookie_header) - if not token: - return utils.prepare_response("Token is missing.", 400) - - try: - user_id = get_user_id_from_token(token) - except jwt.ExpiredSignatureError: - return utils.prepare_response("Signature has expired.", 400) - - with db_session() as session: - user = session.query(User).filter_by(id=user_id).first() - if not user: - return utils.prepare_response("User doesn't exist.", 400) - connection = models.WebSocketConnection(user=user, connection_id=connection_id) - session.add(connection) - - return utils.prepare_response(connection_id) diff --git a/packages/workers/websockets/handlers/disconnect.py b/packages/workers/websockets/handlers/disconnect.py deleted file mode 100644 index 5fc26a453..000000000 --- a/packages/workers/websockets/handlers/disconnect.py +++ /dev/null @@ -1,23 +0,0 @@ -import json -import logging - -from dao.db.session import db_session -from utils import monitoring -from .. import utils -from ..connection import purge_connection - -monitoring.init() - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - - -def handle(event, context): - logger.info(json.dumps(event)) - - connection_id = event["requestContext"]["connectionId"] - - with db_session() as session: - purge_connection(connection_id, session) - - return utils.prepare_response(connection_id) diff --git a/packages/workers/websockets/handlers/message.py b/packages/workers/websockets/handlers/message.py deleted file mode 100644 index 1ec1c6b35..000000000 --- a/packages/workers/websockets/handlers/message.py +++ /dev/null @@ -1,55 +0,0 @@ -import json -import logging -import uuid - -from dao.db.session import db_session -from utils import monitoring -from .. 
import models, apigateway, utils - -monitoring.init() - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - - -def handle(event, context): - logger.info(json.dumps(event)) - - connection_id = event["requestContext"]["connectionId"] - domain_name = event["requestContext"]["domainName"] - operation = json.loads(event.get("body", "{}")) - - if operation.get("type") == "connection_init": - apigateway.post_to_connection( - domain_name, - connection_id, - {"id": str(uuid.uuid4()), "type": "connection_ack", "payload": {"con_id": connection_id}}, - ) - - elif operation.get("type") == "subscribe": - subscription_id = operation.get("id") - payload = operation.get("payload", {}) - - with db_session() as session: - connection = session.query(models.WebSocketConnection).filter_by(connection_id=connection_id).first() - if connection: - subscription = models.GraphQLSubscription( - connection=connection, - relay_id=subscription_id, - operation_name=payload.get("operationName"), - query=payload.get("query"), - variables=payload.get("variables"), - ) - session.add(subscription) - - elif operation.get("type") == "stop": - subscription_id = operation.get("id") - - with db_session() as session: - connection = session.query(models.WebSocketConnection).filter_by(connection_id=connection_id).first() - if connection: - session.query(models.GraphQLSubscription).filter_by( - connection=connection, relay_id=subscription_id - ).delete() - - return utils.prepare_response(connection_id) diff --git a/packages/workers/websockets/models.py b/packages/workers/websockets/models.py deleted file mode 100644 index 1b979aeda..000000000 --- a/packages/workers/websockets/models.py +++ /dev/null @@ -1,26 +0,0 @@ -from dao.db import models -from sqlalchemy import Column, ForeignKey, String, Integer, JSON -from sqlalchemy.orm import relationship - -from userauth.models import User - - -class WebSocketConnection(models.Base): - __tablename__ = "websockets_websocketconnection" - - id = Column("id", Integer, primary_key=True, autoincrement=True) - user_id = Column(ForeignKey("users_user.id")) - user = relationship(User) - connection_id = Column("connection_id", String) - - -class GraphQLSubscription(models.Base): - __tablename__ = "websockets_graphqlsubscription" - - id = Column("id", Integer, primary_key=True, autoincrement=True) - connection_id = Column(ForeignKey("websockets_websocketconnection.id")) - connection = relationship(WebSocketConnection) - relay_id = Column("relay_id", String) - operation_name = Column("operation_name", String) - query = Column("query", String) - variables = Column("variables", JSON) diff --git a/packages/workers/websockets/tests/__init__.py b/packages/workers/websockets/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/packages/workers/websockets/tests/test_handlers.py b/packages/workers/websockets/tests/test_handlers.py deleted file mode 100644 index 74ee3bd35..000000000 --- a/packages/workers/websockets/tests/test_handlers.py +++ /dev/null @@ -1,135 +0,0 @@ -import pytest - -from dao.db.session import db_session -from .. 
import models -from ..handlers import connect, message, disconnect - -pytestmark = pytest.mark.usefixtures('db_session') - - -DOMAIN_NAME = "example.com" - - -def test_handler_connect(user_factory): - user_factory(id=1) - - response = connect.handle( - { - "requestContext": {"eventType": "CONNECT", "connectionId": "conn-id"}, - "headers": { - "Cookie": ( - "token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ0b2tlbl90eXBlIjoiYWNjZXNzIiwiZXhwIjoyNjI0NTI3Mzg" - "5LCJqdGkiOiJkNThkOTBmZGExN2Q0ZTBhOWMzMjNmNWIxZmQ0NzQzOSIsInVzZXJfaWQiOiJ6eGtYRzhaIn0._KiwJy4qO" - "kaXGPe5AsAylPZvh2smpV6d-EN_xsHpyTY" - ) - }, - }, - {}, - ) - - assert response == { - "statusCode": 200, - "body": "conn-id", - "headers": {"Sec-WebSocket-Protocol": "graphql-transport-ws"}, - } - with db_session() as session: - assert session.query(models.WebSocketConnection).filter_by(user_id=1, connection_id="conn-id").count() == 1 - - -def test_handler_connect_without_existing_user(): - response = connect.handle( - { - "requestContext": {"eventType": "CONNECT", "connectionId": "conn-id"}, - "headers": { - "Cookie": ( - "token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ0b2tlbl90eXBlIjoiYWNjZXNzIiwiZXhwIjoyNjI0NTI3Mzg" - "5LCJqdGkiOiJkNThkOTBmZGExN2Q0ZTBhOWMzMjNmNWIxZmQ0NzQzOSIsInVzZXJfaWQiOiJ6eGtYRzhaIn0._KiwJy4qO" - "kaXGPe5AsAylPZvh2smpV6d-EN_xsHpyTY" - ) - }, - }, - {}, - ) - - assert response == { - "statusCode": 400, - "body": "User doesn't exist.", - "headers": {"Sec-WebSocket-Protocol": "graphql-transport-ws"}, - } - with db_session() as session: - assert session.query(models.WebSocketConnection).filter_by(connection_id="conn-id").count() == 0 - - -def test_handler_message_connection_init(mocker, user_factory): - post_to_connection = mocker.patch("websockets.apigateway.post_to_connection") - mocker.patch("uuid.uuid4", return_value="some-uuid") - user_factory(id=1) - - message.handle( - { - "requestContext": {"eventType": "MESSAGE", "connectionId": "conn-id", "domainName": DOMAIN_NAME}, - "body": '{"type": "connection_init"}', - }, - {}, - ) - - post_to_connection.assert_called_once_with( - DOMAIN_NAME, - "conn-id", - {"id": "some-uuid", "type": "connection_ack", "payload": {"con_id": "conn-id"}}, - ) - - -def test_handler_message_subscribe(web_socket_connection_factory): - web_socket_connection_factory(id=1, connection_id="conn-id") - - message.handle( - { - "requestContext": {"eventType": "MESSAGE", "connectionId": "conn-id", "domainName": DOMAIN_NAME}, - "body": ( - '{\"id\":\"1\",\"type\":\"subscribe\",\"payload\":{\"query\":\"subscription ' - 'notificationsListSubscription {\\n notificationCreated {\\n id\\n }\\n}\\n\",\"' - 'operationName\":\"notificationsListSubscription\",\"variables\":{}}}' - ), - }, - {}, - ) - - with db_session() as session: - assert session.query(models.GraphQLSubscription).filter_by(connection_id=1).count() == 1 - - -def test_handler_message_stop(web_socket_connection_factory, graph_ql_subscription_factory): - connection = web_socket_connection_factory(id=1, connection_id="conn-id") - graph_ql_subscription_factory.create_batch(2, connection=connection, relay_id="1") - - message.handle( - { - "requestContext": {"eventType": "MESSAGE", "connectionId": "conn-id", "domainName": DOMAIN_NAME}, - "body": '{\"id\":\"1\",\"type\":\"stop\"}', - }, - {}, - ) - - with db_session() as session: - assert session.query(models.GraphQLSubscription).filter_by(connection_id=1).count() == 0 - - -def test_handler_disconnect(web_socket_connection_factory, graph_ql_subscription_factory): - connection = 
diff --git a/packages/workers/websockets/utils.py b/packages/workers/websockets/utils.py
deleted file mode 100644
index e132aac84..000000000
--- a/packages/workers/websockets/utils.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def prepare_response(connection_id, status_code=200):
-    return {
-        "statusCode": status_code,
-        "headers": {"Sec-WebSocket-Protocol": "graphql-transport-ws"},
-        "body": connection_id,
-    }
diff --git a/packages/workers/workers.conf.local.yml b/packages/workers/workers.conf.local.yml
index 01b37a541..2d9f2aaef 100644
--- a/packages/workers/workers.conf.local.yml
+++ b/packages/workers/workers.conf.local.yml
@@ -1,23 +1,14 @@
 stackName: "${env:PROJECT_NAME}-workers"
 
-iam: []
+iam: [ ]
 
-environment: {}
+environment: { }
 
 ScheduleTask:
-  environment: {}
+  environment: { }
 
 ExecuteScheduledTask:
-  environment: {}
-
-SendEmail:
-  handler: emails/handlers.sendEmailLocal
-  environment:
-    AWS_ENDPOINT_URL: "http://localstack:4566"
-    FROM_EMAIL: "from@example.com"
-    WEB_APP_URL: "http://localhost:3000"
-    VITE_EMAIL_ASSETS_URL: "http://localhost:3000/email-assets"
-    VITE_WEB_APP_URL: "http://localhost:3000"
+  environment: { }
 
 SynchronizeContentfulContent:
   environment:
@@ -27,22 +18,14 @@ SynchronizeContentfulContent:
     CONTENTFUL_ENVIRONMENT: "${env:CONTENTFUL_ENVIRONMENT,''}"
 
 WebSocketsHandler:
-  environment: []
+  environment: [ ]
 
 ExportUsers:
   environment:
     AWS_ENDPOINT_URL: "http://localstack:4566"
     AWS_EXPORTS_STORAGE_BUCKET_NAME: "exports-bucket"
 
-vpc: {}
-
-esbuild:
-  bundle: true
-  minify: false
-  target: [ 'es2020' ]
-  sourcemap: false
-  sourcesContent: false
-  define: { 'require.resolve': undefined }
+vpc: { }
 
 websocketApiId: { Fn::ImportValue: "${self:custom.projectEnvName}-webSocketApiId" }
 eventBusArn: "arn:aws:events:::event-bus/${self:custom.projectEnvName}-workers"
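Two kinds of change in the local workers config above: the `SendEmail` function and the `esbuild` block are removed outright, while the `{}` → `{ }` and `[]` → `[ ]` edits are purely cosmetic. YAML parses both spellings identically, e.g. (assuming PyYAML is available):

    import yaml

    assert yaml.safe_load("environment: {}") == yaml.safe_load("environment: { }") == {"environment": {}}
    assert yaml.safe_load("iam: []") == yaml.safe_load("iam: [ ]") == {"iam": []}

The same split applies to the deployed config, workers.conf.yml, below.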
diff --git a/packages/workers/workers.conf.yml b/packages/workers/workers.conf.yml
index 8c2b11284..c4c36ed75 100644
--- a/packages/workers/workers.conf.yml
+++ b/packages/workers/workers.conf.yml
@@ -23,7 +23,7 @@ iam:
   - Effect: "Allow"
     Action:
       - "events:PutEvents"
-    Resource: ["${self:custom.conf.eventBusArn}"]
+    Resource: [ "${self:custom.conf.eventBusArn}" ]
   - Effect: "Allow"
     Action:
       - "execute-api:ManageConnections"
@@ -63,15 +63,7 @@ ScheduleTask:
     TASK_SCHEDULING_STATE_MACHINE_ARN: { Ref: 'TaskSchedulingStateMachineID' }
 
 ExecuteScheduledTask:
-  environment: {}
-
-SendEmail:
-  handler: emails/handlers.sendEmail
-  environment:
-    FROM_EMAIL: ${ssm:/${self:custom.ssmService}/FROM_EMAIL}
-    WEB_APP_URL: ${ssm:/${self:custom.ssmService}/WEB_APP_URL}
-    VITE_WEB_APP_URL: ${ssm:/${self:custom.ssmService}/WEB_APP_URL}
-    VITE_EMAIL_ASSETS_URL: ${ssm:/${self:custom.ssmService}/EMAIL_ASSETS_URL}
+  environment: { }
 
 SynchronizeContentfulContent:
   environment:
@@ -101,13 +93,5 @@ vpc:
   - Fn::ImportValue: "${self:custom.projectEnvName}-privateSubnetOneId"
   - Fn::ImportValue: "${self:custom.projectEnvName}-privateSubnetTwoId"
 
-esbuild:
-  bundle: true
-  minify: true
-  target: [ 'es2020' ]
-  sourcemap: true
-  sourcesContent: false
-  define: { 'require.resolve': undefined }
-
 websocketApiId: { Fn::ImportValue: "${self:custom.projectEnvName}-webSocketApiId" }
 eventBusArn: "arn:aws:events:${aws:region}:${aws:accountId}:event-bus/${self:custom.projectEnvName}-workers"
\ No newline at end of file
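Note the two `eventBusArn` values: the local config leaves the region and account fields of the ARN empty (LocalStack does not need them), while the deployed config fills them from the `${aws:region}` and `${aws:accountId}` Serverless variables. An illustrative helper showing the shared format — the names and values here are hypothetical, not part of the patch:

    def event_bus_arn(project_env_name: str, region: str = "", account_id: str = "") -> str:
        # arn:aws:events:<region>:<account-id>:event-bus/<project-env-name>-workers
        return f"arn:aws:events:{region}:{account_id}:event-bus/{project_env_name}-workers"

    event_bus_arn("myproject-local")                               # arn:aws:events:::event-bus/myproject-local-workers
    event_bus_arn("myproject-prod", "eu-west-1", "123456789012")   # deployed form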