Add webhook support #119

Merged (9 commits) on Jan 28, 2024
6 changes: 6 additions & 0 deletions .github/workflows/main.yaml
@@ -7,8 +7,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: "Upgrade pip"
run: "pip install --upgrade pip"
- name: "Print python version"
run: "python --version"
- name: "Install package"
run: pip install ".[dev]"
- name: "Run lint checks"
4 changes: 4 additions & 0 deletions .github/workflows/no_debug_allowed.yaml
@@ -7,6 +7,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: "Upgrade pip"
run: "pip install --upgrade pip"
- name: "Install package"
44 changes: 44 additions & 0 deletions .github/workflows/no_forgoten_migrations.yaml
@@ -0,0 +1,44 @@
name: Make sure to run manage.py makemigrations if you change models

on: [pull_request]

jobs:
is-migration-diff-clean:
runs-on: ubuntu-latest
services:
postgres:
image: postgres:latest
env:
POSTGRES_DB: nycmesh-dev
POSTGRES_USER: nycmesh
POSTGRES_PASSWORD: abcd1234
POSTGRES_PORT: 5432
ports:
- 5432:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: "Upgrade pip"
run: "pip install --upgrade pip"
- name: "Install package"
run: pip install ".[dev]"
- name: "You forgot to run manage.py makemigrations for model changes"
env:
DB_NAME: nycmesh-dev
DB_USER: nycmesh
DB_PASSWORD: abcd1234
DB_HOST: localhost
DB_PORT: 5432
DJANGO_SECRET_KEY: k7j&!u07c%%97s!^a_6%mh_wbzo*$hl4lj_6c2ee6dk)y9!k88
run: |
python src/manage.py makemigrations meshapi --dry-run # Run extra time for debug output
python src/manage.py makemigrations meshapi --dry-run | grep "No changes detected in app 'meshapi'"
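The same check can be run locally before pushing. A minimal sketch using only the Python standard library, mirroring the two commands in the workflow above (this helper script is illustrative, not part of the PR, and assumes the same DB environment variables are exported):

import subprocess

# Run makemigrations in dry-run mode and fail loudly if Django detects model
# changes that have no corresponding migration file yet.
result = subprocess.run(
    ["python", "src/manage.py", "makemigrations", "meshapi", "--dry-run"],
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout)
assert "No changes detected in app 'meshapi'" in result.stdout, "Missing migrations for meshapi"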

12 changes: 8 additions & 4 deletions .github/workflows/run_django_tests.yaml
@@ -25,13 +25,17 @@ jobs:
image: pelias/parser:latest
ports:
- 6800:3000
strategy:
max-parallel: 4
matrix:
python-version: [3.11]
redis:
image: redis
ports:
- 6379:6379

steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: "Upgrade pip"
run: "pip install --upgrade pip"
- name: "Install package"
12 changes: 12 additions & 0 deletions docker-compose.yaml
@@ -19,6 +19,16 @@ services:
volumes:
- postgres_data:/var/lib/postgresql/data/

redis:
healthcheck:
test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
networks:
- api
ports:
- 6379:6379
image:
redis

pelias:
networks:
- api
@@ -28,6 +38,8 @@ services:
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
healthcheck:
test: curl http://127.0.0.1:8081/api/v1
interval: 2s
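A quick way to confirm the new Redis service is reachable from the host is the redis-py client, which is installed transitively via celery[redis]; a sketch, not part of the PR:

import redis

# Connect to the Redis container exposed on localhost:6379 by docker-compose
r = redis.Redis(host="localhost", port=6379, db=0)
assert r.ping()  # the container's own healthcheck uses `redis-cli incr ping` for the same purpose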
8 changes: 8 additions & 0 deletions entrypoint.sh
@@ -8,6 +8,14 @@ done

echo 'DB started'

# It's okay to start Celery in the background and continue without waiting, even though "migrate"
# might make DB changes we want to notify for: tasks are queued by Django Webhook and
# are executed as soon as Celery starts.
# FIXME: This makes testing locally a bit awkward, since this isn't started by "manage.py runserver".
# Maybe there's a way to do this better?
echo 'Starting Celery Worker...'
celery -A meshdb worker -l INFO --detach

echo 'Running Migrations...'
python manage.py migrate

5 changes: 4 additions & 1 deletion pyproject.toml
@@ -2,8 +2,10 @@
name = "nycmesh-meshdb"
version = "0.1"
dependencies = [
"celery[redis]==5.3.*",
"django==4.2.*",
"djangorestframework==3.14.*",
"django-webhook>=0.0.7,<0.1.0",
"psycopg2-binary==2.9.*",
"gunicorn==21.2.*",
"python-dotenv==1.0.*",
@@ -26,7 +28,8 @@ dev = [
"black == 23.7.*",
"isort == 5.12.*",
"coverage == 7.3.*",
"mypy == 1.5.*"
"mypy == 1.5.*",
"flask == 3.0.*",
]

[project.scripts]
@@ -0,0 +1,29 @@
# Generated by Django 4.2.9 on 2024-01-28 01:02

from django.db import migrations


class Migration(migrations.Migration):
dependencies = [
("meshapi", "0001_initial"),
]

operations = [
migrations.RemoveField(
model_name="installer",
name="group_ptr",
),
migrations.RemoveField(
model_name="readonly",
name="group_ptr",
),
migrations.DeleteModel(
name="Admin",
),
migrations.DeleteModel(
name="Installer",
),
migrations.DeleteModel(
name="ReadOnly",
),
]
96 changes: 96 additions & 0 deletions src/meshapi/tests/test_webhooks.py
@@ -0,0 +1,96 @@
import multiprocessing
import queue

from flask import Flask, Response, request

multiprocessing.set_start_method("fork")

import django_webhook.models
from celery.contrib.testing.worker import start_worker
from django.test import TransactionTestCase
from django_webhook.models import Webhook, WebhookTopic

from meshdb.celery import app as celery_app

from ..models import Building, Member
from .sample_data import sample_building, sample_member

HTTP_CALL_WAITING_TIME = 2 # Seconds


def dummy_webhook_listener(http_requests_queue):
flask_app = Flask(__name__)

@flask_app.route("/webhook", methods=["POST"])
def respond():
http_requests_queue.put(request.json)
return Response(status=200)

flask_app.run(host="127.0.0.1", port=8089, debug=False)


class TestMemberWebhook(TransactionTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# Start the celery worker inside the test case
cls.celery_worker = start_worker(celery_app, perform_ping_check=False)
cls.celery_worker.__enter__()

@classmethod
def tearDownClass(cls):
super().tearDownClass()
cls.celery_worker.__exit__(None, None, None)

def setUp(self):
# Create a simple HTTP listener using flask
self.http_requests_queue = multiprocessing.Queue()
self.app_process = multiprocessing.Process(
target=dummy_webhook_listener,
args=(self.http_requests_queue,),
)
self.app_process.start()

# Load the possible webhook topics from the models. Normally this happens
# at migration time, but the test DB is odd
django_webhook.models.populate_topics_from_settings()

# Create the webhook in Django
# (this would be done by an admin via the UI in prod)
webhook = Webhook(url="http://localhost:8089/webhook")
topics = [
WebhookTopic.objects.get(name="meshapi.Member/create"),
]
webhook.save()
webhook.topics.set(topics)
webhook.save()

def tearDown(self) -> None:
self.app_process.terminate()

def test_member(self):
# Creating a new member should trigger the webhook
member_obj = Member(**sample_member)
member_obj.save()

try:
flask_request = self.http_requests_queue.get(timeout=HTTP_CALL_WAITING_TIME)
except queue.Empty as e:
raise RuntimeError("HTTP server not called...") from e

assert flask_request["topic"] == "meshapi.Member/create"
for key, value in sample_member.items():
assert flask_request["object"][key] == value
assert flask_request["object_type"] == "meshapi.Member"
assert flask_request["webhook_uuid"]

def test_building(self):
# Creating a new building shouldn't trigger the webhook (we're not subscribed to Building events)
building_obj = Building(**sample_building)
building_obj.save()

try:
self.http_requests_queue.get(timeout=HTTP_CALL_WAITING_TIME)
assert False, "HTTP server shouldn't have been called"
except queue.Empty:
pass
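For reference, the payload asserted above arrives at the Flask listener in roughly this shape (values are illustrative; the "object" keys mirror the fields of sample_member from sample_data.py):

expected_payload = {
    "topic": "meshapi.Member/create",
    "object": {
        # one key/value pair per field of sample_member
    },
    "object_type": "meshapi.Member",
    "webhook_uuid": "...",  # UUID assigned by django-webhook; the test only checks that it is present
}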
5 changes: 5 additions & 0 deletions src/meshdb/__init__.py
@@ -0,0 +1,5 @@
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app

__all__ = ("celery_app",)
18 changes: 18 additions & 0 deletions src/meshdb/celery.py
@@ -0,0 +1,18 @@
import os

from celery import Celery

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meshdb.settings")

# Use the docker-hosted Redis container as the backend for Celery
app = Celery("meshdb", broker="redis://localhost:6379/0")

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings", namespace="CELERY")

# Load task modules from all registered Django apps.
app.autodiscover_tasks()
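With autodiscover_tasks() in place, any installed Django app can define tasks with the standard Celery decorator, and they will run on the worker started in entrypoint.sh; django-webhook dispatches its deliveries through Celery tasks the same way. A minimal sketch (hypothetical task, not part of this PR):

from celery import shared_task


@shared_task
def ping_task(message: str) -> str:
    # Runs on the detached worker, not in the Django request process
    return f"pong: {message}"


# Queue it from anywhere in Django code:
# ping_task.delay("hello")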
18 changes: 17 additions & 1 deletion src/meshdb/settings.py
@@ -10,8 +10,8 @@
https://docs.djangoproject.com/en/4.2/ref/settings/
"""

from pathlib import Path
import os
from pathlib import Path

from dotenv import load_dotenv

@@ -73,6 +73,7 @@
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django_webhook",
"rest_framework",
"rest_framework.authtoken",
"meshapi",
@@ -178,3 +179,18 @@
"rest_framework.authentication.TokenAuthentication",
],
}

# Allow-list models which the admin can select to send webhooks for
DJANGO_WEBHOOK = dict(
MODELS=[
"meshapi.Building",
"meshapi.Member",
"meshapi.Install",
"meshapi.Link",
"meshapi.Sector",
],
# Caching breaks tests, and our write volumes are so low that the performance
# impact of disabling it should be negligible (an extra DB call on any model change).
# If this becomes a problem in the future, look into disabling the cache only during testing.
USE_CACHE=False,
)
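Besides the admin UI, a subscription for one of the allow-listed models above can also be created from a Django shell, mirroring the setup in test_webhooks.py (the URL below is illustrative):

from django_webhook.models import Webhook, WebhookTopic

webhook = Webhook(url="https://example.com/webhook")
webhook.save()
# Topic names follow the "<app_label>.<Model>/<action>" pattern used in the tests
webhook.topics.set([WebhookTopic.objects.get(name="meshapi.Member/create")])
webhook.save()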