This repository has been archived by the owner on Oct 11, 2024. It is now read-only.

refactor(docker): move docker utils to a single folder #121

Merged 10 commits on Mar 11, 2024
10 changes: 5 additions & 5 deletions .github/labeler.yml
@@ -8,7 +8,7 @@

'ext: apm':
- changed-files:
- any-glob-to-any-file: apm/*
- any-glob-to-any-file: docker/apm/*

'module: core':
- changed-files:
@@ -79,10 +79,10 @@
'topic: docker':
- changed-files:
- any-glob-to-any-file:
- docker-compose.yml
- docker-compose.test.yml
- Dockerfile
- traefik/*
- docker/docker-compose.yml
- docker/docker-compose.test.yml
- docker/Dockerfile
- docker/.env.example

'topic: docs':
- changed-files:
6 changes: 3 additions & 3 deletions .github/workflows/builds.yml
@@ -33,7 +33,7 @@ jobs:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: pg_pwd
OLLAMA_MODEL: tinydolphin:1.1b-v2.8-q4_0
run: docker compose up -d --build
run: docker compose -f docker/docker-compose.yml up -d --build
- name: Docker sanity check
env:
SUPERADMIN_GH_PAT: ${{ secrets.SUPERADMIN_GH_PAT }}
@@ -46,7 +46,7 @@
POSTGRES_PASSWORD: pg_pwd
OLLAMA_MODEL: tinydolphin:1.1b-v2.8-q4_0
run: |
sleep 20 && docker compose logs backend
sleep 20 && docker compose -f docker/docker-compose.yml logs backend
nc -vz localhost 8050
docker compose logs
docker compose -f docker/docker-compose.yml logs
curl http://localhost:8050/docs
2 changes: 1 addition & 1 deletion .github/workflows/push.yml
@@ -18,7 +18,7 @@ jobs:
- name: Resolve dependencies
run: poetry export -f requirements.txt --without-hashes --output requirements.txt
- name: Build docker
run: docker build . -t quackai/contribution-api:latest
run: docker build -f docker/Dockerfile . -t quackai/contribution-api:latest
- name: Login to DockerHub
uses: docker/login-action@v3
with:
2 changes: 1 addition & 1 deletion .github/workflows/scripts.yml
@@ -27,7 +27,7 @@ jobs:
SUPERADMIN_GH_PAT: ${{ secrets.SUPERADMIN_GH_PAT }}
GH_OAUTH_ID: ${{ secrets.GH_OAUTH_ID }}
GH_OAUTH_SECRET: ${{ secrets.GH_OAUTH_SECRET }}
run: docker compose -f docker-compose.test.yml up -d --build
run: docker compose -f docker/docker-compose.test.yml up -d --build
- name: Docker sanity check
run: sleep 20 && nc -vz localhost 8050
- name: Install dependencies
20 changes: 10 additions & 10 deletions .github/workflows/tests.yml
@@ -24,15 +24,15 @@ jobs:
SUPERADMIN_GH_PAT: ${{ secrets.SUPERADMIN_GH_PAT }}
GH_OAUTH_ID: ${{ secrets.GH_OAUTH_ID }}
GH_OAUTH_SECRET: ${{ secrets.GH_OAUTH_SECRET }}
run: docker compose -f docker-compose.test.yml up -d --build
run: docker compose -f docker/docker-compose.test.yml up -d --build
- name: Run docker test
env:
SUPERADMIN_GH_PAT: ${{ secrets.SUPERADMIN_GH_PAT }}
GH_OAUTH_ID: ${{ secrets.GH_OAUTH_ID }}
GH_OAUTH_SECRET: ${{ secrets.GH_OAUTH_SECRET }}
run: |
docker compose -f docker-compose.test.yml exec -T backend pytest --cov=app --cov-report xml tests/
docker compose -f docker-compose.test.yml cp backend:/app/coverage.xml ./coverage-src.xml
docker compose -f docker/docker-compose.test.yml exec -T backend pytest --cov=app --cov-report xml tests/
docker compose -f docker/docker-compose.test.yml cp backend:/app/coverage.xml ./coverage-src.xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
with:
@@ -57,17 +57,17 @@ jobs:
SUPERADMIN_GH_PAT: ${{ secrets.SUPERADMIN_GH_PAT }}
GH_OAUTH_ID: ${{ secrets.GH_OAUTH_ID }}
GH_OAUTH_SECRET: ${{ secrets.GH_OAUTH_SECRET }}
run: docker compose -f docker-compose.test.yml up -d --build
run: docker compose -f docker/docker-compose.test.yml up -d --build
- name: Debug
run: |
docker compose -f docker-compose.test.yml exec -T backend alembic current
docker compose -f docker-compose.test.yml exec -T backend alembic history --verbose
docker compose -f docker/docker-compose.test.yml exec -T backend alembic current
docker compose -f docker/docker-compose.test.yml exec -T backend alembic history --verbose
- name: Run migrations
run: |
docker compose -f docker-compose.test.yml exec -T backend alembic stamp head
docker compose -f docker-compose.test.yml exec -T backend alembic history --verbose
docker compose -f docker-compose.test.yml exec -T backend alembic downgrade -1
docker compose -f docker-compose.test.yml exec -T backend alembic upgrade +1
docker compose -f docker/docker-compose.test.yml exec -T backend alembic stamp head
docker compose -f docker/docker-compose.test.yml exec -T backend alembic history --verbose
docker compose -f docker/docker-compose.test.yml exec -T backend alembic downgrade -1
docker compose -f docker/docker-compose.test.yml exec -T backend alembic upgrade +1

headers:
runs-on: ${{ matrix.os }}
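To reproduce the CI migration check locally with the relocated compose file, the sequence below mirrors the workflow steps above. It is a sketch that assumes you run it from the repository root with the variables the workflow gets from secrets (`SUPERADMIN_GH_PAT`, `GH_OAUTH_ID`, `GH_OAUTH_SECRET`) exported in your shell.

```shell
# Build and start the test stack with the compose file at its new location
docker compose -f docker/docker-compose.test.yml up -d --build

# Inspect the current migration state (same as the workflow's debug step)
docker compose -f docker/docker-compose.test.yml exec -T backend alembic current
docker compose -f docker/docker-compose.test.yml exec -T backend alembic history --verbose

# Exercise the migrations the same way the workflow does
docker compose -f docker/docker-compose.test.yml exec -T backend alembic stamp head
docker compose -f docker/docker-compose.test.yml exec -T backend alembic downgrade -1
docker compose -f docker/docker-compose.test.yml exec -T backend alembic upgrade +1

# Tear down
docker compose -f docker/docker-compose.test.yml down
```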
1 change: 1 addition & 0 deletions CONTRIBUTING.md
@@ -11,6 +11,7 @@ Whatever the way you wish to contribute to the project, please respect the [code
- [`src/app`](https://github.com/quack-ai/contribution-api/blob/main/src/app) - The actual API codebase
- [`src/tests`](https://github.com/quack-ai/contribution-api/blob/main/src/tests) - The API unit tests
- [`.github`](https://github.com/quack-ai/contribution-api/blob/main/.github) - Configuration for CI (GitHub Workflows)
- [`docker`](https://github.com/quack-ai/contribution-api/blob/main/docker) - Docker-related configurations
- [`docs`](https://github.com/quack-ai/contribution-api/blob/main/docs) - Everything related to documentation
- [`scripts`](https://github.com/quack-ai/contribution-api/blob/main/scripts) - Custom scripts

20 changes: 10 additions & 10 deletions Makefile
@@ -16,35 +16,35 @@ lock:
# Build the docker
build:
poetry export -f requirements.txt --without-hashes --output requirements.txt
docker build . -t quackai/contribution-api:python3.9-alpine3.14
docker build -f docker/Dockerfile . -t quackai/contribution-api:latest

# Run the docker
run:
poetry export -f requirements.txt --without-hashes --output requirements.txt
docker compose up -d --build
docker compose -f docker/docker-compose.yml up -d --build

# Run the docker
stop:
docker compose down
docker compose -f docker/docker-compose.yml down

run-dev:
poetry export -f requirements.txt --without-hashes --with test --output requirements.txt
docker compose -f docker-compose.test.yml up -d --build
docker compose -f docker/docker-compose.test.yml up -d --build

stop-dev:
docker compose -f docker-compose.test.yml down
docker compose -f docker/docker-compose.test.yml down

# Run tests for the library
test:
poetry export -f requirements.txt --without-hashes --with test --output requirements.txt
docker compose -f docker-compose.test.yml up -d --build
docker compose exec -T backend pytest --cov=app
docker compose -f docker-compose.test.yml down
docker compose -f docker/docker-compose.test.yml up -d --build
docker compose -f docker/docker-compose.test.yml exec -T backend pytest --cov=app
docker compose -f docker/docker-compose.test.yml down

# Run tests for the library
e2e:
poetry export -f requirements.txt --without-hashes --output requirements.txt
docker compose -f docker-compose.test.yml up -d --build
docker compose -f docker/docker-compose.test.yml up -d --build
sleep 5
python scripts/test_e2e.py
docker compose -f docker-compose.test.yml down
docker compose -f docker/docker-compose.test.yml down
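Because only the `-f` paths change inside the Makefile, the day-to-day entry points keep their names. A quick sketch of the usual loop with the new layout:

```shell
make run    # export requirements.txt and start the dev stack from docker/docker-compose.yml
make stop   # stop the dev stack
make test   # start the test stack, run pytest with coverage inside the backend container, then tear it down
make e2e    # start the test stack and run scripts/test_e2e.py against it
```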
10 changes: 5 additions & 5 deletions README.md
@@ -112,6 +112,11 @@ The project was designed so that everything runs with Docker orchestration (stan
### Configuration

In order to run the project, you will need to specify some information, which can be done using a `.env` file.
Copy the default environment variables from [`.env.example`](./docker/.env.example):
```shell
cp docker/.env.example .env
```

This file will have to hold the following information:
- `POSTGRES_DB`*: a name for the [PostgreSQL](https://www.postgresql.org/) database that will be created
- `POSTGRES_USER`*: a login for the PostgreSQL database
@@ -133,12 +138,7 @@ Optionally, the following information can be added:
- `SLACK_CHANNEL`: the Slack channel where your bot will post events (defaults to `#general`, you have to invite the App to your channel).
- `SUPPORT_EMAIL`: the email used for support of your API.
- `DEBUG`: if set to false, silence debug logs.
- `OPENAI_API_KEY`**: your API key for Open AI (Create new secret key on [OpenAI](https://platform.openai.com/api-keys))

_** marks the deprecated values._

So your `.env` file should look like something similar to [`.env.example`](.env.example)
The file should be placed in the folder of your `./docker-compose.yml`.

## Contributing

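For reference, a minimal `.env` for the new layout could look like the sketch below. The variable names come from the compose files in this PR; the values are placeholders, and the canonical template remains `docker/.env.example`.

```shell
# Placeholder values only; copy docker/.env.example and fill in real credentials
POSTGRES_DB=quackdb
POSTGRES_USER=postgres
POSTGRES_PASSWORD=change-me
OLLAMA_MODEL=tinydolphin:1.1b-v2.8-q4_0
SECRET_KEY=change-me-too
SUPERADMIN_GH_PAT=ghp_your_personal_access_token
SUPERADMIN_LOGIN=your-github-login
SUPERADMIN_PWD=change-me-as-well
GH_OAUTH_ID=your-github-oauth-app-id
GH_OAUTH_SECRET=your-github-oauth-app-secret
```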
File renamed without changes.
File renamed without changes.
File renamed without changes.
20 changes: 9 additions & 11 deletions docker-compose.yml → docker/docker-compose.prod.yml
@@ -2,10 +2,8 @@ version: '3.7'

services:
backend:
build: .
image: quackai/contribution-api:latest
command: uvicorn app.main:app --reload --host 0.0.0.0 --port 8050 --proxy-headers
volumes:
- ./src/:/app/
ports:
- "8050:8050"
environment:
@@ -19,7 +17,7 @@
- GH_OAUTH_ID=${GH_OAUTH_ID}
- GH_OAUTH_SECRET=${GH_OAUTH_SECRET}
- SUPPORT_EMAIL=${SUPPORT_EMAIL}
- DEBUG=true
- DEBUG=false
depends_on:
db:
condition: service_healthy
@@ -38,13 +36,13 @@
interval: 5s
timeout: 1m
retries: 3
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: 1
# capabilities: [gpu]
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]

db:
image: postgres:15-alpine
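The renamed production file now pulls the published image and enables the GPU reservation instead of keeping it commented out. Assuming the `quackai/contribution-api:latest` image is reachable and an NVIDIA container runtime is installed, starting it would look roughly like:

```shell
docker compose -f docker/docker-compose.prod.yml up -d
docker compose -f docker/docker-compose.prod.yml logs backend
```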
6 changes: 4 additions & 2 deletions docker-compose.test.yml → docker/docker-compose.test.yml
@@ -2,10 +2,12 @@ version: '3.7'

services:
backend:
build: .
build:
context: ..
dockerfile: ./docker/Dockerfile
command: uvicorn app.main:app --host 0.0.0.0 --port 8050
volumes:
- ./src/:/app/
- ../src/:/app/
ports:
- "8050:8050"
environment:
69 changes: 69 additions & 0 deletions docker/docker-compose.yml
@@ -0,0 +1,69 @@
version: '3.7'

services:
backend:
build:
context: ..
dockerfile: ./docker/Dockerfile
command: uvicorn app.main:app --reload --host 0.0.0.0 --port 8050 --proxy-headers
volumes:
- ../src/:/app/
ports:
- "8050:8050"
environment:
- POSTGRES_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db/${POSTGRES_DB}
- OLLAMA_ENDPOINT=http://ollama:11434
- OLLAMA_MODEL=${OLLAMA_MODEL}
- SECRET_KEY=${SECRET_KEY}
- SUPERADMIN_GH_PAT=${SUPERADMIN_GH_PAT}
- SUPERADMIN_LOGIN=${SUPERADMIN_LOGIN}
- SUPERADMIN_PWD=${SUPERADMIN_PWD}
- GH_OAUTH_ID=${GH_OAUTH_ID}
- GH_OAUTH_SECRET=${GH_OAUTH_SECRET}
- SUPPORT_EMAIL=${SUPPORT_EMAIL}
- DEBUG=true
depends_on:
db:
condition: service_healthy
ollama:
condition: service_healthy

ollama:
image: ollama/ollama:0.1.29
command: serve
volumes:
- "$HOME/.ollama:/root/.ollama"
expose:
- 11434
healthcheck:
test: ["CMD-SHELL", "ollama pull '${OLLAMA_MODEL}'"]
interval: 5s
timeout: 1m
retries: 3
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: 1
# capabilities: [gpu]

db:
image: postgres:15-alpine
volumes:
- postgres_data:/var/lib/postgresql/data/
expose:
- 5432
environment:
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
- POSTGRES_DB=${POSTGRES_DB}
healthcheck:
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}'"]
interval: 10s
timeout: 3s
retries: 3

volumes:
postgres_data:
ollama:
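Putting it together, a local run with the relocated files looks roughly like this sketch (from the repository root; the `curl` check mirrors the sanity check in the builds workflow):

```shell
cp docker/.env.example .env                                 # seed the environment file, as added to the README
docker compose -f docker/docker-compose.yml up -d --build   # build and start backend, ollama and postgres
curl http://localhost:8050/docs                             # interactive docs answer once the stack is healthy
docker compose -f docker/docker-compose.yml down            # stop everything
```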