Merge pull request #159 from deNBI/staging
Staging
eKatchko authored Jun 9, 2022
2 parents d95cfe7 + fa8c2cc commit aec6d3c
Showing 62 changed files with 2,188 additions and 6,105 deletions.
34 changes: 11 additions & 23 deletions .default.env
@@ -1,25 +1,13 @@
# Your Login to Perun
OS_CREDITS_PERUN_LOGIN=
# Corresponding Password
OS_CREDITS_PERUN_PASSWORD=
# ID of your virtual organization
OS_CREDITS_PERUN_VO_ID=
# how to connect to the InfluxDB storing the prometheus data
INFLUXDB_HOST=portal_influxdb
INFLUXDB_USER=prometheus
INFLUXDB_USER_PASSWORD=secret
INFLUXDB_DB=portal_prometheus
#TimescaleDB
POSTGRES_DB=credits_db
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=password
POSTGRES_HOST=timescaledb

API_KEY="change-me-but-not-to-123"
#API key to secure some endpoints
API_KEY=super-secret

# Semicolon-separated list of project/group names.
# If set only those will be billed
# OS_CREDITS_PROJECT_WHITELIST
# How many worker tasks will process the queue. Default is 10
# OS_CREDITS_WORKERS=10
# Float precision of `credits_current` inside Perun
# OS_CREDITS_PRECISION=2
# Cost of running one vCPU core for one hour
# VCPU_CREDIT_PER_HOUR=1
# Cost of running one GB of RAM for one hour
# RAM_CREDIT_PER_HOUR=0.3
METRICS_TO_BILL={"project_vcpu_usage": 1.0, "project_mb_usage": 0.3}

ENDPOINTS_ONLY=True
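
For orientation, the following is a minimal Python sketch of how a weight map such as METRICS_TO_BILL together with OS_CREDITS_PRECISION could be turned into a credit charge. The function name and the usage payload are illustrative assumptions, not code taken from this repository:

    import json
    import os
    from decimal import Decimal

    # Example values mirroring .default.env; the service itself reads them from the environment.
    METRICS_TO_BILL = json.loads(
        os.environ.get("METRICS_TO_BILL", '{"project_vcpu_usage": 1.0, "project_mb_usage": 0.3}')
    )
    PRECISION = int(os.environ.get("OS_CREDITS_PRECISION", "2"))

    def bill(usage_deltas: dict[str, float]) -> Decimal:
        """Weight each measured usage delta by its configured credits-per-unit value."""
        total = sum(
            (Decimal(str(delta)) * Decimal(str(METRICS_TO_BILL[metric]))
             for metric, delta in usage_deltas.items()
             if metric in METRICS_TO_BILL),
            start=Decimal("0"),
        )
        return round(total, PRECISION)

    print(bill({"project_vcpu_usage": 4.0, "project_mb_usage": 1024.0}))  # Decimal('311.20')
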
2 changes: 1 addition & 1 deletion .editorconfig
@@ -15,7 +15,7 @@ max_line_length = 80
charset = utf-8
indent_style = space
indent_size = 4
max_line_length = 88
max_line_length = 140

# 2 space indentation
[*.{html,css,less,scss,yml,json}]
2 changes: 1 addition & 1 deletion .github/workflows/build-image.yml
@@ -9,7 +9,7 @@ jobs:
uses: rokroskar/[email protected]
env:
GITHUB_TOKEN: "${{ secrets.GITHUBSECRET2 }}"
- uses: actions/checkout@v2.4.0
- uses: actions/checkout@v3.0.2
- name: Build with retry
uses: Wandalen/[email protected]
with:
8 changes: 4 additions & 4 deletions .github/workflows/codeql-analysis.yml
@@ -35,11 +35,11 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
queries: +security-extended, security-and-quality
@@ -51,7 +51,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
uses: github/codeql-action/autobuild@v2

# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -65,4 +65,4 @@ jobs:
# make release

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2
2 changes: 2 additions & 0 deletions .gitignore
@@ -117,3 +117,5 @@ dmypy.json
# project specific settings
.vim/
.nvimrc

db_data
11 changes: 6 additions & 5 deletions Dockerfile
@@ -1,23 +1,24 @@
FROM python:3.10.1-alpine as builder
FROM python:3.10.4-alpine as builder

ADD src /code/src
ADD pyproject.toml poetry.lock /code/
WORKDIR /code
RUN apk add gcc musl-dev python3-dev libffi-dev openssl-dev cargo
RUN apk -U upgrade && apk --no-cache add gcc wget linux-headers musl-dev libffi-dev libressl-dev cargo build-base libpq-dev
RUN pip install cryptography
RUN pip install poetry && poetry build -f wheel

# by using a build container we avoid carrying around poetry
# alongside its dependencies
FROM python:3.10.1-alpine
FROM python:3.10.4-alpine
ARG OS_CREDITS_VERSION
ARG WHEEL_NAME=os_credits-1.2.0-py3-none-any.whl
ARG WHEEL_NAME=os_credits-2.0.0-py3-none-any.whl
EXPOSE 80
ENV CREDITS_PORT 80
ENV CREDITS_HOST 0.0.0.0
COPY --from=builder /code/dist/$WHEEL_NAME /tmp/

# wget to perform healthcheck against /ping endpoint
RUN apk update && apk --no-cache add gcc wget linux-headers musl-dev \
RUN apk update && apk --no-cache add gcc wget linux-headers musl-dev libpq-dev \
&& pip install --no-cache /tmp/$WHEEL_NAME \
&& rm /tmp/$WHEEL_NAME

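
The Dockerfile above installs wget solely to probe a /ping route as the container healthcheck. As a point of reference, a liveness handler in aiohttp (the framework named in the README) could look roughly like the sketch below; it is an illustrative stand-in, not the handler shipped in os_credits:

    # Illustrative aiohttp liveness endpoint; route name and response body are assumptions.
    import os

    from aiohttp import web

    async def ping(_request: web.Request) -> web.Response:
        return web.Response(text="pong")

    app = web.Application()
    app.add_routes([web.get("/ping", ping)])

    if __name__ == "__main__":
        # CREDITS_HOST / CREDITS_PORT match the ENV defaults set in the Dockerfile above.
        web.run_app(
            app,
            host=os.environ.get("CREDITS_HOST", "0.0.0.0"),
            port=int(os.environ.get("CREDITS_PORT", "80")),
        )
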
6 changes: 3 additions & 3 deletions Dockerfile.dev
@@ -1,18 +1,18 @@
# Expected to be called with `--volume $(PWD)/src:/code/src:ro`
# Uses `adev` from `aiohttp-devtools` to restart the application on any code change
FROM python:3.10.1-alpine
FROM python:3.10.4-alpine

ADD src /code/src
ADD pyproject.toml poetry.lock /code/
WORKDIR /code
RUN apk -U upgrade && apk --no-cache add gcc wget linux-headers musl-dev libffi-dev libressl-dev cargo
RUN apk -U upgrade && apk --no-cache add gcc wget linux-headers musl-dev libffi-dev libressl-dev cargo build-base libpq-dev
RUN pip install cryptography
RUN pip install poetry
# install to system
RUN poetry config virtualenvs.create false
# we do not need any development packages except aiohttp-devtools to
# automatically restart the app once we changed the bind-mounted source code
RUN cp /usr/local/lib/python3.9/site-packages/certifi/cacert.pem /cacert.pem
RUN cp /usr/local/lib/python3.10/site-packages/certifi/cacert.pem /cacert.pem
RUN export REQUESTS_CA_BUNDLE=/cacert.pem && poetry install --no-dev && pip install --no-cache aiohttp-devtools

EXPOSE 80
69 changes: 8 additions & 61 deletions Makefile
@@ -28,75 +28,22 @@ clean-build: ## Remove any python build artifacts

.PHONY: docker-build
docker-build: ## Call bin/build_docker.py with $DOCKER_USERNAME[$USER] and $DOCKER_IMAGENAME[os_credits]
find . -type d -name '__pycache__' -prune -exec rm -rf {} \;
find src -type d -name '__pycache__' -prune -exec rm -rf {} \;
poetry run bin/build_docker.py -u $(DOCKER_USERNAME) -i $(DOCKER_IMAGENAME)

.PHONY: docker-build-dev
docker-build-dev: ## Build Dockerfile.dev with name 'os_credits-dev'
find . -type d -name '__pycache__' -prune -exec rm -rf {} \;
docker build -f Dockerfile.dev -t os_credits-dev .

.PHONY: build-run-dev
build-run-dev: ## Build Dockerfile.dev with name 'os_credits-dev' and start docker-compose
find . -type d -name '__pycache__' -prune -exec rm -rf {} \;
find src -type d -name '__pycache__' -prune -exec rm -rf {} \;
docker build -f Dockerfile.dev -t os_credits-dev .
docker-compose up -d
docker stop os_credits_credits_1
docker start os_credits_credits_1

.PHONY: docker-run-dev
docker-run-dev: ## Run 'os_credits-dev' inside 'docker-compose.yml' attached - os_credits-dev:80 -> localhost:8000
poetry run docker-compose up
.PHONY: up-dev
up-dev: ## Build Dockerfile.dev with name 'os_credits-dev' and docker-compose up --detach
docker-compose up --detach

.PHONY: docker-project_usage-dev
docker-project_usage-dev: ## Run 'os_credits-dev' and integrate it into the 'dev' profile of 'project_usage'
docker stop portal_credits || true
docker rm portal_credits || true
docker run \
--publish=8002:80 \
--name portal_credits \
--network project_usage_portal \
--volume $(PWD)/src:/code/src:ro \
--env-file .env \
--env MAIL_NOT_STARTTLS=1 \
--env MAIL_SMTP_SERVER=portal_smtp_server \
--detach \
os_credits-dev:latest
.PHONY: down
down: ## docker-compose down
docker-compose down

.PHONY: docs
docs: ## Build HTML documentation
cd docs && $(MAKE) html

.PHONY: docs-doctest
docs-doctest: ## Run doctests inside documentation
cd docs && $(MAKE) doctest

.PHONY: test
test: ## Start tests/docker-compose.yml, run test suite and stop docker-compose
poetry run docker-compose -f tests/docker-compose.yml up --detach
@echo 'Waiting until InfluxDB is ready'
. tests/test.env && until `curl -o /dev/null -s -I -f "http://$$INFLUXDB_HOST:$$INFLUXDB_PORT/ping"`; \
do printf '.'; \
sleep 1; \
done
poetry run pytest --color=yes tests src || true
poetry run docker-compose -f tests/docker-compose.yml down --volumes --remove-orphans

.PHONY: test-online
test-online: ## Same as `test` but does also run tests against Perun
env TEST_ONLINE=1 $(MAKE) test

.PHONY: test-online-only
test-online-only: ## Only run tests against Perun
poetry run env TEST_ONLINE=1 pytest --color=yes --no-cov tests/test_perun.py

.PHONY: mypy
mypy: ## Run `mypy`, a static type checker for python, see 'htmlcov/mypy/index.html'
poetry run mypy src/os_credits --html-report=htmlcov/mypy

.PHONY: setup
setup: ## Setup development environment
@echo 'Requires poetry from - https://poetry.eustace.io/docs/'
poetry install
poetry run pre-commit install -t pre-commit
poetry run pre-commit install -t pre-push
29 changes: 13 additions & 16 deletions README.rst
@@ -12,18 +12,17 @@ The service is integrated into the *Portal stack* of the `project_usage project
<https://github.com/deNBI/project_usage>`_, please refer to its wiki for corresponding
setup instructions/required services.

The development has been part of the master thesis **Accounting and Reporting of
OpenStack Cloud instances via Prometheus**, which therefore
contains a large introduction to the area of *Cloud Billing* and the motivations which led
to the current design. A development manual can be found inside the
``docs/`` folder of this repository and can be built via ``make docs``.

Development
-----------

The project has been developed with Python 3.7 and uses the `aiohttp
<https://docs.aiohttp.org>`_ framework for communication. Its dependencies are managed via
`Poetry <https://pypi.org/project/poetry/>`_.
If you want to develop while using the whole stack, please see the `project_usage project
<https://github.com/deNBI/project_usage>`_ for more information.
If you only need endpoints that do not require the whole stack (e.g. ``/cost_per_hour``),
copy ``.default.env`` to ``.env`` and run ``make up-dev``. This builds the container from
``Dockerfile.dev``. Please note that a named volume, ``credits_data``, will be created.

Monitoring/Debugging
~~~~~~~~~~~~~~~~~~~~
@@ -44,16 +43,14 @@ the image. To modify this values call
``make build-docker DOCKER_USERNAME=<your_username> DOCKER_IMAGENAME=<your_imagename>``.


Stack integration
Additional notes
~~~~~~~~~~~~~~~~~

To run the code use the provided ``Dockerfile.dev`` which you can build
via ``make docker-build-dev``. Afterward use ``make docker-project_usage-dev`` to
integrate the development container into the ``project_usage`` stack.
The development has been part of the master thesis **Accounting and Reporting of
OpenStack Cloud instances via Prometheus**, which therefore
contains a large introduction to the area of *Cloud Billing* and the motivations which led
to the current design.

The development container uses the ``adev runserver`` command from
`aiohttp-devtools <https://github.com/aio-libs/aiohttp-devtools>`__,
which will restart your app on any code change. But since the code is
bind-mounted inside the container you can simply continue editing and
have it restart on any change.
Update 2022:
The design of this system changed after InfluxDB was replaced with TimescaleDB
and some unforeseen requirements emerged.
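
To illustrate the endpoint-only setup described in the README above, the snippet below calls the ``/ping`` healthcheck and the ``/cost_per_hour`` endpoint of a locally running container. The port mapping, HTTP method, request payload and API-key header are assumptions made for the example; consult docker-compose.yml and the service's route definitions for the real values:

    # Hypothetical client for a locally running os_credits container (endpoint-only setup).
    import os

    import requests

    BASE_URL = os.environ.get("CREDITS_URL", "http://localhost:8000")  # assumed port mapping
    API_KEY = os.environ.get("API_KEY", "super-secret")

    # Liveness probe, also used by the container healthcheck.
    print(requests.get(f"{BASE_URL}/ping", timeout=5).status_code)

    # Cost query; method, payload shape and header name are assumptions for illustration.
    response = requests.post(
        f"{BASE_URL}/cost_per_hour",
        json={"cpu": 4, "ram": 8192},
        headers={"X-API-KEY": API_KEY},
        timeout=5,
    )
    print(response.status_code, response.text)
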
57 changes: 0 additions & 57 deletions config/credits.toml.in

This file was deleted.

23 changes: 23 additions & 0 deletions db_init/init_db.sh
@@ -0,0 +1,23 @@
#!/bin/bash
set -e

FILE=/docker-entrypoint-initdb.d/credits_db.dump

psql -v ON_ERROR_STOP=1 -U postgres -d credits_db <<-EOSQL
ALTER ROLE postgres SET client_encoding TO 'utf8';
ALTER ROLE postgres SET default_transaction_isolation TO 'read committed';
ALTER ROLE postgres SET timezone TO 'UTC';
GRANT ALL PRIVILEGES ON DATABASE credits_db TO postgres;
CREATE EXTENSION IF NOT EXISTS timescaledb;
EOSQL

if test -f "$FILE"; then
echo "got here"
psql -v ON_ERROR_STOP=1 -U postgres -d credits_db <<-EOSQL
SELECT timescaledb_pre_restore();
EOSQL
psql -U postgres --set ON_ERROR_STOP=on -d credits_db -f /docker-entrypoint-initdb.d/credits_db.dump
psql -v ON_ERROR_STOP=1 -U postgres -d credits_db <<-EOSQL
SELECT timescaledb_post_restore();
EOSQL
fi
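
The script above enables the timescaledb extension and, if a dump file is present, wraps its restore in timescaledb_pre_restore()/timescaledb_post_restore(). A quick, hypothetical sanity check after the container has initialised could look like the following sketch; the connection parameters mirror .default.env and assume the database port is published locally:

    # Hypothetical post-init check using psycopg2; credentials mirror .default.env.
    import psycopg2

    conn = psycopg2.connect(
        host="localhost",
        port=5432,
        dbname="credits_db",
        user="postgres",
        password="password",
    )
    with conn.cursor() as cur:
        # Confirms that the extension created by init_db.sh is actually installed.
        cur.execute("SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'")
        print("timescaledb version:", cur.fetchone())
    conn.close()
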