Commit 05c978e

fix all files using pre-commit (#1170)
* fix all files using pre-commit

* fix Sphinx docstring format

* remove forgotten :param word

* update node lts version references

* disable autoflake unused import when override dev

* fix Sphinx docstrings format

---------
PanosParalakis authored Mar 5, 2024
1 parent 223e146 commit 05c978e
Showing 49 changed files with 344 additions and 256 deletions.
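Most of the docstring churn in this commit follows a single pattern, visible in the
common/business_rules.py hunks below: a combined ``:param name Type: description``
field (which Sphinx renders poorly) is split into separate ``:param:`` and ``:type:``
fields. A minimal sketch of the corrected style -- illustrative only, not a file from
this commit:

.. code:: python

    # Before: ":param model TrackedModel: Get models linked to this model instance"
    # After: the type moves into its own ":type:" field, and the return value gets
    # separate ":return:"/":rtype:" fields.
    def get_linked_models(model, transaction):
        """
        Get models linked to a model instance.

        :param model: Get models linked to this model instance
        :type model: TrackedModel
        :param transaction: Get latest approved versions of linked models as of
            this transaction
        :type transaction: Transaction
        :return: The linked models
        :rtype: Iterator[TrackedModel]
        """
        ...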
1 change: 1 addition & 0 deletions .nvmrc
@@ -0,0 +1 @@
20.11
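The new ``.nvmrc`` pins the Node.js version (20.11, the LTS "Iron" line) so that
version managers pick it up automatically. Assuming nvm is used, a typical workflow
from the repo root would be:

.. code:: sh

    # With no version argument, nvm reads the version from .nvmrc
    nvm install
    nvm use
    node --version  # expect v20.11.x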
4 changes: 2 additions & 2 deletions Dockerfile
@@ -1,4 +1,4 @@
# Production static resource building
# Currently moved to makefile for development

# FROM node:lts-buster-slim AS jsdeps
@@ -20,7 +20,7 @@ ARG ENV="prod"
ENV ENV="${ENV}" \
PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
PATH="${PATH}:/home/tamato/.local/bin"

# don't run as root
RUN groupadd -g 1000 tamato && \
44 changes: 22 additions & 22 deletions Makefile
@@ -10,7 +10,7 @@ DB_DUMP?=${PROJECT}_db.sql
DB_NAME?=${PROJECT}
DB_USER?=postgres
DATE=$(shell date '+%Y_%m_%d')
TEMPLATE_NAME?="${DB_NAME}_${DATE}"

-include .env
export
@@ -20,7 +20,7 @@ SPHINXOPTS ?=
.PHONY: help clean clean-bytecode clean-static collectstatic compilescss dependencies \
docker-clean docker-deep-clean docker-down docker-up-db docker-down docker-image \
docker-db-dump docker-test node_modules run test test-fast docker-makemigrations \
docker-checkmigrations docker-migrate build-docs docker-restore-db docker-import-new-db



@@ -118,7 +118,7 @@ docker-test-fast:
@echo
@echo "> Running tests in docker..."
@${COMPOSE_LOCAL} ${DOCKER_RUN} \
${PROJECT} ${PYTHON} -m pytest -x -n=auto --dist=loadfile

## clean-docs: Clean the generated documentation files
clean-docs:
@@ -134,15 +134,15 @@ build-docs html:
## docker-clean: clean unused images and volumes
docker-clean:
@echo
 Cleaning unused images">
@echo "> Cleaning unused images in docker..."
@${DOCKER} image prune -a -f
@echo "> Cleaning unused volumes in docker..."
@${DOCKER} volume prune -f

## docker-deep-clean: deep clean all unused systems (containers, networks, images, cache)
docker-deep-clean:
@echo
 Cleaning unused systems">
@echo "> Cleaning unused systems in docker..."
@${DOCKER} system prune -a

## docker-down: shut down services in Docker
@@ -154,38 +154,38 @@ docker-down:
## docker-up-db: run the db service in Docker
docker-up-db:
@echo
 Running db in docker">
@echo "> Running db in docker..."
@${COMPOSE_LOCAL} up -d db
@echo
@echo "Waiting for database \"ready for connections\""
@sleep 15;
@echo "Database Ready for connections!"

## docker-import-new-db: Import DB_DUMP into a new TEMPLATE_NAME database in the Docker container (db must be running)
docker-import-new-db: docker-up-db
@${COMPOSE_LOCAL} exec -u ${DB_USER} db psql -c "DROP DATABASE ${TEMPLATE_NAME}" || true
@${COMPOSE_LOCAL} exec -u ${DB_USER} db psql -c "CREATE DATABASE ${TEMPLATE_NAME} TEMPLATE template0"
@echo "> Running db dump: ${DB_DUMP} in docker..."
@cat ${DB_DUMP} | ${COMPOSE_LOCAL} exec -T db psql -U ${DB_USER} -d ${TEMPLATE_NAME}
@sleep 5;

## docker-restore-db: Restore DB_NAME from TEMPLATE_NAME in the Docker container (db must be running)
docker-restore-db: docker-down docker-up-db
@${COMPOSE_LOCAL} exec -u ${DB_USER} db psql -c "DROP DATABASE ${DB_NAME}" || true
@${COMPOSE_LOCAL} exec -u ${DB_USER} db psql -c "CREATE DATABASE ${DB_NAME} TEMPLATE ${TEMPLATE_NAME}"
@sleep 5;

## docker-db-dump: Run db dump to import data into Docker
docker-db-dump: docker-up-db
@echo "> Running db dump in docker..."
@cat ${DB_DUMP} | ${COMPOSE_LOCAL} exec -T db psql -U ${DB_USER} -d ${DB_NAME}

## docker-first-use: Run the application for the first time in Docker
docker-first-use: docker-down docker-clean node_modules compilescss docker-build docker-import-new-db \
docker-restore-db docker-migrate docker-superuser docker-up

## docker-makemigrations: Run django makemigrations in Docker
docker-makemigrations:
@echo
@echo "> Running makemigrations in docker..."
@${COMPOSE_LOCAL} ${DOCKER_RUN} \
36 changes: 18 additions & 18 deletions README.rst
@@ -18,7 +18,7 @@ Prerequisites
The following dependencies are required to run this app:

- Python_ 3.8.x
- Node.js_ 20.11.x (LTS Iron)
- PostgreSQL_ 12
- Redis_ 5.x

@@ -71,10 +71,10 @@ Installing
$ npm install
$ npm run build
Those using Apple M1 Macs may have problems installing certain packages (e.g.
psycopg2 and lxml) via requirements-dev.txt. In this scenario you should run the
following from a Rosetta terminal (see `this article
<https://www.courier.com/blog/tips-and-tricks-to-setup-your-apple-m1-for-development/>`_ ),
substituting your own python version as appropriate:

.. code:: sh
@@ -85,7 +85,7 @@ substituting your own python version as appropriate:
$ export LDFLAGS="-L/opt/homebrew/opt/openssl@1.1/lib -L${HOME}/.pyenv/versions/3.8.10/lib"
$ arch -arm64 pip install psycopg2 --no-binary :all:
Credit due to armenzg and his `answer here
<https://github.com/psycopg/psycopg2/issues/1286#issuecomment-914286206>`_ .

Running
@@ -248,7 +248,7 @@ Build and Run for the first time:
# to overwrite default db dump name pass in DB_DUMP=tamato_db.sql
$ make docker-first-use
# take a tea break to import the db dump then
# enter super user details when prompted
# and visit localhost:8000/ when the containers are up
Run the tamato app every other time:
@@ -274,7 +274,7 @@ Import from a dump of the database:
# can override the name of the template at TEMPLATE_NAME
$ make docker-import-new-db
# Will restore the db DB_NAME with the provided TEMPLATE_NAME
$ make docker-restore-db
Sometimes docker gets clogged up and we need to clean it:
@@ -332,13 +332,13 @@ loading environment settings that are specific to this configuration:
services:
celery:
env_file:
- .env
- settings/envs/docker.env
- settings/envs/docker.override.env
rule-check-celery:
env_file:
- .env
- settings/envs/docker.env
- settings/envs/docker.override.env
@@ -424,7 +424,7 @@ Open another terminal and start a Celery worker:
.. code:: sh
celery -A common.celery worker --loglevel=info -Q standard,rule-check
# The celery worker can be run as two workers, one for each queue
celery -A common.celery worker --loglevel=info -Q standard
celery -A common.celery worker --loglevel=info -Q rule-check
@@ -467,19 +467,19 @@ Output defaults to stdout if filename is ``-`` or is not supplied.
Mocking s3 upload with minio
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

1. Follow the `instructions <https://min.io/docs/minio/macos/index.html>`_ to install minio server
2. Export MINIO_ROOT_USER and MINIO_ROOT_PASSWORD variables of your choice
3. Run the server with:

.. code:: sh
minio server --quiet --address 0.0.0.0:9003 ~/data
4. Navigate to http://localhost:9003/ and log in using the root user and password credentials just
created. Create a bucket and an access key via the console.
5. Export environment variables for any storages you wish to dummy (e.g. for sqlite dump export
this will be SQLITE_STORAGE_BUCKET_NAME, SQLITE_S3_ACCESS_KEY_ID, SQLITE_S3_SECRET_ACCESS_KEY,
SQLITE_S3_ENDPOINT_URL, and SQLITE_STORAGE_DIRECTORY), setting s3 endpoint url to
http://localhost:9003/
6. Alternatively, export all of the environment variables temporarily in a shell such as Bash
(useful when running a local development instance of a Celery worker):
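The export block that followed this step is collapsed in this view. A hypothetical
set of exports for the sqlite storage, using the variable names from step 5 with
placeholder values, might look like:

.. code:: sh

    # Placeholder values -- substitute the bucket and access key created in
    # the minio console in step 4.
    export SQLITE_STORAGE_BUCKET_NAME="sqlite"
    export SQLITE_STORAGE_DIRECTORY="sqlite-dumps/"
    export SQLITE_S3_ACCESS_KEY_ID="minio-access-key"
    export SQLITE_S3_SECRET_ACCESS_KEY="minio-secret-key"
    export SQLITE_S3_ENDPOINT_URL="http://localhost:9003/"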
@@ -494,15 +494,15 @@ Virus Scan and running locally
We use a shared service across the department for virus scanning. To run it locally, set up the following:
1. Follow the setup `instructions <https://github.com/uktrade/dit-clamav-rest>`_ and run it
2. Set SKIP_CLAM_AV_FILE_UPLOAD to False and CLAM_USE_HTTP to True
3. Add CLAM_AV_DOMAIN without http(s)://
4. Set CLAM_AV_USERNAME and CLAM_AV_PASSWORD to the username and password found in config.py in the dit-clamav-rest project, as in the sketch below
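A sketch of those settings as environment variables -- the names come from the steps
above, the values are placeholders:

.. code:: sh

    export SKIP_CLAM_AV_FILE_UPLOAD="False"
    export CLAM_USE_HTTP="True"
    export CLAM_AV_DOMAIN="localhost:8090"      # no http(s):// prefix
    export CLAM_AV_USERNAME="changeme"          # from config.py in dit-clamav-rest
    export CLAM_AV_PASSWORD="changeme"          # from config.py in dit-clamav-rest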


Application maintenance mode
----------------------------

The application can be put into a "maintenance mode" type of operation. By doing
so, all user web access is routed to a maintenance view and the default database
route removes the application's access to the database. This prevents
inadvertent changes by users, via the application UI, to application data while
in maintenance mode. Note, however, that this would not restrict other forms of
access to the database.
2 changes: 1 addition & 1 deletion checks/tests/test_tasks.py
@@ -56,7 +56,7 @@ def check(request):
assert num_completed >= num_successful

check = factories.TransactionCheckFactory.create(
**{trait: True for trait in traits},
)
check_names = [str(i) for i in range(num_checks)]
completes = repeat(True, num_completed)
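The trailing comma added here -- like the argument-per-line wrapping in the files
below -- is typical output of pre-commit hooks such as add-trailing-comma and black.
The repository's actual .pre-commit-config.yaml is not shown in this diff; a
hypothetical configuration that would produce this kind of reformatting:

.. code:: yaml

    # Hypothetical .pre-commit-config.yaml -- the repos and hook ids are real
    # projects, but the pinned revs here are illustrative.
    repos:
      - repo: https://github.com/pre-commit/pre-commit-hooks
        rev: v4.5.0
        hooks:
          - id: trailing-whitespace
          - id: end-of-file-fixer
      - repo: https://github.com/asottile/add-trailing-comma
        rev: v3.1.0
        hooks:
          - id: add-trailing-comma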
4 changes: 2 additions & 2 deletions commodities/import_handlers.py
@@ -106,7 +106,7 @@ def get_absorbed_into_goods_nomenclature_link(self, model, kwargs):
):
previous = (
models.GoodsNomenclatureSuccessor.objects.filter(
**{key: self.data[key] for key in self.identifying_fields},
)
.latest_approved()
.get()
@@ -144,7 +144,7 @@ def create_missing_goods_nomenclature_description_period(
goods_nomenclature_description_handler,
):
"""
In some circumstances, we will receive an EU update that will reference
a historic description period, and since TAP does not track SIDs
currently for this data we can't resolve the reference.
9 changes: 7 additions & 2 deletions commodities/models/dc.py
@@ -911,7 +911,10 @@ def to_transaction(self, workbasket: WorkBasket) -> TrackedModel:

with workbasket.new_transaction(order=order) as transaction:
return self.obj.new_version(
workbasket,
transaction,
update_type=self.update_type,
**attrs,
)

def _get_preemptive_transaction_order(self, workbasket: WorkBasket) -> int:
@@ -1693,7 +1696,9 @@ def __init__(self, **kwargs):


def get_tracked_model_reflection(
obj: TrackedModel,
transaction: Transaction = None,
**overrides,
):
"""
Returns a reflection of a TrackedModel object.
12 changes: 9 additions & 3 deletions commodities/tests/conftest.py
@@ -166,7 +166,9 @@ def create_collection(


def create_record(
transaction_pool: Iterator[Transaction],
factory,
**kwargs,
) -> TrackedModel:
"""
Returns a new TrackedModel instance.
@@ -179,7 +181,9 @@ def create_record(


def create_dependent_measure(
commodity: Commodity,
transaction_pool: Iterator[Transaction],
**kwargs,
) -> Measure:
"""Returns a new measure linked to a given good."""
factory = factories.MeasureFactory
@@ -190,7 +194,9 @@


def create_footnote_association(
commodity: Commodity,
transaction_pool: Iterator[Transaction],
**kwargs,
) -> FootnoteAssociationGoodsNomenclature:
"""Returns a new footnote association linked to a given good."""
factory = factories.FootnoteAssociationGoodsNomenclatureFactory
23 changes: 15 additions & 8 deletions common/business_rules.py
@@ -133,7 +133,8 @@ def get_linked_models(
:param model TrackedModel: Get models linked to this model instance
:param transaction Transaction: Get latest approved versions of linked
models as of this transaction
:return: The linked models
:rtype: Iterator[TrackedModel]
"""
for field, related_model in get_relations(type(model)).items():
business_rules = getattr(related_model, "business_rules", [])
@@ -169,9 +170,12 @@ def violation(
"""
Create a violation exception object.
:param model: The model that violates this business rule
:type model: Optional[TrackedModel]
:param message: A message explaining the violation
:type message: Optional[str]
:return: An exception indicating a business rule violation
:rtype: BusinessRuleViolation
"""

return getattr(self.__class__, "Violation", BusinessRuleViolation)(
@@ -185,7 +189,7 @@ def only_applicable_after(cutoff: Union[date, datetime, str]):
Decorate BusinessRules to make them only applicable after a given date.
:param cutoff Union[date, datetime, str]: The date, datetime or isoformat
date string of the time before which the rule should not apply
date string of the time before which the rule should not apply

if isinstance(cutoff, str):
@@ -226,8 +230,10 @@ def skip_when_update_type(cls: Type[BusinessRule], update_types: Iterable[Update
"""
Skip business rule validation for given update types.
:param cls: The BusinessRule to decorate
:type cls: Type[BusinessRule]
:param update_types: The UpdateTypes to skip
:type update_types: Iterable[int]
"""
_original_validate = cls.validate

@@ -336,7 +342,8 @@ def validate(self, model):
Check whether the specified model violates this business rule.
:param model TrackedModel: The model to check
:raises BusinessRuleViolation: Raised if the passed model violates this
business rule.
"""
if self.has_violation(model):
raise self.violation(model)
5 changes: 4 additions & 1 deletion common/fields.py
@@ -100,7 +100,10 @@ class TaricDateRangeField(DateRangeField):
range_type = TaricDateRange

def from_db_value(
self,
value: Union[DateRange, TaricDateRange],
*_args,
**_kwargs,
) -> TaricDateRange:
"""
By default Django ignores the range_type and just returns a Psycopg2