diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..c0b3ebc
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,28 @@
+name: Unit tests
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        name: Set up Python
+        with:
+          python-version: "3.11"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements-dev.txt
+      - name: Lint with ruff
+        run: |
+          ruff .
+      - name: Test with pytest
+        run: |
+          PYTHONPATH=`pwd` pytest --cache-clear --cov=. tests
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0d4dcb4..138a3a2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,12 +1,11 @@
 # See https://pre-commit.com for more information
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
-  - repo: https://github.com/psf/black
-    rev: 20.8b1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.1.6
     hooks:
-      - id: black
-        language_version: python3.8
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.8.3
-    hooks:
-      - id: flake8
+      # Run the linter.
+      - id: ruff
+      # Run the formatter.
+      - id: ruff-format
diff --git a/Dockerfile b/Dockerfile
index a5f8367..01d5b5b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.8.5-slim-buster
+FROM python:3.11.4-slim-buster
 
 WORKDIR /app
 
diff --git a/Makefile b/Makefile
index c2c44b5..259ca02 100644
--- a/Makefile
+++ b/Makefile
@@ -1,28 +1,18 @@
-.PHONY: doit prepare-env run-tests
+.PHONY: lint prepare-env tests
 
 # Removes the existing virtualenv, creates a new one, install dependencies.
 prepare-env:
 	rm -rf .venv
-	python3.8 -m venv .venv
+	python3.11 -m venv .venv
 	.venv/bin/pip install -U pip
-	.venv/bin/pip install -r requirements.txt
+	.venv/bin/pip install -r requirements-dev.txt
 
 
-doit:
-	# Dependencies should be installed from requirements-dev.txt.
-	# Sorts imports in python files.
-	docker run -v `pwd`:`pwd` -w `pwd` quay.io/amboss-mededu/pyfmt:0.7 isort .
-	docker run -v `pwd`:`pwd` -w `pwd` quay.io/amboss-mededu/pyfmt:0.7 black --exclude .venv .
-	# Linting.
-	docker run -v `pwd`:`pwd` -w `pwd` quay.io/amboss-mededu/pyfmt:0.7 flake8 --exclude .venv --exclude=.venv --max-line-length=120 .
-	# Formats python files again after flake8.
-	docker run -v `pwd`:`pwd` -w `pwd` quay.io/amboss-mededu/pyfmt:0.7 black --exclude .venv .
-	# Static type checking.
-	docker run -v `pwd`:`pwd` -w `pwd` quay.io/amboss-mededu/pyfmt:0.7 mypy --ignore-missing-imports quepid_es_proxy
+lint:
+	.venv/bin/ruff format .
 
-
-run-tests:
-	PYTHONPATH=`pwd` .venv/bin/pytest -W ignore tests/units --cov-report xml:cov.xml --cov .
+tests:
+	PYTHONPATH=`pwd` .venv/bin/pytest -vv -W ignore tests/units --cov-report xml:cov.xml --cov .
 
 run-server:
 	PROXY_USERNAME="lab_user" \
diff --git a/README.md b/README.md
index 6e444c2..1ba8992 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,9 @@ rarely available publicly or can be tunneled to the labeler's computer.
 
 ## Run with Docker
 
-The image of the proxy is publicly available on [https://quay.io/amboss-mededu/quepid_es_proxy](https://quay.io/amboss-mededu/quepid_es_proxy).
+~~The image of the proxy is publicly available on [https://quay.io/amboss-mededu/quepid_es_proxy](https://quay.io/amboss-mededu/quepid_es_proxy).~~
+Currently the image is not publicly available. Please build it locally.
+
 To run the proxy docker execute
 ```bash
 docker run \
@@ -19,7 +21,7 @@
 -e "ES_USE_SSL=true" \
 -e "WEB_CONCURRENCY=2" \
 -p 5000:5000 \
-quay.io/amboss-mededu/quepid_es_proxy
+quepid_es_proxy
 ```
 
 The proxy is now available with basic auth now on `http://username_is_here:password_is_here@localhost:5000/`. Use this address in Quepid instead of Elasticsearch.
@@ -36,12 +38,12 @@ docker run \
 -e "WEB_CONCURRENCY=2" \
 -p 5000:5000 \
 --network="elasticsearch-docker-network" \
-quay.io/amboss-mededu/quepid_es_proxy
+quepid_es_proxy
 ```
 
 ## Run locally with Python virtual environment.
 
-Proxy uses Python 3.8.
+Proxy uses Python 3.11.
 First prepare a virtual environment `make prepare-env`.
 
 The proxy will be available with the default credentials on
diff --git a/pyproject.toml b/pyproject.toml
index 74b536d..38db817 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.black]
 include = '\.pyi?$'
-target-version = ['py37']
+target-version = ['py311']
 exclude = '''
 /(
     \.git
@@ -16,4 +16,31 @@ exclude = '''
   | blib2to3
   | tests/data
 )/
-'''
\ No newline at end of file
+'''
+
+[tool.ruff]
+# Docs for Ruff are here https://github.com/charliermarsh/ruff
+line-length = 120
+
+# Enable Pyflakes `E` and `F` codes. `I` is for isort.
+select = ["C", "E", "F", "W", "I001", "SIM", "TCH", "UP", "ERA", "PIE", "RET", "TID"]
+ignore = ["C901"]
+
+exclude = [
+    ".direnv",
+    ".git",
+    ".mypy_cache",
+    ".ruff_cache",
+    ".venv",
+    "__pypackages__",
+]
+per-file-ignores = {}
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+target-version = "py311"
+
+[tool.ruff.isort]
+combine-as-imports = true
+force-single-line = false
\ No newline at end of file
diff --git a/quepid_es_proxy/elasticsearch/executor.py b/quepid_es_proxy/elasticsearch/executor.py
index 9431c0c..bfb0130 100644
--- a/quepid_es_proxy/elasticsearch/executor.py
+++ b/quepid_es_proxy/elasticsearch/executor.py
@@ -1,5 +1,3 @@
-from typing import List, Optional
-
 from .connection import get_connection
 
 
@@ -8,9 +6,9 @@ async def search(
     from_: int,
     size: int,
     explain: bool = False,
-    source: Optional[List[str]] = None,
-    query: Optional[str] = None,
-    q: Optional[str] = None,
+    source: list[str] | None = None,
+    query: str | None = None,
+    q: str | None = None,
 ):
     conn = await get_connection()
     payload = {
diff --git a/quepid_es_proxy/main.py b/quepid_es_proxy/main.py
index 62a523f..42ecdad 100644
--- a/quepid_es_proxy/main.py
+++ b/quepid_es_proxy/main.py
@@ -1,8 +1,6 @@
-from typing import List, Optional, Union
-
 from fastapi import Depends, FastAPI
 from fastapi.middleware.cors import CORSMiddleware
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 
 from .auth import basic_auth
 from .elasticsearch import executor
@@ -10,7 +8,7 @@
 app = FastAPI()
 
 # Replace "*" to the list of your origins, e.g.
-# origins = ["quepid.yourcompany.com", "localhost:8080"]
+# origins = ["quepid.yourcompany.com", "localhost:8080"]  # noqa: ERA001
 origins = "*"
 
 app.add_middleware(
@@ -30,20 +28,15 @@ async def root():
 
 class ProxyRequst(BaseModel):
     explain: bool
-    from_: int
+    from_: int = Field(..., alias="from")
     size: int
-    source: Union[str, List[str], None]
-    query: Optional[dict]
-
-    class Config:
-        fields = {"from_": "from", "source": "_source"}
+    source: str | list[str] | None = Field(None, alias="_source")
+    query: dict | None
 
 
 @app.post("/{index_name}")
-async def search_proxy(
-    index_name: str, body: ProxyRequst, username: str = Depends(basic_auth)
-) -> dict:
-    result = await executor.search(
+async def search_proxy(index_name: str, body: ProxyRequst, username: str = Depends(basic_auth)) -> dict:
+    return await executor.search(
         index_name,
         body.from_,
         body.size,
@@ -52,7 +45,6 @@ async def search_proxy(
         {"query": body.query} if body.query else None,
         None,
     )
-    return result
 
 
 @app.get("/{index_name}")
@@ -63,7 +55,7 @@ async def explain_missing_documents(
     size: int,
     username: str = Depends(basic_auth),
 ) -> dict:
-    result = await executor.search(
+    return await executor.search(
         index_name,
         0,
         size,
@@ -72,7 +64,6 @@ async def explain_missing_documents(
         None,
         q,
     )
-    return result
 
 
 @app.post("/{index_name}/_doc/{doc_id}/_explain")
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 0000000..98ebe2f
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,8 @@
+-r requirements.txt
+pytest==7.4.3
+pytest-cov==4.1.0
+pytest-aiohttp==1.0.5
+pytest-asyncio==0.21.1
+pre-commit==3.5.0
+coverage==7.3.2
+ruff==0.1.6
diff --git a/requirements.txt b/requirements.txt
index e2f8204..07b59ec 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,9 +1,3 @@
-fastapi==0.65.2
-uvicorn==0.11.8
-asynctest==0.13.0
-pytest==6.0.2
-pytest-cov==2.10.1
-pytest-aiohttp==0.3.0
-coverage==5.3
+fastapi==0.104.1
+uvicorn==0.24.0.post1
 elasticsearch==7.9.1
-pre-commit==2.7.1
\ No newline at end of file
diff --git a/tests/units/test_main.py b/tests/units/test_main.py
index d1fb693..bf48926 100644
--- a/tests/units/test_main.py
+++ b/tests/units/test_main.py
@@ -1,23 +1,20 @@
+from unittest.mock import patch
+
 import pytest
-from asynctest import patch
 
 
+@pytest.mark.asyncio
 @pytest.mark.parametrize(
     "query,expected_query",
     [(None, None), ({"day": "Friday"}, {"query": {"day": "Friday"}})],
 )
 async def test_search_proxy(environment, query, expected_query):
-
     from quepid_es_proxy import main
 
-    with patch.object(
-        main.executor, "search", return_value={"test": "passed"}
-    ) as search_mock:
+    with patch.object(main.executor, "search", return_value={"test": "passed"}) as search_mock:
         result = await main.search_proxy(
             "big-index",
-            body=main.ProxyRequst(
-                **{"explain": True, "from": 3, "size": 7, "query": query}
-            ),
+            body=main.ProxyRequst(**{"explain": True, "from": 3, "size": 7, "query": query}),
         )
 
         assert result == {"test": "passed"}
@@ -32,29 +29,23 @@
         )
 
 
+@pytest.mark.asyncio
 async def test_explain_missing_documents():
     from quepid_es_proxy import main
-    with patch.object(
-        main.executor, "search", return_value={"test": "passed"}
-    ) as search_mock:
+    with patch.object(main.executor, "search", return_value={"test": "passed"}) as search_mock:
         result = await main.explain_missing_documents(
             index_name="index-123", _source="_id,title", q="title:Berlin", size=2
         )
 
         assert result == {"test": "passed"}
-        search_mock.assert_awaited_once_with(
-            "index-123", 0, 2, False, "_id,title", None, "title:Berlin"
-        )
+        search_mock.assert_awaited_once_with("index-123", 0, 2, False, "_id,title", None, "title:Berlin")
 
 
+@pytest.mark.asyncio
 async def test_explain():
     from quepid_es_proxy import main
-    with patch.object(
-        main.executor, "explain", return_value={"test": "passed again!"}
-    ) as explain_mock:
-        result = await main.explain(
-            index_name="index-123", doc_id="123_321", query={"match": "all"}
-        )
+    with patch.object(main.executor, "explain", return_value={"test": "passed again!"}) as explain_mock:
+        result = await main.explain(index_name="index-123", doc_id="123_321", query={"match": "all"})
 
         assert result == {"test": "passed again!"}
         explain_mock.assert_awaited_once_with("index-123", "123_321", {"match": "all"})