From 4ba36539e76c6b8d9bb620e68d899b5e55e6ce86 Mon Sep 17 00:00:00 2001 From: loopsaaage Date: Mon, 9 Sep 2024 11:40:40 +0800 Subject: [PATCH] Add starters and demo --- .editorconfig | 31 ++++ .flake8 | 93 +++++++++++ .gitignore | 144 ++++++++++++++++++ README.CN.md | 30 ++++ README.md | 115 +++++++++++++- .../__pycache__/__init__.cpython-39.pyc | Bin 150 -> 0 bytes .../__pycache__/demo.cpython-39.pyc | Bin 627 -> 0 bytes infra_ai_service/alembic.ini | 104 +++++++++++++ {tests => infra_ai_service/api}/__init__.py | 0 infra_ai_service/api/common/__init__.py | 1 + infra_ai_service/api/example/__init__.py | 0 infra_ai_service/api/example/schemas.py | 19 +++ infra_ai_service/api/example/services.py | 24 +++ infra_ai_service/api/example/views.py | 26 ++++ infra_ai_service/api/router.py | 8 + infra_ai_service/api/system/__init__.py | 0 infra_ai_service/api/system/views.py | 12 ++ infra_ai_service/core/.env-example | 11 ++ infra_ai_service/core/app.py | 26 ++++ infra_ai_service/core/config.py | 65 ++++++++ infra_ai_service/db/__init__.py | 0 infra_ai_service/db/db.py | 18 +++ infra_ai_service/db/migrations/README | 1 + infra_ai_service/db/migrations/env.py | 77 ++++++++++ infra_ai_service/db/migrations/script.py.mako | 25 +++ .../versions/2022-07-17-10-45_b6a9795b9043.py | 53 +++++++ infra_ai_service/db/models/__init__.py | 13 ++ infra_ai_service/db/models/common.py | 32 ++++ infra_ai_service/db/models/example.py | 11 ++ infra_ai_service/demo.py | 16 -- infra_ai_service/server.py | 18 +++ requirements.txt | 17 +++ test-demos/async_demo.py | 15 ++ test-demos/demo.py | 54 +++++++ tests/__pycache__/__init__.cpython-39.pyc | Bin 139 -> 0 bytes .../test_demo.cpython-39-pytest-8.3.2.pyc | Bin 3524 -> 0 bytes tests/conftest.py | 59 +++++++ tests/test_demo.py | 28 ---- tests/test_example.py | 67 ++++++++ tests/test_health.py | 15 ++ 40 files changed, 1183 insertions(+), 45 deletions(-) create mode 100644 .editorconfig create mode 100644 .flake8 create mode 100644 .gitignore create mode 100644 README.CN.md delete mode 100644 infra_ai_service/__pycache__/__init__.cpython-39.pyc delete mode 100644 infra_ai_service/__pycache__/demo.cpython-39.pyc create mode 100644 infra_ai_service/alembic.ini rename {tests => infra_ai_service/api}/__init__.py (100%) create mode 100644 infra_ai_service/api/common/__init__.py create mode 100644 infra_ai_service/api/example/__init__.py create mode 100644 infra_ai_service/api/example/schemas.py create mode 100644 infra_ai_service/api/example/services.py create mode 100644 infra_ai_service/api/example/views.py create mode 100644 infra_ai_service/api/router.py create mode 100644 infra_ai_service/api/system/__init__.py create mode 100644 infra_ai_service/api/system/views.py create mode 100644 infra_ai_service/core/.env-example create mode 100644 infra_ai_service/core/app.py create mode 100644 infra_ai_service/core/config.py create mode 100644 infra_ai_service/db/__init__.py create mode 100644 infra_ai_service/db/db.py create mode 100644 infra_ai_service/db/migrations/README create mode 100644 infra_ai_service/db/migrations/env.py create mode 100644 infra_ai_service/db/migrations/script.py.mako create mode 100644 infra_ai_service/db/migrations/versions/2022-07-17-10-45_b6a9795b9043.py create mode 100644 infra_ai_service/db/models/__init__.py create mode 100644 infra_ai_service/db/models/common.py create mode 100644 infra_ai_service/db/models/example.py delete mode 100644 infra_ai_service/demo.py create mode 100644 infra_ai_service/server.py create mode 100644 requirements.txt create 
mode 100644 test-demos/async_demo.py create mode 100644 test-demos/demo.py delete mode 100644 tests/__pycache__/__init__.cpython-39.pyc delete mode 100644 tests/__pycache__/test_demo.cpython-39-pytest-8.3.2.pyc create mode 100644 tests/conftest.py delete mode 100644 tests/test_demo.py create mode 100644 tests/test_example.py create mode 100644 tests/test_health.py diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..eb831fe --- /dev/null +++ b/.editorconfig @@ -0,0 +1,31 @@ +root = true + +[*] +tab_width = 4 +end_of_line = lf +max_line_length = 88 +ij_visual_guides = 88 +insert_final_newline = true +trim_trailing_whitespace = true + +[*.{js,py,html}] +charset = utf-8 + +[*.md] +trim_trailing_whitespace = false + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +[Makefile] +indent_style = tab + +[.flake8] +indent_style = space +indent_size = 2 + +[*.py] +indent_style = space +indent_size = 4 +ij_python_from_import_parentheses_force_if_multiline = true diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..8b6458b --- /dev/null +++ b/.flake8 @@ -0,0 +1,93 @@ +[flake8] +max-complexity = 6 +inline-quotes = double +max-line-length = 88 +extend-ignore = E203 +docstring_style=sphinx + +ignore = + ; Found `f` string + WPS305, + ; Missing docstring in public module + D100, + ; Missing docstring in magic method + D105, + ; Missing docstring in __init__ + D107, + ; Found `__init__.py` module with logic + WPS412, + ; Found class without a base class + WPS306, + ; Missing docstring in public nested class + D106, + ; First line should be in imperative mood + D401, + ; Found `__init__.py` module with logic + WPS326, + ; Found string constant over-use + WPS226, + ; Found upper-case constant in a class + WPS115, + ; Found nested function + WPS602, + ; Found method without arguments + WPS605, + ; Found overused expression + WPS204, + ; Found too many module members + WPS202, + ; Found too high module cognitive complexity + WPS232, + ; line break before binary operator + W503, + ; Found module with too many imports + WPS201, + ; Inline strong start-string without end-string. 
+  RST210,
+  ; Found nested class
+  WPS431,
+  ; Found wrong module name
+  WPS100,
+  ; Found too many methods
+  WPS214,
+  ; Found too long ``try`` body
+  WPS229,
+  ; Found unpythonic getter or setter
+  WPS615,
+  ; Found a line that starts with a dot
+  WPS348,
+  ; Found complex default value (for dependency injection)
+  WPS404,
+  ; Do not perform function calls in argument defaults (for dependency injection)
+  B008,
+  ; line too long
+  E501,
+
+per-file-ignores =
+  ; all tests
+  test_*.py,tests.py,tests_*.py,*/tests/*,conftest.py:
+    ; Use of assert detected
+    S101,
+    ; Found outer scope names shadowing
+    WPS442,
+    ; Found too many local variables
+    WPS210,
+    ; Found magic number
+    WPS432,
+    ; Missing parameter(s) in Docstring
+    DAR101,
+
+  ; all init files
+  __init__.py:
+    ; ignore not used imports
+    F401,
+    ; ignore import with wildcard
+    F403,
+    ; Found wrong metadata variable
+    WPS410,
+
+exclude =
+  ./.git,
+  ./venv,
+  migrations,
+  ./var,
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9d38591
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,144 @@
+### Python template
+
+.idea/
+.vscode/
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+.python-version
diff --git a/README.CN.md b/README.CN.md
new file mode 100644
index 0000000..03b0b35
--- /dev/null
+++ b/README.CN.md
@@ -0,0 +1,30 @@
+# 注意事项
+
+
+## 前置条件
+- 需要安装本地psql数据库
+```shell
+sudo apt update
+sudo apt install postgresql postgresql-contrib
+sudo systemctl status postgresql
+sudo -u postgres psql
+ALTER USER postgres PASSWORD 'postgres';
+# 建立数据库(db)
+CREATE DATABASE db;
+```
+
+## 本地启动
+```shell
+pip install -r requirements.txt
+# 迁移数据（alembic 需在 infra_ai_service/ 目录下执行）
+cd infra_ai_service/
+alembic revision --autogenerate -m "Example model"
+alembic upgrade head
+cd ..
+# 运行测试
+pytest .
+# 启动
+python infra_ai_service/server.py
+```
+
+
+## 本地访问
+- 浏览器打开 http://localhost:8000/api/docs/ 查看接口文档
diff --git a/README.md b/README.md
index 6ab9a22..06f0640 100644
--- a/README.md
+++ b/README.md
@@ -1 +1,114 @@
-# infraAIService
\ No newline at end of file
+# FastAPI Starter Project
+
+Project includes:
+
+- `fastapi`
+- `sqlmodel`
+- `alembic`
+
+## Models
+
+Check `db/models` and `db/migrations`; there is one example model.
+
+## Using docker
+
+Set up env variables in `infra_ai_service/core/.env` using `infra_ai_service/core/.env-example`
+
+#### Install and run
+
+```bash
+docker-compose up -d web
+
+# you can track logs with:
+docker-compose logs -f --tail=100 web
+```
+
+Go to: http://localhost:8000/api/docs/
+
+#### Migrations
+
+Create migrations
+
+```bash
+docker-compose exec web alembic revision --autogenerate -m "Example model"
+```
+
+Apply migrations
+
+```bash
+docker-compose exec web alembic upgrade head
+```
+
+#### Tests
+
+Run tests
+
+```bash
+docker-compose exec web pytest .
+```
+
+## Without docker
+
+#### Install
+
+```bash
+pip install -r requirements.txt
+```
+
+Set up env variables in `infra_ai_service/core/.env`.
+
+#### Run
+
+```bash
+python infra_ai_service/server.py
+```
+
+Go to: http://localhost:8000/api/docs/
+
+#### Migrations
+
+Create migrations
+
+```bash
+cd infra_ai_service
+alembic revision --autogenerate -m "Example model"
+```
+
+Apply migrations
+
+```bash
+cd infra_ai_service
+alembic upgrade head
+```
+
+#### Tests
+
+Run tests
+
+```bash
+pytest .
+```
+
+## Environment Variables
+
+To run this project, you will need to add the following environment variables to your infra_ai_service/core/.env file
+
+`BASE_URL` - default: http://localhost:8000
+
+`RELOAD` - default: false
+
+`DB_HOST` - default: localhost
+
+`DB_PORT` - default: 5432
+
+`DB_USER` - default: postgres
+
+`DB_PASS` - default: postgres
+
+`DB_BASE` - default: db
+
+`DB_ECHO` - default: false
diff --git a/infra_ai_service/__pycache__/__init__.cpython-39.pyc b/infra_ai_service/__pycache__/__init__.cpython-39.pyc
deleted file mode 100644
index fb6804059a4223aeca23a387aa3e94696d93e577..0000000000000000000000000000000000000000
diff --git a/infra_ai_service/__pycache__/demo.cpython-39.pyc b/infra_ai_service/__pycache__/demo.cpython-39.pyc
deleted file mode 100644
index 7423ed65e69b4c71e961f64eb50ed0261d06a62a..0000000000000000000000000000000000000000
+    async def get_all_examples(self) -> list[Example]:
+        examples = await self.session.execute(select(Example))
+
+        return examples.scalars().fetchall()
+
+    async def create_example(self, data: ExampleCreateSchema) -> Example:
+        example = Example(**data.dict())
+        self.session.add(example)
+        await self.session.commit()
+        await self.session.refresh(example)
+
+        return example
diff --git a/infra_ai_service/api/example/views.py b/infra_ai_service/api/example/views.py
new file mode 100644
index 0000000..bb2d5ff
--- /dev/null
+++ b/infra_ai_service/api/example/views.py
@@ -0,0 +1,26 @@
+from infra_ai_service.api.example.schemas import ExampleCreateSchema, ExampleSchema
+from infra_ai_service.api.example.services import ExampleService
+from infra_ai_service.db.db import db_session
+from infra_ai_service.db.models.example import Example
+from fastapi import APIRouter, Depends
+from sqlmodel.ext.asyncio.session import AsyncSession
+
+router = APIRouter()
+
+
+@router.get("/", response_model=list[ExampleSchema])
+async def get_examples(
+    session: AsyncSession = Depends(db_session),
+) -> list[Example]:
+    example_service = ExampleService(session=session)
+    return await example_service.get_all_examples()
+
+
+@router.post("/", response_model=ExampleSchema)
+async def create_example(
+    data: ExampleCreateSchema,
+    session: AsyncSession = Depends(db_session),
+) -> Example:
+    example_service = ExampleService(session=session)
+    example = await example_service.create_example(data)
+    return example
diff --git a/infra_ai_service/api/router.py b/infra_ai_service/api/router.py
new file mode 100644
index 0000000..3f53c77
--- /dev/null
+++ b/infra_ai_service/api/router.py
@@ -0,0 +1,8 @@
+from fastapi.routing import APIRouter
+
+from infra_ai_service.api.example.views import router as example_router
+from infra_ai_service.api.system.views import router as system_router
+
+api_router = APIRouter()
+api_router.include_router(system_router, prefix="/system", tags=["system"])
+api_router.include_router(example_router, prefix="/example", tags=["example"])
diff --git
a/infra_ai_service/api/system/__init__.py b/infra_ai_service/api/system/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/infra_ai_service/api/system/views.py b/infra_ai_service/api/system/views.py new file mode 100644 index 0000000..a2b55dc --- /dev/null +++ b/infra_ai_service/api/system/views.py @@ -0,0 +1,12 @@ +from fastapi import APIRouter + +router = APIRouter() + + +@router.get("/health/") +async def health() -> None: + """ + Checks the health of a project. + + It returns 200 if the project is healthy. + """ diff --git a/infra_ai_service/core/.env-example b/infra_ai_service/core/.env-example new file mode 100644 index 0000000..d9fb804 --- /dev/null +++ b/infra_ai_service/core/.env-example @@ -0,0 +1,11 @@ +# create new with next to this one, with name .env + +RELOAD=True + +DB_HOST=db +DB_PORT=5432 +DB_USER=postgres +DB_PASS=postgres +DB_BASE=db + +BASE_URL=http://localhost:8000 diff --git a/infra_ai_service/core/app.py b/infra_ai_service/core/app.py new file mode 100644 index 0000000..3060a98 --- /dev/null +++ b/infra_ai_service/core/app.py @@ -0,0 +1,26 @@ +from infra_ai_service.api.router import api_router +from fastapi import FastAPI +from fastapi.responses import UJSONResponse + + +def get_app() -> FastAPI: + """ + Get FastAPI application. + + This is the main constructor of an application. + + :return: application. + """ + app = FastAPI( + title="FastAPI Starter Project", + description="FastAPI Starter Project", + version="1.0", + docs_url="/api/docs/", + redoc_url="/api/redoc/", + openapi_url="/api/openapi.json", + default_response_class=UJSONResponse, + ) + + app.include_router(router=api_router, prefix="/api") + + return app diff --git a/infra_ai_service/core/config.py b/infra_ai_service/core/config.py new file mode 100644 index 0000000..636348f --- /dev/null +++ b/infra_ai_service/core/config.py @@ -0,0 +1,65 @@ +from pathlib import Path +from sys import modules + +from pydantic import BaseSettings + +BASE_DIR = Path(__file__).parent.resolve() + + +class Settings(BaseSettings): + """Application settings.""" + + ENV: str = "dev" + HOST: str = "0.0.0.0" + PORT: int = 8000 + _BASE_URL: str = f"https://{HOST}:{PORT}" + # quantity of workers for uvicorn + WORKERS_COUNT: int = 1 + # Enable uvicorn reloading + RELOAD: bool = False + # Database settings + DB_HOST: str = "localhost" + DB_PORT: int = 5432 + DB_USER: str = "postgres" + DB_PASS: str = "postgres" + _DB_BASE: str = "db" + DB_ECHO: bool = False + + @property + def DB_BASE(self): + return self._DB_BASE + + @property + def BASE_URL(self) -> str: + return self._BASE_URL if self._BASE_URL.endswith("/") else f"{self._BASE_URL}/" + + @property + def DB_URL(self) -> str: + """ + Assemble Database URL from settings. + + :return: Database URL. 
+ """ + + return f"postgresql+asyncpg://{self.DB_USER}:{self.DB_PASS}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_BASE}" + + class Config: + env_file = f"{BASE_DIR}/.env" + env_file_encoding = "utf-8" + fields = { + "_BASE_URL": { + "env": "BASE_URL", + }, + "_DB_BASE": { + "env": "DB_BASE", + }, + } + + +class TestSettings(Settings): + @property + def DB_BASE(self): + return f"{super().DB_BASE}_test" + + +settings = TestSettings() if "pytest" in modules else Settings() diff --git a/infra_ai_service/db/__init__.py b/infra_ai_service/db/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/infra_ai_service/db/db.py b/infra_ai_service/db/db.py new file mode 100644 index 0000000..e6fd548 --- /dev/null +++ b/infra_ai_service/db/db.py @@ -0,0 +1,18 @@ +from typing import AsyncGenerator + +from infra_ai_service.core.config import settings +from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.orm import sessionmaker +from sqlmodel.ext.asyncio.session import AsyncSession + +async_engine = create_async_engine(settings.DB_URL, echo=settings.DB_ECHO, future=True) + + +async def db_session() -> AsyncGenerator: + async_session = sessionmaker( + bind=async_engine, + class_=AsyncSession, + expire_on_commit=False, + ) + async with async_session() as session: + yield session diff --git a/infra_ai_service/db/migrations/README b/infra_ai_service/db/migrations/README new file mode 100644 index 0000000..e0d0858 --- /dev/null +++ b/infra_ai_service/db/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. \ No newline at end of file diff --git a/infra_ai_service/db/migrations/env.py b/infra_ai_service/db/migrations/env.py new file mode 100644 index 0000000..e1dc042 --- /dev/null +++ b/infra_ai_service/db/migrations/env.py @@ -0,0 +1,77 @@ +import asyncio +from logging.config import fileConfig + +from alembic import context +from infra_ai_service.core.config import settings +from infra_ai_service.db.models import load_all_models +from sqlalchemy.ext.asyncio.engine import create_async_engine +from sqlmodel import SQLModel + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) # type: ignore + +load_all_models() +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = SQLModel.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ + """ + context.configure( + url=str(settings.DB_URL), + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection): + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = create_async_engine(str(settings.DB_URL)) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + asyncio.run(run_migrations_online()) diff --git a/infra_ai_service/db/migrations/script.py.mako b/infra_ai_service/db/migrations/script.py.mako new file mode 100644 index 0000000..52b6c3e --- /dev/null +++ b/infra_ai_service/db/migrations/script.py.mako @@ -0,0 +1,25 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +import sqlalchemy as sa +import sqlmodel +from alembic import op +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/infra_ai_service/db/migrations/versions/2022-07-17-10-45_b6a9795b9043.py b/infra_ai_service/db/migrations/versions/2022-07-17-10-45_b6a9795b9043.py new file mode 100644 index 0000000..c3a16b4 --- /dev/null +++ b/infra_ai_service/db/migrations/versions/2022-07-17-10-45_b6a9795b9043.py @@ -0,0 +1,53 @@ +"""Init migrations + +Revision ID: b6a9795b9043 +Revises: +Create Date: 2022-07-17 10:45:33.821637 + +""" +import sqlalchemy as sa +import sqlmodel +from alembic import op + +# revision identifiers, used by Alembic. +revision = "b6a9795b9043" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "example", + sa.Column( + "id", + sqlmodel.sql.sqltypes.GUID(), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column( + "created_at", + sa.DateTime(), + server_default=sa.text("current_timestamp(0)"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(), + server_default=sa.text("current_timestamp(0)"), + nullable=False, + ), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("active", sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_example_id"), "example", ["id"], unique=True) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
###
+    op.drop_index(op.f("ix_example_id"), table_name="example")
+    op.drop_table("example")
+    # ### end Alembic commands ###
diff --git a/infra_ai_service/db/models/__init__.py b/infra_ai_service/db/models/__init__.py
new file mode 100644
index 0000000..2d36ba6
--- /dev/null
+++ b/infra_ai_service/db/models/__init__.py
@@ -0,0 +1,13 @@
+import pkgutil
+from pathlib import Path
+
+
+def load_all_models() -> None:
+    """Load all models from this folder."""
+    package_dir = Path(__file__).resolve().parent
+    modules = pkgutil.walk_packages(
+        path=[str(package_dir)],
+        prefix="db.models.",
+    )
+    for module in modules:
+        __import__(module.name)  # noqa: WPS421
diff --git a/infra_ai_service/db/models/common.py b/infra_ai_service/db/models/common.py
new file mode 100644
index 0000000..98d1cb0
--- /dev/null
+++ b/infra_ai_service/db/models/common.py
@@ -0,0 +1,32 @@
+import uuid as uuid_pkg
+from datetime import datetime
+
+from sqlalchemy import text
+from sqlmodel import Field, SQLModel
+
+
+class UUIDModel(SQLModel):
+    id: uuid_pkg.UUID = Field(
+        default_factory=uuid_pkg.uuid4,
+        primary_key=True,
+        index=True,
+        nullable=False,
+        sa_column_kwargs={"server_default": text("gen_random_uuid()"), "unique": True},
+    )
+
+
+class TimestampModel(SQLModel):
+    created_at: datetime = Field(
+        default_factory=datetime.utcnow,
+        nullable=False,
+        sa_column_kwargs={"server_default": text("current_timestamp(0)")},
+    )
+
+    updated_at: datetime = Field(
+        default_factory=datetime.utcnow,
+        nullable=False,
+        sa_column_kwargs={
+            "server_default": text("current_timestamp(0)"),
+            "onupdate": text("current_timestamp(0)"),
+        },
+    )
diff --git a/infra_ai_service/db/models/example.py b/infra_ai_service/db/models/example.py
new file mode 100644
index 0000000..f6dae90
--- /dev/null
+++ b/infra_ai_service/db/models/example.py
@@ -0,0 +1,11 @@
+from infra_ai_service.db.models.common import TimestampModel, UUIDModel
+
+
+class Example(TimestampModel, UUIDModel, table=True):
+    __tablename__ = "example"
+
+    name: str
+    active: bool = True
+
+    def __repr__(self):
+        return f"<Example {self.name}>"
diff --git a/infra_ai_service/demo.py b/infra_ai_service/demo.py
deleted file mode 100644
index 8927b60..0000000
--- a/infra_ai_service/demo.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def add(x, y):
-    return x + y
-
-
-def subtract(x, y):
-    return x - y
-
-
-def multiply(x, y):
-    return x * y
-
-
-def divide(x, y):
-    if y == 0:
-        raise ValueError("Cannot divide by zero!")
-    return x / y
diff --git a/infra_ai_service/server.py b/infra_ai_service/server.py
new file mode 100644
index 0000000..e0a1f6f
--- /dev/null
+++ b/infra_ai_service/server.py
@@ -0,0 +1,18 @@
+import uvicorn
+from core.config import settings
+
+
+def main() -> None:
+    """Entrypoint of the application."""
+    uvicorn.run(
+        "core.app:get_app",
+        workers=settings.WORKERS_COUNT,
+        host=settings.HOST,
+        port=settings.PORT,
+        reload=settings.RELOAD,
+        factory=True,
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..4f3c833
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,17 @@
+fastapi==0.109.1
+uvicorn[standard]==0.20.0
+ujson==5.5.0
+python-dotenv==0.21.0
+sqlmodel==0.0.8
+asyncpg==0.29.0
+alembic==1.8.1
+sqlalchemy-utils==0.38.3
+mypy==0.961
+yesqa==1.3.0
+black==24.3.0
+isort==5.10.1
+pytest==6.2.5
+pytest-asyncio==0.19.0
+requests==2.31.0
+httpx==0.23.0
+pydantic==1.10.12
diff --git a/test-demos/async_demo.py b/test-demos/async_demo.py
new file mode 100644
index 0000000..2f3c7a5
--- /dev/null
+++ b/test-demos/async_demo.py
@@ -0,0 +1,15 @@
+import asyncio
+
+
+async def fetch_data():
+    print("Start fetching data...")
+    await asyncio.sleep(2)  # simulate an I/O-bound operation
+    print("Data fetch finished")
+    return {'data': 123}
+
+
+async def main():
+    result = await fetch_data()
+    print(result)
+
+asyncio.run(main())
\ No newline at end of file
diff --git a/test-demos/demo.py b/test-demos/demo.py
new file mode 100644
index 0000000..069adb2
--- /dev/null
+++ b/test-demos/demo.py
@@ -0,0 +1,54 @@
+from fastapi import FastAPI
+from pydantic import BaseModel
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi.middleware.cors import CORSMiddleware
+
+
+class Item(BaseModel):
+    name: str
+    description: str = None
+    price: float
+    tax: float = None
+
+
+app = FastAPI()
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+
+@app.get("/")
+def read_root():
+    return {"Hello": "World"}
+
+
+@app.post("/items/")
+def create_item(item: Item):
+    return {"name": item.name, "price": item.price}
+
+
+@app.get("/items/{item_id}")
+async def read_item(item_id: int):
+    if item_id not in [1, 2, 3]:
+        raise HTTPException(status_code=404, detail="Item not found")
+    return {"item_id": item_id}
+
+
+def common_parameters(q: str = None, skip: int = 0, limit: int = 100):
+    return {"q": q, "skip": skip, "limit": limit}
+
+
+@app.get("/items/")
+async def read_items(commons: dict = Depends(common_parameters)):
+    return commons
+
diff --git a/tests/__pycache__/__init__.cpython-39.pyc b/tests/__pycache__/__init__.cpython-39.pyc
deleted file mode 100644
index bcdf4241abdc74d8056bb7f5e5e90f9fee5487e8..0000000000000000000000000000000000000000
diff --git a/tests/__pycache__/test_demo.cpython-39-pytest-8.3.2.pyc b/tests/__pycache__/test_demo.cpython-39-pytest-8.3.2.pyc
deleted file mode 100644
index 177285b7e3138115a64f0dc5598ae2f6e09d2b4a..0000000000000000000000000000000000000000
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..ba25a4d
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,59 @@
+import asyncio
+from typing import AsyncGenerator, Generator
+
+import pytest
+import pytest_asyncio
+from asyncpg.exceptions import InvalidCatalogNameError
+from infra_ai_service.core.app import get_app
+from infra_ai_service.db.db import async_engine
+from fastapi import FastAPI
+from httpx import AsyncClient
+from
sqlalchemy.orm import sessionmaker +from sqlalchemy.util import concurrency +from sqlalchemy_utils import create_database, database_exists +from sqlmodel import SQLModel +from sqlmodel.ext.asyncio.session import AsyncSession + + +@pytest.fixture(scope="session") +def event_loop(request) -> Generator: # : indirect usage + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + + +@pytest_asyncio.fixture +async def client(app: FastAPI) -> AsyncGenerator: + async with AsyncClient(app=app, base_url="http://test") as client: + yield client + + +@pytest_asyncio.fixture +def app() -> FastAPI: + return get_app() + + +def create_db_if_not_exists(db_url): + try: + db_exists = database_exists(db_url) + except InvalidCatalogNameError: + db_exists = False + + if not db_exists: + create_database(db_url) + + +@pytest_asyncio.fixture(scope="function") +async def db_session() -> AsyncGenerator: + session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False) + async with session() as s: + await concurrency.greenlet_spawn(create_db_if_not_exists, async_engine.url) + async with async_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + yield s + + async with async_engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.drop_all) + + await async_engine.dispose() diff --git a/tests/test_demo.py b/tests/test_demo.py deleted file mode 100644 index 3ba84ee..0000000 --- a/tests/test_demo.py +++ /dev/null @@ -1,28 +0,0 @@ -# tests/test_calculator.py - -import pytest - -from infra_ai_service.demo import add, divide, multiply, subtract - - -def test_add(): - assert add(3, 5) == 8 - assert add(-1, 1) == 0 - - -def test_subtract(): - assert subtract(10, 5) == 5 - assert subtract(-1, -1) == 0 - - -def test_multiply(): - assert multiply(3, 5) == 15 - assert multiply(-1, 5) == -5 - - -def test_divide(): - assert divide(10, 2) == 5 - assert divide(10, 5) == 2 - - with pytest.raises(ValueError): - divide(10, 0) diff --git a/tests/test_example.py b/tests/test_example.py new file mode 100644 index 0000000..2c1d04d --- /dev/null +++ b/tests/test_example.py @@ -0,0 +1,67 @@ +import pytest +from infra_ai_service.db.models.example import Example +from fastapi import FastAPI, status +from httpx import AsyncClient +from sqlalchemy import func, select +from sqlmodel.ext.asyncio.session import AsyncSession + + +@pytest.mark.asyncio +async def test_list_example_empty( + client: AsyncClient, + app: FastAPI, + db_session: AsyncSession, +) -> None: + """ + Checks empty list of example + """ + url = app.url_path_for("get_examples") + response = await client.get(url) + assert response.status_code == status.HTTP_200_OK + assert response.json() == [] + + +@pytest.mark.asyncio +async def test_list_example( + client: AsyncClient, + app: FastAPI, + db_session: AsyncSession, +) -> None: + """ + Checks list of example + """ + example_data = {"name": "Example 1", "active": True} + example = Example(**example_data) + db_session.add(example) + await db_session.commit() + + url = app.url_path_for("get_examples") + response = await client.get(url) + + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert len(data) == 1 + assert data[0]["name"] == example_data["name"] + assert data[0]["active"] == example_data["active"] + + +@pytest.mark.asyncio +async def test_create_example( + client: AsyncClient, + app: FastAPI, + db_session: AsyncSession, +) -> None: + """ + Checks create of example + """ + example_data = {"name": "Example 1", "active": 
True}
+
+    url = app.url_path_for("get_examples")
+    response = await client.post(url, json=example_data)
+
+    assert response.status_code == status.HTTP_200_OK
+    data = response.json()
+    assert data["name"] == example_data["name"]
+    assert data["active"] == example_data["active"]
+    count = await db_session.execute(select([func.count()]).select_from(Example))
+    assert count.scalar() == 1
diff --git a/tests/test_health.py b/tests/test_health.py
new file mode 100644
index 0000000..8fbf5a8
--- /dev/null
+++ b/tests/test_health.py
@@ -0,0 +1,15 @@
+import pytest
+from fastapi import FastAPI, status
+from httpx import AsyncClient
+
+@pytest.mark.asyncio
+async def test_health(client: AsyncClient, app: FastAPI) -> None:
+    """
+    Checks the health endpoint.
+
+    :param client: client for the infra_ai_service.
+    :param app: current FastAPI application.
+    """
+    url = app.url_path_for("health")
+    response = await client.get(url)
+    assert response.status_code == status.HTTP_200_OK
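A quick way to exercise the endpoints added by this patch once the service is running locally. This is an illustrative sketch, not part of the patch: it assumes the defaults above (host/port 8000 from core/config.py, the /api prefix from core/app.py) and reuses the payload shape from tests/test_example.py; httpx is already pinned in requirements.txt. The file name smoke_check.py is hypothetical.

```python
# smoke_check.py -- illustrative only, not part of the patch
import httpx

BASE_URL = "http://localhost:8000"  # default HOST/PORT from infra_ai_service/core/config.py


def smoke_check() -> None:
    # /api/system/health/ is added in api/system/views.py and should answer 200
    health = httpx.get(f"{BASE_URL}/api/system/health/")
    health.raise_for_status()

    # POST /api/example/ creates a row, mirroring tests/test_example.py
    created = httpx.post(
        f"{BASE_URL}/api/example/",
        json={"name": "Example 1", "active": True},
    )
    created.raise_for_status()
    print("created:", created.json())

    # GET /api/example/ lists all rows
    listed = httpx.get(f"{BASE_URL}/api/example/")
    listed.raise_for_status()
    print("examples:", listed.json())


if __name__ == "__main__":
    smoke_check()
```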