diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index e69de29..0000000 diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index bda7236..8693ef0 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -23,11 +23,10 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install ruff pytest - python -m pip install -r requirements.txt + make install-dev - name: Code style run: | - pre-commit run --all-files + poetry run pre-commit run --all-files - name: Pytest run: | - make tests + make test diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index feca58c..05bce2d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,13 +5,14 @@ repos: rev: v4.5.0 hooks: - id: check-added-large-files + args: ["--maxkb=700"] - id: check-yaml - id: check-toml - id: check-json - id: check-symlinks - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.7 + rev: v0.4.2 hooks: - id: ruff - id: ruff-format @@ -27,7 +28,7 @@ repos: - manual - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.7.1 + rev: v1.10.0 hooks: - id: mypy pass_filenames: false diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..cccb881 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,32 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [1.0.0] - 2024-06-01 + +### Added + +- API: Character selection by id +- API: Paginated characters selection by gender, status, species, order by, order by direction +- API: Character search by name +- API: Episode selection by id +- API: Paginated episodes selection +- API: Season selection by id +- API: Paginated seasons selection +- API: Callbacks for characters, episodes, seasons +- API: Server Sent Events (SSE) for characters +- API: User registration +- API: User authorization +- API: User activation +- API: User confirmation resend message +- API: User information update +- GraphQL: Character selection by id +- GraphQL: Paginated (Edged) characters selection by gender, status, species, order by, order by direction +- GraphQL: Episode selection by id +- GraphQL: Paginated (Edged) episodes selection +- GraphQL: Season selection by id +- GraphQL: Paginated (Edged) seasons selection + +[1.0.0]: https://github.com/koldakov/futuramaapi/releases/tag/1.0.0 diff --git a/Makefile b/Makefile index c35b0e3..eebbf64 100644 --- a/Makefile +++ b/Makefile @@ -1,28 +1,27 @@ SHELL = /bin/bash +PYTHON = python3.12 help: # Display this message @sed -ne '/@sed/!s/# //p' $(MAKEFILE_LIST) -messages-init: # locale=LANG, init LANG language - @test $${locale?Please specify locale. Example \"locale=en_CA\"} - @pybabel init -l $(locale) -i locale/messages.pot -d locale +install-dev: # Install DEV/TEST Environ and dependencies + @echo "Upgrading pip" + @$(PYTHON) -m pip install --upgrade pip + @echo "Installing poetry" + @$(PYTHON) -m pip install poetry + @echo "Installing dependencies" + @$(PYTHON) -m poetry install -messages-extract: # Extract messages to locale/messages.pot - @pybabel extract \ - --version=0.0.1 \ - --msgid-bugs-address=coldie322@gmail.com \ - --project=FuturamaAPI \ - --copyright-holder=FuturamaAPI \ - --mapping babel.cfg \ - --output-file=locale/messages.pot \ - . 
+install: # Install Environ and dependencies + @echo "Upgrading pip" + @$(PYTHON) -m pip install --upgrade pip + @echo "Installing poetry" + @$(PYTHON) -m pip install poetry + @echo "Installing dependencies" + @$(PYTHON) -m poetry install --without dev --without test -messages: # Update all locales - @$(MAKE) messages-extract - @pybabel update --input-file=locale/messages.pot --output-dir=locale +test: # Run tests + @poetry run $(PYTHON) -m pytest -messages-compile: # Generate .mo files for all locales - @pybabel compile --directory=locale - -tests: # Run tests - @python -m pytest +migrate: # Run database migrations + @poetry run $(PYTHON) -m alembic upgrade head diff --git a/README.md b/README.md index 18a54e3..216e860 100644 --- a/README.md +++ b/README.md @@ -58,8 +58,8 @@ If you create models in a new file, please import it in env.py, because alembic does not detect child classes. ```commandline -alembic revision --autogenerate -m "Revision Name" -alembic upgrade head +poetry run alembic revision --autogenerate -m "Revision Name" +poetry run alembic upgrade head ```
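Note: the renamed Makefile targets above mirror what the updated GitHub Actions workflow now calls. A minimal local sketch of the same flow, assuming a checkout with this Makefile, Python 3.12, and a populated `.env` (the migration target also assumes a reachable database):

```commandline
# Install Poetry and all dependency groups, as the CI "Install dependencies" step now does
make install-dev
# Run linters/formatters through the project's Poetry environment, matching the "Code style" step
poetry run pre-commit run --all-files
# Run the test suite (target renamed from "tests" to "test")
make test
# Apply Alembic migrations via the new target (requires a configured database)
make migrate
```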

(back to top)

@@ -72,7 +72,7 @@ export $(cat .env | xargs) # Compile translations make messages-compile # Run hypercorn server -hypercorn --reload app.main:app +hypercorn --reload futuramaapi.main:app ```
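With dependencies now managed by Poetry, the dev-server command above would typically be run inside the Poetry virtualenv as well; a sketch under that assumption (the plain `hypercorn` call still works if the environment is already activated):

```commandline
# Load environment variables, as shown in the README
export $(cat .env | xargs)
# Start hypercorn against the renamed package (app.main:app -> futuramaapi.main:app)
poetry run hypercorn --reload futuramaapi.main:app
```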

(back to top)

diff --git a/alembic.ini b/alembic.ini index e4ed6f6..5278187 100644 --- a/alembic.ini +++ b/alembic.ini @@ -2,7 +2,7 @@ [alembic] # path to migration scripts -script_location = alembic +script_location = futuramaapi/repositories/migrations # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s # Uncomment the line below if you want the files to be prepended with date and time diff --git a/app/core/__init__.py b/app/core/__init__.py deleted file mode 100644 index 54be719..0000000 --- a/app/core/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from app.core.settings import feature_flags, settings - -__all__ = [ - "feature_flags", - "settings", -] diff --git a/app/graph_ql/routers.py b/app/graph_ql/routers.py deleted file mode 100644 index b2c4539..0000000 --- a/app/graph_ql/routers.py +++ /dev/null @@ -1,7 +0,0 @@ -import strawberry -from strawberry.fastapi import GraphQLRouter - -from app.graph_ql.schemas import Query - -schema = strawberry.Schema(Query) -router = GraphQLRouter(schema, path="/graphql") diff --git a/app/graph_ql/schemas.py b/app/graph_ql/schemas.py deleted file mode 100644 index 3c6ca3f..0000000 --- a/app/graph_ql/schemas.py +++ /dev/null @@ -1,286 +0,0 @@ -from typing import Any - -import strawberry - -from app.repositories.models import ( - Character as CharacterModel, -) -from app.repositories.models import ( - CharacterDoesNotExist, - CharacterGender, - CharacterGenderFilter, - CharacterSpecies, - CharacterSpeciesFilter, - CharacterStatus, - CharacterStatusFilter, - EpisodeDoesNotExist, - SeasonDoesNotExist, -) -from app.repositories.models import ( - Episode as EpisodeModel, -) -from app.repositories.models import ( - Season as SeasonModel, -) -from app.repositories.sessions import get_async_session_ctx -from app.services.base import EpisodeBase as EpisodeBaseSchema -from app.services.characters import Character as CharacterSchema -from app.services.episodes import ( - Episode as EpisodeSchema, -) -from app.services.episodes import ( - SeasonEpisode as SeasonEpisodeSchema, -) -from app.services.seasons import ( - EpisodeSeason as EpisodeSeasonSchema, -) -from app.services.seasons import ( - Season as SeasonSchema, -) - - -class BaseQueryException(Exception): - ... - - -class CharacterQueryException(BaseQueryException): - ... - - -class LimitViolation(CharacterQueryException): - ... - - -class OffsetViolation(CharacterQueryException): - ... 
- - -@strawberry.experimental.pydantic.type(model=CharacterSchema) -class Character: - id: strawberry.auto # noqa: A003 - name: strawberry.auto - gender: strawberry.enum(CharacterGender) # type: ignore - status: strawberry.enum(CharacterStatus) # type: ignore - species: strawberry.enum(CharacterSpecies) # type: ignore - created_at: strawberry.auto - image: strawberry.auto - - -@strawberry.type -class PageBase: - limit: int - offset: int - total: int - edges: list[Any] - - @staticmethod - def get_schema_class(): - raise NotImplementedError() - - @staticmethod - def get_edge_class(): - raise NotImplementedError() - - @classmethod - def from_params(cls, edges, limit: int, offset: int, total: int, /): - schema_class = cls.get_schema_class() - edge_class = cls.get_edge_class() - return cls( - limit=limit, # type: ignore - offset=offset, # type: ignore - total=total, # type: ignore - edges=[edge_class.from_pydantic(schema_class.model_validate(edge)) for edge in edges], # type: ignore - ) - - -@strawberry.type -class Characters(PageBase): - edges: list[Character] - - @staticmethod - def get_schema_class(): - return CharacterSchema - - @staticmethod - def get_edge_class(): - return Character - - -@strawberry.experimental.pydantic.type(model=SeasonEpisodeSchema, all_fields=True) -class SeasonEpisode: - ... - - -@strawberry.experimental.pydantic.type(model=EpisodeBaseSchema, all_fields=True) -class EpisodeBase: - ... - - -@strawberry.experimental.pydantic.type(model=EpisodeSchema) -class Episode(EpisodeBase): - air_date: strawberry.auto - duration: strawberry.auto - created_at: strawberry.auto - season: SeasonEpisode - broadcast_code: str - - -@strawberry.experimental.pydantic.type(model=EpisodeSeasonSchema) -class EpisodeSeason(EpisodeBase): - id: strawberry.auto # noqa: A003 - name: strawberry.auto - broadcast_number: strawberry.auto - production_code: strawberry.auto - - -@strawberry.type -class Episodes(PageBase): - edges: list[Episode] - - @staticmethod - def get_schema_class(): - return EpisodeSchema - - @staticmethod - def get_edge_class(): - return Episode - - -@strawberry.experimental.pydantic.type(model=SeasonSchema) -class Season: - id: strawberry.auto # noqa: A003 - episodes: list[EpisodeSeason] - - -def validate_limit(limit: int, min_: int, max_: int, /) -> None: - if not min_ <= limit <= max_: - raise LimitViolation(f"Limit can be more than {min_} and less than {max_}") from None - - -@strawberry.type -class Seasons(PageBase): - edges: list[Season] - - @staticmethod - def get_schema_class(): - return SeasonSchema - - @staticmethod - def get_edge_class(): - return Season - - -@strawberry.type -class Query: - @strawberry.field() - async def character(self, character_id: int) -> Character | None: - async with get_async_session_ctx() as session: - try: - character: CharacterModel = await CharacterModel.get( - session, - character_id, - ) - except CharacterDoesNotExist: - return None - return Character.from_pydantic(CharacterSchema.model_validate(character)) # type: ignore - - @strawberry.field() - async def characters( # noqa: PLR0913 - self, - *, - limit: int | None = 50, - offset: int | None = 0, - gender: strawberry.enum(CharacterGenderFilter) | None = None, # type: ignore - status: strawberry.enum(CharacterStatusFilter) | None = None, # type: ignore - species: strawberry.enum(CharacterSpeciesFilter) | None = None, # type: ignore - ) -> Characters: - if limit is None: - limit = 50 - if offset is None: - offset = 0 - # For some reason self does not work under strawberry decorator, - # so 
class attrs can't be used. Please find another way. - _min_l: int = 1 - _max_l: int = 50 - _min_offset: int = 0 - validate_limit(limit, _min_l, _max_l) - - async with get_async_session_ctx() as session: - total: int = await CharacterModel.count(session) - validate_limit(offset, _min_offset, total) - characters = await CharacterModel.filter( - session, - limit=limit, - offset=offset, - gender=gender, - character_status=status, - species=species, - ) - return Characters.from_params(characters, limit, offset, total) - - @strawberry.field() - async def episode(self, episode_id: int) -> Episode | None: - async with get_async_session_ctx() as session: - try: - episode: EpisodeModel = await EpisodeModel.get( - session, - episode_id, - ) - except EpisodeDoesNotExist: - return None - return Episode.from_pydantic(EpisodeSchema.model_validate(episode)) # type: ignore - - @strawberry.field() - async def episodes( - self, - *, - limit: int | None = 50, - offset: int | None = 0, - ) -> Episodes: - if limit is None: - limit = 50 - if offset is None: - offset = 0 - validate_limit(limit, 1, 50) - async with get_async_session_ctx() as session: - total: int = await CharacterModel.count(session) - validate_limit(offset, 0, total) - episodes = await EpisodeModel.filter( - session, - limit=limit, - select_in_load=EpisodeModel.season, - offset=offset, - ) - return Episodes.from_params(episodes, limit, offset, total) - - @strawberry.field() - async def season(self, season_id: int) -> Season | None: - async with get_async_session_ctx() as session: - try: - season: SeasonModel = await SeasonModel.get(session, season_id) - except SeasonDoesNotExist: - return None - return Season.from_pydantic(SeasonSchema.model_validate(season)) # type: ignore - - @strawberry.field() - async def seasons( - self, - *, - limit: int | None = 50, - offset: int | None = 0, - ) -> Seasons: - if limit is None: - limit = 50 - if offset is None: - offset = 0 - validate_limit(limit, 1, 50) - async with get_async_session_ctx() as session: - total: int = await SeasonModel.count(session) - validate_limit(offset, 0, total) - seasons = await SeasonModel.filter( - session, - limit=limit, - select_in_load=SeasonModel.episodes, - offset=offset, - ) - return Seasons.from_params(seasons, limit, offset, total) diff --git a/app/graph_ql/tests/test_schemas.py b/app/graph_ql/tests/test_schemas.py deleted file mode 100644 index e8457ff..0000000 --- a/app/graph_ql/tests/test_schemas.py +++ /dev/null @@ -1,16 +0,0 @@ -import pytest - -from app.graph_ql.schemas import LimitViolation, validate_limit - - -class TestLimitValidation: - def test_validate_limit_should_raise_limit_violation_when_limit_less_then_min_allowed_value(self): - with pytest.raises(LimitViolation): - validate_limit(0, 1, 3) - - def test_validate_limit_should_raise_limit_violation_when_limit_more_then_max_allowed_value(self): - with pytest.raises(LimitViolation): - validate_limit(4, 1, 3) - - def test_validate_limit_should_return_none_when_limit_more_then_min_value_and_less_then_max_value(self): - assert validate_limit(2, 1, 3) is None diff --git a/app/main.py b/app/main.py deleted file mode 100644 index 2974cd3..0000000 --- a/app/main.py +++ /dev/null @@ -1,83 +0,0 @@ -import mimetypes - -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware -from fastapi.staticfiles import StaticFiles -from fastapi_pagination import add_pagination - -from app.core import feature_flags, settings -from app.graph_ql.routers import router as graphql_router -from app.middlewares.secure 
import HTTPSRedirectMiddleware -from app.routers.callbacks import router as callbacks_router -from app.routers.characters import router as characters_router -from app.routers.episodes import router as episodes_router -from app.routers.notifications import router as notifications_router -from app.routers.root import router as root_router -from app.routers.seasons import router as seasons_router -from app.routers.tokens import router as tokens_router -from app.routers.users import router as users_router - -mimetypes.add_type("image/webp", ".webp") - -app = FastAPI( - docs_url=None, - redoc_url=None, -) - -if feature_flags.enable_https_redirect: - app.add_middleware(HTTPSRedirectMiddleware) - -app.add_middleware( - CORSMiddleware, - allow_origins=settings.allow_origins, - allow_methods=["*"], - allow_headers=["*"], -) - -app.include_router(root_router) - -# API -app.include_router( - characters_router, - tags=["characters"], - prefix="/api", -) -app.include_router( - episodes_router, - tags=["episodes"], - prefix="/api", -) -app.include_router( - notifications_router, - tags=["notifications"], - prefix="/api", -) -app.include_router( - seasons_router, - tags=["seasons"], - prefix="/api", -) -app.include_router( - callbacks_router, - tags=["callbacks"], - prefix="/api", -) -app.include_router( - graphql_router, - prefix="/api", - include_in_schema=False, -) -app.include_router( - users_router, - tags=["users"], - prefix="/api", -) -app.include_router( - tokens_router, - tags=["tokens"], - prefix="/api", -) - -app.mount("/static", StaticFiles(directory="static"), name="static") - -add_pagination(app) diff --git a/app/middlewares/secure.py b/app/middlewares/secure.py deleted file mode 100644 index 6abd37a..0000000 --- a/app/middlewares/secure.py +++ /dev/null @@ -1,56 +0,0 @@ -import logging - -from starlette import status -from starlette.datastructures import URL -from starlette.responses import RedirectResponse -from starlette.types import ASGIApp, Receive, Scope, Send - -from app.core import settings - -logger = logging.getLogger(__name__) - - -class HTTPSRedirectMiddleware: - https_port = 443 - http_port = 80 - proto_header = "x-forwarded-proto" - port_header = "x-forwarded-port" - - def __init__(self, app: ASGIApp) -> None: - self.app = app - - def is_secure(self, headers: dict): - try: - host: str = headers["host"] - except KeyError: - logger.info("Host not found in headers") - return False - try: - proto: str = headers[self.proto_header] - except KeyError: - logger.info("x-forwarded-proto not found in headers") - return False - try: - port: str = headers[self.port_header] - except KeyError: - logger.info("x-forwarded-port not found in headers") - return False - - if host == settings.trusted_host and proto in ("https", "wss") and int(port) == self.https_port: - return True - return False - - async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: - headers: dict = {h[0].decode().lower(): h[1].decode() for h in scope["headers"]} - if not self.is_secure(headers): - url = URL(scope=scope) - redirect_scheme = {"http": "https", "ws": "wss"}[url.scheme] - netloc = url.hostname if url.port in (self.http_port, self.https_port) else url.netloc - url = url.replace(scheme=redirect_scheme, netloc=netloc) - response = RedirectResponse( - url, - status_code=status.HTTP_307_TEMPORARY_REDIRECT, - ) - await response(scope, receive, send) - else: - await self.app(scope, receive, send) diff --git a/app/repositories/base.py b/app/repositories/base.py deleted file mode 100644 index 
ce94779..0000000 --- a/app/repositories/base.py +++ /dev/null @@ -1,198 +0,0 @@ -from collections.abc import Sequence -from enum import Enum -from typing import TYPE_CHECKING, Any, Self -from uuid import UUID, uuid4 - -from asyncpg.exceptions import UniqueViolationError -from sqlalchemy import UUID as COLUMN_UUID -from sqlalchemy import Column, DateTime, Row, select -from sqlalchemy.exc import IntegrityError, NoResultFound -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, selectinload -from sqlalchemy.orm.attributes import InstrumentedAttribute -from sqlalchemy.sql import func -from sqlalchemy.sql.elements import BinaryExpression, UnaryExpression - -if TYPE_CHECKING: - from sqlalchemy.engine.result import Result - - -class OrderByDirection(Enum): - ASC = "asc" - DESC = "desc" - - -class OrderBy(Enum): - ID = "id" - CREATED_AT = "createdAt" - - -class ModelDoesNotExist(Exception): - """Model Does Not Exist.""" - - -class ModelAlreadyExist(Exception): - """Model Already Exists""" - - -class Base(DeclarativeBase): - __abstract__ = True - - order_by = OrderBy - - model_already_exists: type[ModelAlreadyExist] = ModelAlreadyExist - model_does_not_exist: type[ModelDoesNotExist] = ModelDoesNotExist - - id: Mapped[int] = mapped_column(primary_key=True) # noqa: A003 - - created_at = Column( - DateTime( - timezone=True, - ), - server_default=func.now(), - nullable=False, - ) - uuid = Column( - COLUMN_UUID( - as_uuid=True, - ), - primary_key=False, - unique=True, - nullable=False, - default=uuid4, - ) - - @classmethod - async def get( - cls, - session: AsyncSession, - val: int | str | UUID, - /, - *, - field: InstrumentedAttribute | None = None, - ) -> Self: - if field is None: - field = cls.id - statement = select(cls).where(field == val) - cursor: Result = await session.execute(statement) - try: - return cursor.scalars().one() - except NoResultFound: - raise cls.model_does_not_exist() from None - - @staticmethod - def filter_obj_to_cond( - obj, - orig, - model_field: Column[str | Enum], - /, - ) -> BinaryExpression: - if obj.name.startswith("NOT_"): - return model_field != orig[obj.name.split("NOT_", 1)[1]] - return model_field == orig[obj.name] - - @classmethod - def get_filter_statement( # noqa: PLR0913 - cls, - *, - limit: int | None = None, - order_by=OrderBy.ID, - order_by_direction=OrderByDirection.ASC, - select_in_load: InstrumentedAttribute | None = None, - offset: int | None = None, - **kwargs, - ): - statement = select(cls) - statement = statement.order_by( - cls.get_order_by( - field=order_by, - direction=order_by_direction, - ) - ) - cond_list: list = cls.get_cond_list(**kwargs) - if cond_list: - statement = statement.where(*cond_list) - if select_in_load is not None: - statement = statement.options(selectinload(select_in_load)) - if offset: - statement = statement.offset(offset) - if limit is not None: - statement = statement.limit(limit) - return statement - - @classmethod - def get_cond_list(cls, **kwargs) -> list[BinaryExpression]: - return [] - - @classmethod - async def filter( # noqa: A003, PLR0913 - cls, - session: AsyncSession, - /, - *, - limit: int | None = None, - order_by=None, - order_by_direction=OrderByDirection.ASC, - select_in_load: InstrumentedAttribute | None = None, - **kwargs, - ) -> Sequence[Row[tuple[Any, ...] 
| Any]]: - if order_by is None: - order_by = cls.order_by.ID - statement = cls.get_filter_statement( - limit=limit, - order_by=order_by, - order_by_direction=order_by_direction, - select_in_load=select_in_load, - **kwargs, - ) - cursor: Result = await session.execute(statement) - return cursor.scalars().all() - - @classmethod - def get_order_by( - cls, - *, - field=OrderBy.ID, - direction=OrderByDirection.ASC, - ) -> UnaryExpression: - _field: InstrumentedAttribute - if field is None: - _field = cls.id - else: - _field = cls.__table__.c[field.name.lower()] - if direction == OrderByDirection.DESC: - return _field.desc() - return _field.asc() - - @classmethod - async def count(cls, session: AsyncSession) -> int: - res = await session.execute(func.count(cls.id)) - return res.scalar() - - @classmethod - async def add( - cls, - session: AsyncSession, - data, - /, - *, - commit: bool = True, - extra_fields: dict[ - str, - Any, - ] - | None = None, - ) -> Self: - obj: Self = cls(**data.model_dump()) - if extra_fields is not None: - for name, value in extra_fields.items(): - setattr(obj, name, value) - session.add(obj) - if commit is True: - try: - await session.commit() - except IntegrityError as err: - if err.orig.sqlstate == UniqueViolationError.sqlstate: - raise cls.model_already_exists() from None - raise - return obj diff --git a/app/repositories/models.py b/app/repositories/models.py deleted file mode 100644 index f172d6b..0000000 --- a/app/repositories/models.py +++ /dev/null @@ -1,410 +0,0 @@ -from enum import Enum -from typing import TYPE_CHECKING -from uuid import UUID - -from fastapi_storages import FileSystemStorage -from fastapi_storages.integrations.sqlalchemy import ImageType -from sqlalchemy import ( - VARCHAR, - Boolean, - Column, - Date, - ForeignKey, - Integer, - SmallInteger, - select, -) -from sqlalchemy.dialects.postgresql import ENUM # TODO: engine agnostic. 
-from sqlalchemy.exc import NoResultFound -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.orm import Mapped, mapped_column, relationship, selectinload -from sqlalchemy.orm.attributes import InstrumentedAttribute -from sqlalchemy.sql.elements import BinaryExpression - -from app.core import settings -from app.repositories.base import Base, ModelAlreadyExist, ModelDoesNotExist - -if TYPE_CHECKING: - from sqlalchemy.engine.result import Result - - -def to_camel( - val: str, -): - return "".join([word if idx == 0 else word.capitalize() for idx, word in enumerate(val.lower().split("_"))]) - - -def generate_inverted_enum( - name: str, - proto_enum: type[Enum], - /, -) -> Enum: - return Enum(name, {f"NOT_{i.name}": f"!{i.value}" for i in [*proto_enum]}) - - -def generate_filter_enum( - name: str, - enums, -) -> Enum: - unpacked_enums: list[Enum] = [val for _e in enums for val in _e] - return Enum(name, {e.name: to_camel(e.value) for e in unpacked_enums}) - - -class CharacterStatus(Enum): - ALIVE = "ALIVE" - DEAD = "DEAD" - UNKNOWN = "UNKNOWN" - - -CharacterInvertedStatus = generate_inverted_enum( - "CharacterInvertedStatus", - CharacterStatus, -) - - -CharacterStatusFilter = generate_filter_enum( - "CharacterStatusFilter", - [ - CharacterStatus, - CharacterInvertedStatus, - ], -) - - -class CharacterGender(Enum): - MALE = "MALE" - FEMALE = "FEMALE" - UNKNOWN = "UNKNOWN" - - -CharacterInvertedGender = generate_inverted_enum( - "CharacterInvertedGender", - CharacterGender, -) - - -CharacterGenderFilter = generate_filter_enum( - "CharacterGenderFilter", - [ - CharacterGender, - CharacterInvertedGender, - ], -) - - -class CharacterSpecies(Enum): - HUMAN = "HUMAN" - ROBOT = "ROBOT" - HEAD = "HEAD" - ALIEN = "ALIEN" - MUTANT = "MUTANT" - MONSTER = "MONSTER" - UNKNOWN = "UNKNOWN" - - -CharacterInvertedSpecies = generate_inverted_enum( - "CharacterInvertedSpecies", - CharacterSpecies, -) - - -CharacterSpeciesFilter = generate_filter_enum( - "CharacterSpeciesFilter", - [ - CharacterSpecies, - CharacterInvertedSpecies, - ], -) - - -class Season(Base): - __tablename__ = "seasons" - - # Mappers - episodes: Mapped[list["Episode"]] = relationship( - back_populates="season", - ) - - @classmethod - async def get( - cls, - session: AsyncSession, - val: int | str | UUID, - /, - *, - field: InstrumentedAttribute = None, - ) -> "Season": - if field is None: - field = Season.id - cursor: "Result" = await session.execute( - select(Season).where(field == val).options(selectinload(Season.episodes)) - ) - try: - return cursor.scalars().one() - except NoResultFound as err: - raise SeasonDoesNotExist() from err - - -class SeasonDoesNotExist(ModelDoesNotExist): - """Season does not exist.""" - - -class EpisodeCharacterAssociation(Base): - __tablename__ = "episode_character_association" - - id = None # noqa: A003 - created_at = None - uuid = None - episode_id: Mapped[int] = mapped_column( - ForeignKey("episodes.id"), - primary_key=True, - ) - character_id: Mapped[int] = mapped_column( - ForeignKey("characters.id"), - primary_key=True, - ) - - -class Episode(Base): - __tablename__ = "episodes" - - name = Column( - VARCHAR( - length=128, - ), - nullable=True, - ) - air_date = Column( - Date(), - nullable=True, - ) - duration = Column( - Integer, - nullable=True, - ) - production_code = Column( - VARCHAR( - length=8, - ), - nullable=True, - ) - broadcast_number = Column( - SmallInteger, - nullable=True, - ) - - # Mappers - season_id: Mapped[int] = mapped_column( - ForeignKey("seasons.id"), - ) - season: 
Mapped["Season"] = relationship( - back_populates="episodes", - ) - - characters: Mapped[list["Character"]] = relationship( - secondary="episode_character_association", - back_populates="episodes", - ) - - @classmethod - async def get( - cls, - session: AsyncSession, - val: int | str | UUID, - /, - *, - field: InstrumentedAttribute = None, - ) -> "Episode": - if field is None: - field = Episode.id - cursor: "Result" = await session.execute( - select(Episode).where(field == val).options(selectinload(Episode.season)) - ) - try: - return cursor.scalars().one() - except NoResultFound as err: - raise EpisodeDoesNotExist() from err - - -class EpisodeDoesNotExist(ModelDoesNotExist): - """Episode does not exist.""" - - -class CharacterOrderBy(Enum): - ID = "id" - NAME = "name" - CREATED_AT = "createdAt" - - -class Character(Base): - __tablename__ = "characters" - order_by = CharacterOrderBy - - name = Column( - VARCHAR( - length=128, - ), - nullable=False, - ) - status = Column( - ENUM( - CharacterStatus, - ), - nullable=False, - ) - gender = Column( - ENUM( - CharacterGender, - ), - nullable=False, - ) - species = Column( - ENUM( - CharacterSpecies, - ), - nullable=False, - ) - image = Column( - ImageType( - storage=FileSystemStorage(path=settings.project_root / settings.static), - ), - ) - - # Mappers - episodes: Mapped[list["Episode"]] = relationship( - secondary="episode_character_association", - back_populates="characters", - ) - - @classmethod - async def get( - cls, - session: AsyncSession, - val: int | str | UUID, - /, - *, - field: InstrumentedAttribute = None, - ) -> "Character": - if field is None: - field = Character.id - cursor: "Result" = await session.execute(select(Character).where(field == val)) - try: - return cursor.scalars().one() - except NoResultFound as err: - raise CharacterDoesNotExist() from err - - @classmethod - def get_cond_list(cls, **kwargs) -> list[BinaryExpression]: - gender: CharacterGenderFilter | None = kwargs.get("gender") # type: ignore[valid-type] - character_status: CharacterStatusFilter | None = kwargs.get("character_status") # type: ignore[valid-type] - species: CharacterSpeciesFilter | None = kwargs.get("species") # type: ignore[valid-type] - query: str | None = kwargs.get("query") - cond_list = [] - if gender is not None: - cond_list.append( - cls.filter_obj_to_cond( - gender, - CharacterGender, - Character.gender, - ) - ) - if character_status is not None: - cond_list.append( - cls.filter_obj_to_cond( - character_status, - CharacterStatus, - Character.status, - ) - ) - if species is not None: - cond_list.append( - cls.filter_obj_to_cond( - species, - CharacterSpecies, - Character.species, - ) - ) - if query is not None: - cond_list.append(Character.name.ilike(f"%{query.lower()}%")) - return cond_list - - -class CharacterDoesNotExist(ModelDoesNotExist): - """Character does not exist.""" - - -class UserAlreadyExists(ModelAlreadyExist): - """User does not exist.""" - - -class User(Base): - __tablename__ = "users" - model_already_exists = UserAlreadyExists - - name = Column( - VARCHAR( - length=64, - ), - nullable=False, - ) - surname = Column( - VARCHAR( - length=64, - ), - nullable=False, - ) - middle_name = Column( - VARCHAR( - length=64, - ), - nullable=True, - ) - email = Column( - VARCHAR( - length=320, - ), - nullable=False, - unique=True, - ) - username = Column( - VARCHAR( - length=64, - ), - nullable=False, - unique=True, - ) - password = Column( - VARCHAR( - length=128, - ), - nullable=False, - ) - is_confirmed = Column( - Boolean, - 
default=False, - ) - is_subscribed = Column( - Boolean, - default=True, - ) - - @classmethod - async def get( - cls, - session: AsyncSession, - val: int | str | UUID, - /, - *, - field: InstrumentedAttribute = None, - ) -> "User": - if field is None: - field = User.id - cursor: "Result" = await session.execute(select(User).where(field == val)) - try: - return cursor.scalars().one() - except NoResultFound as err: - raise UserDoesNotExist() from err - - -class UserDoesNotExist(ModelDoesNotExist): - """User does not exist.""" diff --git a/app/repositories/sessions.py b/app/repositories/sessions.py deleted file mode 100644 index 5178fdb..0000000 --- a/app/repositories/sessions.py +++ /dev/null @@ -1,39 +0,0 @@ -from contextlib import asynccontextmanager - -from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.core import settings - -async_engine = create_async_engine( - str(settings.database_url), - echo=True, - future=True, -) - - -def build_async_session() -> async_sessionmaker: - return async_sessionmaker( - bind=async_engine, - class_=AsyncSession, - expire_on_commit=False, - ) - - -async def get_async_session() -> AsyncSession: - async_session: async_sessionmaker = build_async_session() - async with async_session() as session: - try: - yield session - finally: - await session.close() - - -@asynccontextmanager -async def get_async_session_ctx() -> AsyncSession: - async_session = build_async_session() - async with async_session() as session: - try: - yield session - finally: - await session.close() diff --git a/app/repositories/tests/test_models.py b/app/repositories/tests/test_models.py deleted file mode 100644 index db67e10..0000000 --- a/app/repositories/tests/test_models.py +++ /dev/null @@ -1,12 +0,0 @@ -from app.repositories.models import to_camel - - -class TestModelUtils: - def test_to_camel_should_return_snake_case_when_lower_case_text_has_one_underline(self): - assert to_camel("snake_case") == "snakeCase" - - def test_to_camel_should_return_snake_case_when_lower_case_text_has_two_underlines(self): - assert to_camel("snake_case_snake") == "snakeCaseSnake" - - def test_to_camel_should_return_snake_case_when_upper_case_text_has_one_underline(self): - assert to_camel("SNAKE_CASE") == "snakeCase" diff --git a/app/routers/root.py b/app/routers/root.py deleted file mode 100644 index bf43aff..0000000 --- a/app/routers/root.py +++ /dev/null @@ -1,63 +0,0 @@ -from fastapi import APIRouter, Depends, Request, status -from fastapi.openapi.docs import get_redoc_html -from fastapi.responses import FileResponse, Response -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.sessions import get_async_session -from app.services.root import process_about, process_get_root -from app.templates import gnu_translations - -router = APIRouter() - - -@router.get( - "/health", - tags=["health_check"], - include_in_schema=False, - status_code=status.HTTP_200_OK, -) -async def get_health() -> Response: - return Response(status_code=status.HTTP_200_OK) - - -@router.get( - "/", - include_in_schema=False, - status_code=status.HTTP_200_OK, - name="root", -) -async def get_root( - request: Request, - session: AsyncSession = Depends(get_async_session), # noqa: B008 -) -> Response: - return await process_get_root(request, session) - - -@router.get( - "/favicon.ico", - include_in_schema=False, -) -async def favicon() -> FileResponse: - return FileResponse("favicon.ico") - - -@router.get( - "/docs", - 
include_in_schema=False, - name="redoc_html", -) -async def get_redoc(): - return get_redoc_html( - openapi_url="/openapi.json", - title=f'{gnu_translations.gettext("FB00003")} | {gnu_translations.gettext("FB00001")}', - redoc_favicon_url="/favicon.ico", - ) - - -@router.get( - "/about", - include_in_schema=False, - name="about", -) -async def about(request: Request): - return await process_about(request) diff --git a/app/routers/users.py b/app/routers/users.py deleted file mode 100644 index 6069843..0000000 --- a/app/routers/users.py +++ /dev/null @@ -1,94 +0,0 @@ -from typing import Annotated - -from fastapi import APIRouter, Depends, status -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.sessions import get_async_session -from app.services.auth import oauth2_scheme -from app.services.security import AccessTokenData -from app.services.users import ( - User, - UserAdd, - UserUpdate, - process_activate, - process_add_user, - process_get_me, - process_update, -) - -router = APIRouter(prefix="/users") - - -@router.post( - "", - status_code=status.HTTP_201_CREATED, - response_model=User, - name="user", -) -async def add_user( - body: UserAdd, - session: AsyncSession = Depends(get_async_session), # noqa: B008 -) -> User: - """Create User. - - The user add endpoint is an API function allowing the creation of new user accounts. - It receives user details via HTTP requests, validates the information, - and stores it in the system's database. - This endpoint is essential for user registration and onboarding. - - Please note that currently endpoint is not protected. - However, if there are a lot of spam requests, the endpoint will be blocked or limited. - """ - return await process_add_user(body, session) - - -@router.get( - "/me", - response_model=User, - name="user_me", -) -async def get_me( - token: Annotated[AccessTokenData, Depends(oauth2_scheme)], - session: AsyncSession = Depends(get_async_session), # noqa: B008 -) -> User: - """Get user details. - - Retrieve authenticated user profile information, including username, email, and account details. - Personalize user experiences within the application using the JSON response containing user-specific data. - """ - return await process_get_me(token, session) - - -@router.get( - "/activate", - response_model=User, - name="activate_user", -) -async def activate( - sig: str, - session: AsyncSession = Depends(get_async_session), # noqa: B008 -) -> User: - """Get user details. - - Retrieve authenticated user profile information, including username, email, and account details. - Personalize user experiences within the application using the JSON response containing user-specific data. - """ - return await process_activate(sig, session) - - -@router.put( - "/", - response_model=User, - name="update_user", -) -async def update( - user: UserUpdate, - token: Annotated[AccessTokenData, Depends(oauth2_scheme)], - session: AsyncSession = Depends(get_async_session), # noqa: B008 -) -> User: - """Update user details. - - This endpoint is crucial for users to manage and maintain accurate profile information, - often including authentication and authorization checks for security. 
- """ - return await process_update(token, user, session) diff --git a/app/services/auth.py b/app/services/auth.py deleted file mode 100644 index 701c721..0000000 --- a/app/services/auth.py +++ /dev/null @@ -1,4 +0,0 @@ -from app.services.security import OAuth2JWTBearer, OAuth2JWTBearerRefresh - -oauth2_scheme = OAuth2JWTBearer(tokenUrl="token") -oauth2_refresh_scheme = OAuth2JWTBearerRefresh(tokenUrl="token") diff --git a/app/services/base.py b/app/services/base.py deleted file mode 100644 index 1bc8435..0000000 --- a/app/services/base.py +++ /dev/null @@ -1,13 +0,0 @@ -from pydantic import BaseModel, Field - - -class EpisodeBase(BaseModel): - id: int # noqa: A003 - name: str - broadcast_number: int = Field(alias="number") - production_code: str = Field( - alias="productionCode", - examples=[ - "1ACV01", - ], - ) diff --git a/app/services/callbacks.py b/app/services/callbacks.py deleted file mode 100644 index f93d7a6..0000000 --- a/app/services/callbacks.py +++ /dev/null @@ -1,248 +0,0 @@ -import json -from asyncio import sleep -from random import randint - -from fastapi import BackgroundTasks -from httpx import AsyncClient, Response -from pydantic import BaseModel, ConfigDict, Field, HttpUrl -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.models import ( - Character as CharacterModel, -) -from app.repositories.models import ( - CharacterDoesNotExist as CharacterDoesNotExistException, -) -from app.repositories.models import ( - Episode as EpisodeModel, -) -from app.repositories.models import ( - EpisodeDoesNotExist as EpisodeDoesNotExistException, -) -from app.repositories.models import ( - Season as SeasonModel, -) -from app.repositories.models import ( - SeasonDoesNotExist as SeasonDoesNotExistException, -) -from app.services.characters import Character -from app.services.episodes import Episode -from app.services.seasons import Season - -MIN_DELAY: int = 5 -MAX_DELAY: int = 10 - - -class CallbackRequest(BaseModel): - callback_url: HttpUrl = Field( - alias="callbackUrl", - ) - - model_config = ConfigDict(populate_by_name=True) - - -class CallbackResponse(BaseModel): - delay: int = Field( - ge=MIN_DELAY, - le=MAX_DELAY, - description="Delay after which the callback will be sent.", - ) - - -class _ObjectDoesNotExist(BaseModel): - id: int = Field( # noqa: A003 - description="Requested object ID.", - ) - detail: str = Field( - examples=[ - "Not found", - ], - ) - - -class CharacterDoesNotExist(_ObjectDoesNotExist): - """Character does not exist response.""" - - -class _ObjectType(BaseModel): - type: str = Field( # noqa: A003 - description="Requested Object type.", - ) - - -class CharacterCallbackResponse(_ObjectType): - item: Character | CharacterDoesNotExist - - -async def _get_character_or_not_found_object( - id_: int, - session: AsyncSession, - /, -) -> Character | CharacterDoesNotExist: - character: Character | CharacterDoesNotExist - try: - character = await CharacterModel.get(session, id_) - except CharacterDoesNotExistException: - character = CharacterDoesNotExist( - id=id_, - detail="Not found", - ) - return character - - -async def _send_callback(url: HttpUrl, body: BaseModel, /): - async with AsyncClient(http2=True) as client: - callback_response: Response = await client.post( - f"{url}", - json=json.loads(body.model_dump_json(by_alias=True)), - ) - callback_response.raise_for_status() - - -async def callback_characters_background_task( - character_id: int, - callback_request: CallbackRequest, - response: CallbackResponse, - session: AsyncSession, 
- /, -): - await sleep(response.delay) - character: Character | CharacterDoesNotExist = await _get_character_or_not_found_object(character_id, session) - body = CharacterCallbackResponse( - type=Character.__name__, - item=character, - ) - await _send_callback(callback_request.callback_url, body) - - -async def process_characters_callback( - character_id, - callback_request: CallbackRequest, - session: AsyncSession, - background_tasks: BackgroundTasks, - /, -) -> CallbackResponse: - response: CallbackResponse = CallbackResponse(delay=randint(MIN_DELAY, MAX_DELAY)) # noqa: S311 - background_tasks.add_task( - callback_characters_background_task, - character_id, - callback_request, - response, - session, - ) - return response - - -class EpisodeDoesNotExist(_ObjectDoesNotExist): - """Episode does not exist response.""" - - -async def _get_episode_or_not_found_object( - id_: int, - session: AsyncSession, - /, -) -> Episode | EpisodeDoesNotExist: - episode: Episode | EpisodeDoesNotExist - try: - episode = await EpisodeModel.get(session, id_) - except EpisodeDoesNotExistException: - episode = EpisodeDoesNotExist( - id=id_, - detail="Not found", - ) - return episode - - -class EpisodeCallbackResponse(_ObjectType): - item: Episode | EpisodeDoesNotExist - - -async def callback_episodes_background_task( - episode_id: int, - callback_request: CallbackRequest, - response: CallbackResponse, - session: AsyncSession, - /, -): - await sleep(response.delay) - episode: Episode | EpisodeDoesNotExist = await _get_episode_or_not_found_object(episode_id, session) - body = EpisodeCallbackResponse( - type=Episode.__name__, - item=episode, - ) - await _send_callback(callback_request.callback_url, body) - - -async def process_episodes_callback( - episode_id, - episode_request, - session, - background_tasks, -) -> CallbackResponse: - response: CallbackResponse = CallbackResponse(delay=randint(MIN_DELAY, MAX_DELAY)) # noqa: S311 - background_tasks.add_task( - callback_episodes_background_task, - episode_id, - episode_request, - response, - session, - ) - return response - - -# Season related part. 
-class SeasonDoesNotExist(_ObjectDoesNotExist): - """Season does not exist response.""" - - -async def _get_season_or_not_found_object( - id_: int, - session: AsyncSession, - /, -) -> Season | SeasonDoesNotExist: - season: Season | SeasonDoesNotExist - try: - season = await SeasonModel.get(session, id_) - except SeasonDoesNotExistException: - season = SeasonDoesNotExist( - id=id_, - detail="Not found", - ) - return season - - -class SeasonCallbackResponse(_ObjectType): - item: Season | SeasonDoesNotExist - - -async def callback_seasons_background_task( - season_id: int, - callback_request: CallbackRequest, - response: CallbackResponse, - session: AsyncSession, - /, -): - await sleep(response.delay) - season: Season | SeasonDoesNotExist = await _get_season_or_not_found_object(season_id, session) - body = SeasonCallbackResponse( - type=Season.__name__, - item=season, - ) - await _send_callback(callback_request.callback_url, body) - - -async def process_seasons_callback( - season_id, - season_request, - session, - background_tasks, -) -> CallbackResponse: - response: CallbackResponse = CallbackResponse(delay=randint(MIN_DELAY, MAX_DELAY)) # noqa: S311 - background_tasks.add_task( - callback_seasons_background_task, - season_id, - season_request, - response, - session, - ) - return response diff --git a/app/services/characters.py b/app/services/characters.py deleted file mode 100644 index 1f109ec..0000000 --- a/app/services/characters.py +++ /dev/null @@ -1,109 +0,0 @@ -from datetime import datetime -from typing import Any - -from fastapi import HTTPException, Request, status -from fastapi_pagination import Page -from fastapi_pagination.ext.sqlalchemy import paginate -from fastapi_storages import StorageImage -from pydantic import BaseModel, ConfigDict, Field, HttpUrl, field_validator -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.core import settings -from app.repositories.base import OrderByDirection -from app.repositories.models import Character as CharacterModel -from app.repositories.models import ( - CharacterDoesNotExist, - CharacterGender, - CharacterGenderFilter, - CharacterSpecies, - CharacterSpeciesFilter, - CharacterStatus, - CharacterStatusFilter, -) - - -def build_url(*, path: str | None = None): - path = f"{settings.static}/{path}" if path else f"{settings.static}" - return HttpUrl.build( - scheme="https", - host=settings.trusted_host, - path=path, - ) - - -class Character(BaseModel): - id: int # noqa: A003 - name: str - gender: CharacterGender - status: CharacterStatus - species: CharacterSpecies - created_at: datetime = Field(alias="createdAt") - image: HttpUrl | None = None - - @field_validator("image", mode="before") - @classmethod - def make_url(cls, value: StorageImage | None) -> HttpUrl | None: - """Makes URL from DB path. - - FastAPI does NOT work properly with proxy, so for now protocol will be hardcoded. - TODO: propagate forwarded headers, rely on trusted host. - - Args: - value (fastapi_storages.StorageImage): Image field. - - Returns: - ``pydantic.HttpUrl`` if Character has an image returns absolute URL to image and ``None`` otherwise. 
- """ - if value is None: - return None - return build_url(path=value._name) - - model_config = ConfigDict(from_attributes=True, populate_by_name=True) - - def __init__(self, request: Request | None = None, **data: Any): - self.request = request - super().__init__(**data) - - -async def get_character( - character_id: int, - session: AsyncSession, - /, -) -> Character: - try: - character: CharacterModel = await CharacterModel.get(session, character_id) - except CharacterDoesNotExist: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) from None - return Character.model_validate(character) - - -async def process_get_character( - character_id: int, - session: AsyncSession, - /, -) -> Character: - return await get_character(character_id, session) - - -async def process_get_characters( # noqa: PLR0913 - session: AsyncSession, - /, - *, - gender: CharacterGenderFilter | None = None, - character_status: CharacterStatusFilter | None = None, - species: CharacterSpeciesFilter | None = None, - order_by: CharacterModel.order_by | None = None, - direction: OrderByDirection | None = None, - query: str | None = None, -) -> Page[Character]: - return await paginate( - session, - CharacterModel.get_filter_statement( - order_by=order_by, - order_by_direction=direction, - gender=gender, - character_statusx=character_status, - species=species, - query=query, - ), - ) diff --git a/app/services/emails.py b/app/services/emails.py deleted file mode 100644 index 2617022..0000000 --- a/app/services/emails.py +++ /dev/null @@ -1,49 +0,0 @@ -from fastapi_mail import ConnectionConfig, FastMail, MessageSchema, MessageType -from pydantic import BaseModel, EmailStr, HttpUrl - -from app.core import settings - -conf = ConnectionConfig( - MAIL_USERNAME=settings.email.host_user, - MAIL_PASSWORD=settings.email.api_key, - MAIL_FROM=settings.email.default_from, - MAIL_PORT=settings.email.port, - MAIL_SERVER=settings.email.host, - MAIL_FROM_NAME=settings.email.from_name, - MAIL_STARTTLS=settings.email.start_tls, - MAIL_SSL_TLS=settings.email.ssl_tls, - USE_CREDENTIALS=settings.email.use_credentials, - VALIDATE_CERTS=settings.email.validate_certs, - TEMPLATE_FOLDER=settings.project_root / "templates", -) - - -fast_mail = FastMail(conf) - - -class _User(BaseModel): - name: str - surname: str - - -class ConfirmationBody(BaseModel): - url: HttpUrl - user: _User - - -async def send_confirmation( - emails: list[EmailStr], - subject: str, - template_body: ConfirmationBody, - /, -): - message = MessageSchema( - subject=subject, - recipients=emails, - template_body=template_body.model_dump(), - subtype=MessageType.html, - ) - await fast_mail.send_message( - message, - template_name="emails/confirmation.html", - ) diff --git a/app/services/episodes.py b/app/services/episodes.py deleted file mode 100644 index 292d0a0..0000000 --- a/app/services/episodes.py +++ /dev/null @@ -1,70 +0,0 @@ -from datetime import date, datetime - -from fastapi import HTTPException, status -from fastapi_pagination import Page -from fastapi_pagination.ext.sqlalchemy import paginate -from pydantic import BaseModel, ConfigDict, Field, computed_field -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.models import ( - Episode as EpisodeModel, -) -from app.repositories.models import ( - EpisodeDoesNotExist, -) -from app.services.base import EpisodeBase - - -class SeasonEpisode(BaseModel): - id: int # noqa: A003 - - model_config = ConfigDict(from_attributes=True, populate_by_name=True) - - -class Episode(EpisodeBase): - air_date: 
date | None = Field(alias="airDate") - duration: int | None - created_at: datetime = Field(alias="createdAt") - season: SeasonEpisode - - model_config = ConfigDict(from_attributes=True, populate_by_name=True) - - @computed_field( # type: ignore[misc] - alias="broadcastCode", - examples=[ - "S01E01", - ], - return_type=str, - ) - @property - def broadcast_code(self) -> str: - return f"S{self.season.id:02d}E{self.broadcast_number:02d}" - - -async def get_episode( - episode_id: int, - session: AsyncSession, - /, -) -> Episode: - try: - episode: EpisodeModel = await EpisodeModel.get(session, episode_id) - except EpisodeDoesNotExist: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) from None - return Episode.model_validate(episode) - - -async def process_get_episode( - episode_id: int, - session: AsyncSession, - /, -) -> Episode: - return await get_episode(episode_id, session) - - -async def process_get_episodes(session: AsyncSession, /) -> Page[Episode]: - return await paginate( - session, - EpisodeModel.get_filter_statement( - select_in_load=EpisodeModel.season, - ), - ) diff --git a/app/services/notifications.py b/app/services/notifications.py deleted file mode 100644 index 293f1e1..0000000 --- a/app/services/notifications.py +++ /dev/null @@ -1,60 +0,0 @@ -from asyncio import sleep -from datetime import datetime -from random import randint - -from fastapi import Request -from pydantic import BaseModel, Field -from sqlalchemy.ext.asyncio.session import AsyncSession -from sse_starlette import EventSourceResponse, ServerSentEvent - -from app.services.characters import Character, get_character -from app.templates import gnu_translations - -MIN_COORDINATE = 0 -MAX_COORDINATE = 2**6 - - -class CharacterMove(BaseModel): - name: str = Field(gnu_translations.gettext("FB00007")) - x: int = Field( - description=gnu_translations.gettext("FB00008"), - ge=MIN_COORDINATE, - le=MAX_COORDINATE, - ) - y: int = Field( - description=gnu_translations.gettext("FB00009"), - ge=MIN_COORDINATE, - le=MAX_COORDINATE, - ) - time: datetime = datetime.now() - - -async def generate_character_move( - request: Request, - character: Character, - /, -): - # I don't like infinite loops, please check if range can be used. - while True: - if await request.is_disconnected(): - # Can be removed. Do not trust lib, force connection close. 
- break - - yield ServerSentEvent( - data=CharacterMove( - name=character.name, - x=randint(MIN_COORDINATE, MAX_COORDINATE), # noqa: S311 - y=randint(MIN_COORDINATE, MAX_COORDINATE), # noqa: S311 - ).model_dump() - ) - await sleep(randint(1, 3)) # noqa: S311 - - -async def process_character_sse( - character_id: int, - request: Request, - session: AsyncSession, - /, -) -> EventSourceResponse: - character: Character = await get_character(character_id, session) - return EventSourceResponse(generate_character_move(request, character)) diff --git a/app/services/root.py b/app/services/root.py deleted file mode 100644 index f54a1f7..0000000 --- a/app/services/root.py +++ /dev/null @@ -1,34 +0,0 @@ -from fastapi import Request -from fastapi.responses import Response -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.models import Character as CharacterModel -from app.repositories.models import User as UserModel -from app.templates import templates - - -async def process_get_root( - request: Request, - session: AsyncSession, - /, -) -> Response: - characters = await CharacterModel.filter(session, limit=6) - user_count = await UserModel.count(session) - return templates.TemplateResponse( - request, - "index.html", - { - "characters": characters, - "user_count": user_count, - }, - ) - - -async def process_about( - request: Request, - /, -) -> Response: - return templates.TemplateResponse( - request, - "about.html", - ) diff --git a/app/services/seasons.py b/app/services/seasons.py deleted file mode 100644 index d416225..0000000 --- a/app/services/seasons.py +++ /dev/null @@ -1,45 +0,0 @@ -from fastapi import HTTPException, status -from fastapi_pagination import Page -from fastapi_pagination.ext.sqlalchemy import paginate -from pydantic import BaseModel, ConfigDict -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.models import ( - Season as SeasonModel, -) -from app.repositories.models import ( - SeasonDoesNotExist, -) -from app.services.base import EpisodeBase - - -class EpisodeSeason(EpisodeBase): - model_config = ConfigDict(from_attributes=True, populate_by_name=True) - - -class Season(BaseModel): - id: int # noqa: A003 - episodes: list[EpisodeSeason] - - model_config = ConfigDict(from_attributes=True, populate_by_name=True) - - -async def process_get_season( - season_id: int, - session: AsyncSession, - /, -) -> Season: - try: - season: SeasonModel = await SeasonModel.get(session, season_id) - except SeasonDoesNotExist: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) from None - return Season.model_validate(season) - - -async def process_get_seasons(session: AsyncSession, /) -> Page[Season]: - return await paginate( - session, - SeasonModel.get_filter_statement( - select_in_load=SeasonModel.episodes, - ), - ) diff --git a/app/services/security.py b/app/services/security.py deleted file mode 100644 index 7d0502a..0000000 --- a/app/services/security.py +++ /dev/null @@ -1,162 +0,0 @@ -from copy import deepcopy -from datetime import UTC, datetime, timedelta -from enum import Enum -from typing import Annotated -from uuid import UUID - -from fastapi import HTTPException, Request, status -from fastapi.param_functions import Body -from fastapi.security import OAuth2PasswordBearer -from jose import exceptions, jwt -from pydantic import BaseModel, Field, ValidationError -from typing_extensions import Doc - -from app.core import settings - -DEFAULT_JWT_EXPIRATION_TIME: int = 15 * 60 -REFRESH_JWT_EXPIRATION_TIME: int = 60 * 60 * 24 * 
21 - - -class TokenType(Enum): - REFRESH = "REFRESH" - ACCESS = "ACCESS" - - -class TokenBase(BaseModel): - type: TokenType # noqa: A003 - - -class AccessTokenData(TokenBase): - uuid: UUID - type: TokenType = TokenType.ACCESS # noqa: A003 - - -class RefreshTokenData(TokenBase): - nonce: str = Field(min_length=32, max_length=32) - uuid: UUID - type: TokenType = TokenType.REFRESH # noqa: A003 - - -def generate_jwt_signature( - payload: dict, - /, - *, - expiration_time: int = DEFAULT_JWT_EXPIRATION_TIME, - algorithm: str = "HS256", -) -> str: - cleaned_payload: dict = deepcopy(payload) - - cleaned_payload.update( - { - "exp": datetime.now(UTC) + timedelta(seconds=expiration_time), - } - ) - - return jwt.encode(cleaned_payload, settings.secret_key, algorithm=algorithm) - - -class SignatureErrorBase(Exception): - """Base JWT Error""" - - -class FatalSignatureError(SignatureErrorBase): - """Fatal Signature Error""" - - -class SignatureExpiredError(SignatureErrorBase): - """Signature Expired Error""" - - -def decode_jwt_signature( - token: str, - /, - *, - algorithms: list[str] | None = None, -) -> dict: - if algorithms is None: - algorithms = ["HS256"] - - try: - return jwt.decode(token, settings.secret_key, algorithms=algorithms) - except (exceptions.JWSError, exceptions.JWSSignatureError, exceptions.JWTError): - raise FatalSignatureError() from None - except exceptions.ExpiredSignatureError: - raise SignatureExpiredError() from None - - -class UnauthorizedResponse(BaseModel): - detail: str - - -class OAuth2PasswordRequestJson: - def __init__( - self, - *, - username: Annotated[ - str, - Body(), - Doc( - """ - `username` string. The OAuth2 spec requires the exact field name - `username`. - """ - ), - ], - password: Annotated[ - str, - Body(), - Doc( - """ - `password` string. The OAuth2 spec requires the exact field name - `password". 
- """ - ), - ], - ): - self.username = username - self.password = password - - -class OAuth2JWTBearerBase(OAuth2PasswordBearer): - _model: type[BaseModel] | None = None - - def extra_checks(self, model): - raise NotImplementedError() from None - - async def __call__(self, request: Request) -> str | BaseModel | None: - if self._model is None: - raise ValueError("model is not defined") - param = await super().__call__(request) - try: - decoded_token: dict = decode_jwt_signature(param) - except SignatureExpiredError: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Token expired", - ) from None - except FatalSignatureError: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - ) from None - try: - model = self._model(**decoded_token) - except ValidationError: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - self.extra_checks(model) - return model - - -class OAuth2JWTBearer(OAuth2JWTBearerBase): - _model: type[BaseModel] | None = AccessTokenData - - def extra_checks(self, model): - if model.type != TokenType.ACCESS: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - - -class OAuth2JWTBearerRefresh(OAuth2JWTBearerBase): - _model: type[BaseModel] | None = RefreshTokenData - - def extra_checks(self, model): - if model.type != TokenType.REFRESH: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None diff --git a/app/services/tokens.py b/app/services/tokens.py deleted file mode 100644 index ff64e01..0000000 --- a/app/services/tokens.py +++ /dev/null @@ -1,75 +0,0 @@ -from json import loads -from uuid import uuid4 - -from fastapi import HTTPException, status -from pydantic import BaseModel, ConfigDict, Field -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.repositories.models import User as UserModel -from app.repositories.models import UserDoesNotExist -from app.services.hashers import hasher -from app.services.security import ( - REFRESH_JWT_EXPIRATION_TIME, - AccessTokenData, - OAuth2PasswordRequestJson, - RefreshTokenData, - generate_jwt_signature, -) - - -class Token(BaseModel): - access_token: str = Field(alias="accessToken") - refresh_token: str = Field(alias="refreshToken") - - model_config = ConfigDict( - from_attributes=True, - populate_by_name=True, - ) - - -async def process_token_auth_user( - session: AsyncSession, - data: OAuth2PasswordRequestJson, - /, -) -> Token: - try: - user: UserModel = await UserModel.get( - session, - data.username, - field=UserModel.username, - ) - except UserDoesNotExist: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - if not hasher.verify(data.password, user.password): - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - - return Token( - access_token=generate_jwt_signature( - loads( - AccessTokenData(uuid=user.uuid).model_dump_json(by_alias=True), - ), - ), - refresh_token=generate_jwt_signature( - loads( - RefreshTokenData( - uuid=user.uuid, - nonce=uuid4().hex, - ).model_dump_json(by_alias=True) - ), - expiration_time=REFRESH_JWT_EXPIRATION_TIME, - ), - ) - - -class RefreshToken(BaseModel): - access_token: str - - -async def process_refresh_token_auth_user(data: RefreshTokenData) -> RefreshToken: - return RefreshToken( - access_token=generate_jwt_signature( - loads( - AccessTokenData(**data.model_dump()).model_dump_json(by_alias=True), - ), - ) - ) diff --git a/app/services/users.py b/app/services/users.py deleted file mode 100644 index d90c2d7..0000000 --- 
a/app/services/users.py +++ /dev/null @@ -1,227 +0,0 @@ -from datetime import datetime -from gettext import gettext as _ -from json import dumps, loads -from urllib.parse import urlencode -from uuid import UUID - -from fastapi import HTTPException, status -from pydantic import BaseModel, ConfigDict, EmailStr, Field, HttpUrl, field_validator -from sqlalchemy.ext.asyncio.session import AsyncSession - -from app.core import feature_flags, settings -from app.repositories.models import ( - User as UserModel, -) -from app.repositories.models import ( - UserAlreadyExists, - UserDoesNotExist, -) -from app.services.emails import ConfirmationBody, send_confirmation -from app.services.hashers import hasher -from app.services.security import ( - AccessTokenData, - FatalSignatureError, - SignatureExpiredError, - decode_jwt_signature, - generate_jwt_signature, -) - - -class UserBase(BaseModel): - name: str = Field( - min_length=1, - max_length=64, - ) - surname: str = Field( - min_length=1, - max_length=64, - ) - middle_name: str | None = Field( - default=None, - alias="middleName", - min_length=1, - max_length=64, - ) - email: EmailStr - username: str = Field( - min_length=5, - max_length=64, - ) - password: str = Field( - min_length=8, - max_length=128, - ) - is_subscribed: bool = Field( - default=True, - alias="isSubscribed", - ) - - model_config = ConfigDict( - from_attributes=True, - populate_by_name=True, - ) - - -class PasswordHashMixin: - @field_validator("password", mode="before") - @classmethod - def hash_password(cls, value: str) -> str: - return hasher.encode(value) - - -class UserAdd(UserBase, PasswordHashMixin): - ... - - -class User(UserBase): - id: int # noqa: A003 - is_confirmed: bool = Field(alias="isConfirmed") - created_at: datetime = Field(alias="createdAt") - - -EXPIRATION_72_HOURS = 60 * 60 * 72 - - -def _get_signature(uuid: UUID): - return generate_jwt_signature( - loads( - dumps( - { - "uuid": uuid, - }, - default=str, - ) - ), - expiration_time=EXPIRATION_72_HOURS, - ) - - -def get_confirmation_body(user: UserModel, /) -> ConfirmationBody: - url = HttpUrl.build( - scheme="https", - host=settings.trusted_host, - path="api/users/activate", - query=urlencode( - { - "sig": _get_signature(user.uuid), - } - ), - ) - return ConfirmationBody( - url=url, - user={ - "name": user.name, - "surname": user.surname, - }, - ) - - -async def process_add_user(body: UserAdd, session: AsyncSession, /) -> User: - try: - user: UserModel = await UserModel.add(session, body) - except UserAlreadyExists: - raise HTTPException( - detail="User with username or email already exists", - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - ) from None - if feature_flags.activate_users: - await send_confirmation( - [user.email], - _("FuturamaAPI - Account Activation"), - get_confirmation_body(user), - ) - return User.model_validate(user) - - -async def process_get_me(token: AccessTokenData, session: AsyncSession, /) -> User: - try: - user: UserModel = await UserModel.get(session, token.uuid, field=UserModel.uuid) - except UserDoesNotExist: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - return User.model_validate(user) - - -def _get_uuid(signature: str, /) -> UUID: - try: - decoded_signature = decode_jwt_signature(signature) - except SignatureExpiredError: - raise HTTPException( - detail="Token has expired", - status_code=status.HTTP_401_UNAUTHORIZED, - ) from None - except FatalSignatureError: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - - try: - uuid 
= decoded_signature["uuid"] - except KeyError: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - - return UUID(uuid) - - -async def process_activate(signature: str, session: AsyncSession, /) -> User: - uuid = _get_uuid(signature) - try: - user: UserModel = await UserModel.get(session, uuid, field=UserModel.uuid) - except UserDoesNotExist: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - if user.is_confirmed: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - - user.is_confirmed = True - await session.commit() - - return User.model_validate(user) - - -class UserUpdate(BaseModel, PasswordHashMixin): - name: str | None = Field( - min_length=1, - max_length=64, - default=None, - ) - surname: str | None = Field( - min_length=1, - max_length=64, - default=None, - ) - middle_name: str | None = Field( - default=None, - alias="middleName", - min_length=1, - max_length=64, - ) - password: str | None = Field( - default=None, - min_length=8, - max_length=128, - ) - is_subscribed: bool | None = Field( - default=None, - alias="isSubscribed", - ) - - model_config = ConfigDict( - from_attributes=True, - populate_by_name=True, - ) - - -async def process_update( - token: AccessTokenData, - request_user: UserUpdate, - session: AsyncSession, - /, -) -> User: - request_user_dict: dict = request_user.model_dump(exclude_none=True) - if not request_user_dict: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - try: - user: UserModel = await UserModel.get(session, token.uuid, field=UserModel.uuid) - except UserDoesNotExist: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None - for field, value in request_user_dict.items(): - setattr(user, field, value) - await session.commit() - return user diff --git a/app/templates.py b/app/templates.py deleted file mode 100644 index a5374d8..0000000 --- a/app/templates.py +++ /dev/null @@ -1,30 +0,0 @@ -import gettext -from typing import TYPE_CHECKING - -from fastapi.templating import Jinja2Templates -from jinja2 import pass_context - -if TYPE_CHECKING: - from fastapi import Request - from starlette.datastructures import URL - -try: - gnu_translations = gettext.translation( - domain="messages", - localedir="locale", - languages=["en_US"], - ) -except FileNotFoundError: - raise RuntimeError("Please compile messages first") from None - - -@pass_context -def relative_path_for(context: dict, name: str, /, **path_params) -> str: - request: "Request" = context["request"] - http_url: "URL" = request.url_for(name, **path_params) - return http_url.path - - -templates = Jinja2Templates(directory="templates", extensions=["jinja2.ext.i18n"]) -templates.env.globals["relative_path_for"] = relative_path_for -templates.env.install_gettext_translations(gnu_translations) diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index d75463f..0000000 --- a/babel.cfg +++ /dev/null @@ -1,8 +0,0 @@ -[ignore: **/static/**] -[ignore: **/theme/**] - -[python: **.py] - -[jinja2: **/templates/**.html] -encoding = utf-8 -silent = false diff --git a/configs/plugins/__init__.py b/configs/plugins/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/configs/plugins/env_vars.py b/configs/plugins/env_vars.py deleted file mode 100644 index f369e86..0000000 --- a/configs/plugins/env_vars.py +++ /dev/null @@ -1,16 +0,0 @@ -import os - -import pytest - - -@pytest.hookimpl(tryfirst=True) -def pytest_load_initial_conftests(args, early_config, parser): - with 
open(".env.template") as file: - data = file.read() - - for line in data.splitlines(): - if not line.strip() or line.startswith("#"): - continue - - key, value = line.split()[0].split("=", 1) - os.environ[key] = value diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index e6619e4..016ac74 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -5,10 +5,7 @@ then PORT=8080 fi -# locale -make messages-compile - # Migrations -alembic upgrade head +make migrate -hypercorn -b :$PORT -k uvloop app.main:app +poetry run python -m futuramaapi -b :$PORT diff --git a/futuramaapi/__init__.py b/futuramaapi/__init__.py new file mode 100644 index 0000000..f955241 --- /dev/null +++ b/futuramaapi/__init__.py @@ -0,0 +1,7 @@ +from .apps import app +from .apps.hypercorn import hypercorn_config + +__all__ = [ + "app", + "hypercorn_config", +] diff --git a/futuramaapi/__main__.py b/futuramaapi/__main__.py new file mode 100644 index 0000000..23e27bf --- /dev/null +++ b/futuramaapi/__main__.py @@ -0,0 +1,11 @@ +import sys + +from .apps import run + + +def _run() -> int: + return run(sys.argv[1:]) + + +if __name__ == "__main__": + sys.exit(_run()) diff --git a/futuramaapi/apps/__init__.py b/futuramaapi/apps/__init__.py new file mode 100644 index 0000000..85aabcf --- /dev/null +++ b/futuramaapi/apps/__init__.py @@ -0,0 +1,11 @@ +from fastapi import FastAPI + +from .app import futurama_api +from .hypercorn import run + +app: FastAPI = futurama_api.app + +__all__ = [ + "app", + "run", +] diff --git a/futuramaapi/apps/app.py b/futuramaapi/apps/app.py new file mode 100644 index 0000000..d9995a9 --- /dev/null +++ b/futuramaapi/apps/app.py @@ -0,0 +1,86 @@ +import mimetypes +from collections.abc import Generator +from contextlib import asynccontextmanager +from typing import Any + +from fastapi import FastAPI +from fastapi.routing import APIRouter +from fastapi.staticfiles import StaticFiles +from fastapi_pagination import add_pagination + +from futuramaapi.core import feature_flags, settings +from futuramaapi.middlewares.cors import CORSMiddleware +from futuramaapi.middlewares.secure import HTTPSRedirectMiddleware +from futuramaapi.repositories.session import session_manager + +mimetypes.add_type("image/webp", ".webp") + + +class FuturamaAPI: + def __init__( + self, + routers: list[APIRouter], + *, + lifespan: Generator[Any, Any, None] | Any | None, + ) -> None: + self.routers: list[APIRouter] = routers + self.app: FastAPI = FastAPI( + docs_url=None, + redoc_url=None, + lifespan=lifespan, + ) + + self.build() + + def _add_middlewares(self) -> None: + if feature_flags.enable_https_redirect: + self.app.add_middleware(HTTPSRedirectMiddleware) + + self.app.add_middleware( + CORSMiddleware, + allow_origins=settings.allow_origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + def _add_routers(self) -> None: + for router in self.routers: + self.app.include_router(router) + + def _add_static(self) -> None: + self.app.mount( + "/static", + StaticFiles(directory="static"), + name="static", + ) + + def build(self) -> None: + self._add_middlewares() + self._add_routers() + self._add_static() + + add_pagination(self.app) + + +@asynccontextmanager +async def _lifespan(_: FastAPI): + yield + if session_manager.engine is not None: + await session_manager.close() + + +def _get_routers() -> list[APIRouter]: + from futuramaapi.routers import api_router, graphql_router, root_router + + return [ + api_router, + graphql_router, + root_router, + ] + + +futurama_api: FuturamaAPI = FuturamaAPI( 
+ _get_routers(), + lifespan=_lifespan, +) diff --git a/futuramaapi/apps/hypercorn.py b/futuramaapi/apps/hypercorn.py new file mode 100644 index 0000000..4b5960e --- /dev/null +++ b/futuramaapi/apps/hypercorn.py @@ -0,0 +1,28 @@ +import sys +from typing import TYPE_CHECKING + +from hypercorn.__main__ import main + +if TYPE_CHECKING: + from collections.abc import Sequence + + +class Config: + worker_class = "uvloop" + + +hypercorn_config: Config = Config() + + +def run( + args: list[str] | None, +) -> int: + argv: Sequence[str] = args if args is not None else sys.argv[1:] + main( + [ + "futuramaapi:app", + "--config=python:futuramaapi.hypercorn_config", + *argv, + ] + ) + return 0 diff --git a/futuramaapi/core/__init__.py b/futuramaapi/core/__init__.py new file mode 100644 index 0000000..40dd32c --- /dev/null +++ b/futuramaapi/core/__init__.py @@ -0,0 +1,7 @@ +from futuramaapi.core.configs import email_settings, feature_flags, settings + +__all__ = [ + "feature_flags", + "email_settings", + "settings", +] diff --git a/app/core/settings.py b/futuramaapi/core/configs.py similarity index 53% rename from app/core/settings.py rename to futuramaapi/core/configs.py index af20f56..70a5c59 100644 --- a/app/core/settings.py +++ b/futuramaapi/core/configs.py @@ -2,7 +2,8 @@ from typing import Any from urllib.parse import ParseResult, urlparse -from pydantic import EmailStr, Field, PostgresDsn +from fastapi_mail import ConnectionConfig, FastMail, MessageSchema, MessageType +from pydantic import BaseModel, EmailStr, Field, HttpUrl, PostgresDsn from pydantic.fields import FieldInfo from pydantic_settings import ( BaseSettings, @@ -11,6 +12,8 @@ SettingsConfigDict, ) +from futuramaapi.helpers.templates import TEMPLATES_PATH + class EmailSettings(BaseSettings): default_from: EmailStr = Field( @@ -30,6 +33,59 @@ class EmailSettings(BaseSettings): env_prefix="email_", ) + @property + def fast_mail(self) -> FastMail: + return FastMail(self.connection_config) + + @property + def connection_config(self) -> ConnectionConfig: + return ConnectionConfig( + MAIL_USERNAME=self.host_user, + MAIL_PASSWORD=self.api_key, + MAIL_FROM=self.default_from, + MAIL_PORT=self.port, + MAIL_SERVER=self.host, + MAIL_FROM_NAME=self.from_name, + MAIL_STARTTLS=self.start_tls, + MAIL_SSL_TLS=self.ssl_tls, + USE_CREDENTIALS=self.use_credentials, + VALIDATE_CERTS=self.validate_certs, + TEMPLATE_FOLDER=settings.project_root / TEMPLATES_PATH, + ) + + @staticmethod + def get_message_schema( + subject: str, + emails: list[EmailStr], + template_body: BaseModel, + ) -> MessageSchema: + return MessageSchema( + subject=subject, + recipients=emails, + template_body=template_body.model_dump(), + subtype=MessageType.html, + ) + + async def send( + self, + emails: list[EmailStr], + subject: str, + template_body: BaseModel, + template_name: str, + /, + ): + if feature_flags.send_emails is False: + return + + await self.fast_mail.send_message( + self.get_message_schema( + subject, + emails, + template_body, + ), + template_name=template_name, + ) + email_settings = EmailSettings() @@ -78,13 +134,22 @@ def settings_customise_sources( # noqa: PLR0913 ) -> tuple[PydanticBaseSettingsSource, ...]: return (_EnvSource(settings_cls),) + def build_url(self, *, path: str | None = None) -> HttpUrl: + path = f"{self.static}/{path}" if path else f"{self.static}" + return HttpUrl.build( + scheme="https", + host=self.trusted_host, + path=path, + ) + settings = Settings() class FeatureFlags(BaseSettings): - activate_users: bool = False enable_https_redirect: bool = 
False + send_emails: bool = True + activate_users: bool = False feature_flags = FeatureFlags() diff --git a/alembic/versions/__init__.py b/futuramaapi/helpers/__init__.py similarity index 100% rename from alembic/versions/__init__.py rename to futuramaapi/helpers/__init__.py diff --git a/app/services/hashers.py b/futuramaapi/helpers/hashers.py similarity index 58% rename from app/services/hashers.py rename to futuramaapi/helpers/hashers.py index 1b819d7..ce33ef0 100644 --- a/app/services/hashers.py +++ b/futuramaapi/helpers/hashers.py @@ -1,69 +1,72 @@ import base64 import hashlib +import logging import math import secrets +from abc import ABC, abstractmethod from collections.abc import Callable from pydantic import BaseModel -RANDOM_STRING_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" +logger = logging.getLogger(__name__) class HasherBaseException(Exception): """Hasher Base Exception.""" -def pbkdf2( - password: bytes, - salt: bytes, - iterations: int, - /, - *, - dk_len: int | None = 0, - digest: Callable | None = None, -): - if digest is None: - digest = hashlib.sha256 - dk_len = dk_len or None - return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dk_len) - - -def compare( - val1: bytes, - val2: bytes, - /, -) -> bool: - return secrets.compare_digest(val1, val2) - - -def get_random_string( - length: int, - /, - *, - allowed_chars=RANDOM_STRING_CHARS, -): - return "".join(secrets.choice(allowed_chars) for i in range(length)) - - class DecodedPassword(BaseModel): algorithm: str - hash: str # noqa: A003 + hash: str iterations: int salt: str -class PasswordHasherBase: +class PasswordHasherBase(ABC): algorithm: str | None = None - library = None - salt_entropy = 128 - separator = "." + salt_entropy: int = 128 + separator: str = "." + random_string_chars: str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + + @staticmethod + def pbkdf2( + password: bytes, + salt: bytes, + iterations: int, + /, + *, + dk_len: int | None = 0, + digest: Callable | None = None, + ): + if digest is None: + digest = hashlib.sha256 + dk_len = dk_len or None + return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dk_len) + + def get_random_string( + self, + length: int, + /, + *, + allowed_chars: str | None = None, + ): + chars = allowed_chars if allowed_chars is not None else self.random_string_chars + return "".join(secrets.choice(chars) for _ in range(length)) + + @staticmethod + def compare( + val1: bytes, + val2: bytes, + /, + ) -> bool: + return secrets.compare_digest(val1, val2) def salt(self) -> str: - count = math.ceil(self.salt_entropy / math.log2(len(RANDOM_STRING_CHARS))) - return get_random_string(count, allowed_chars=RANDOM_STRING_CHARS) + count = math.ceil(self.salt_entropy / math.log2(len(self.random_string_chars))) + return self.get_random_string(count, allowed_chars=self.random_string_chars) - def verify(self, password, encoded, /) -> bool: - raise NotImplementedError() + @abstractmethod + def verify(self, password, encoded, /) -> bool: ... def _check_encode_args(self, password: str, salt: str, /): if not password: @@ -71,6 +74,7 @@ def _check_encode_args(self, password: str, salt: str, /): if not salt or self.separator in salt: raise ValueError() from None + @abstractmethod def encode( self, password: str, @@ -78,11 +82,10 @@ def encode( *, salt: str | None = None, iterations: int | None = None, - ) -> str: - raise NotImplementedError() + ) -> str: ... 
- def decode(self, encoded, /) -> DecodedPassword: - raise NotImplementedError() + @abstractmethod + def decode(self, encoded, /) -> DecodedPassword: ... class HashMismatch(HasherBaseException): @@ -109,7 +112,7 @@ def encode( if iterations is None: iterations = self.iterations - hash_ = pbkdf2( + hash_ = self.pbkdf2( password.encode(), salt.encode(), iterations, @@ -121,6 +124,7 @@ def encode( def decode(self, encoded, /) -> DecodedPassword: algorithm, iterations, salt, hash_ = encoded.split(self.separator, 3) if algorithm != self.algorithm: + logger.exception("Algorithm mismatch") raise HashMismatch() from None return DecodedPassword( algorithm=algorithm, @@ -136,7 +140,7 @@ def verify(self, password: str, encoded: str, /) -> bool: salt=decoded_password.salt, iterations=decoded_password.iterations, ) - return compare(encoded.encode(), encoded_.encode()) + return self.compare(encoded.encode(), encoded_.encode()) hasher = PasswordHasherPBKDF2() diff --git a/futuramaapi/helpers/pydantic.py b/futuramaapi/helpers/pydantic.py new file mode 100644 index 0000000..ed55fdf --- /dev/null +++ b/futuramaapi/helpers/pydantic.py @@ -0,0 +1,48 @@ +import json +import uuid +from typing import Any, ClassVar + +import pydash +from pydantic import BaseModel as BaseModelOrig +from pydantic import ConfigDict, Field, SecretStr +from pydash import camel_case + +from futuramaapi.helpers.hashers import PasswordHasherBase, hasher + + +class BaseModel(BaseModelOrig): + hasher: ClassVar[PasswordHasherBase] = hasher + + model_config = ConfigDict( + from_attributes=True, + populate_by_name=True, + alias_generator=camel_case, + ) + + def to_dict(self, *, by_alias: bool = True, reveal_secrets: bool = False, exclude_unset=False) -> dict: + result: dict = json.loads(self.model_dump_json(by_alias=by_alias, exclude_unset=exclude_unset)) + if reveal_secrets is False: + return result + + secret_dict: dict = {} + name: str + for name in self.model_fields_set: + field: Any = getattr(self, name) + if isinstance(field, SecretStr): + secret_dict.update( + { + name: field.get_secret_value(), + } + ) + return pydash.merge(result, secret_dict) + + +class BaseTokenModel(BaseModel): + def refresh_nonce(self) -> None: + self.nonce = self._get_nonce() + + @staticmethod + def _get_nonce() -> str: + return uuid.uuid4().hex + + nonce: str = Field(default_factory=_get_nonce) diff --git a/futuramaapi/helpers/templates.py b/futuramaapi/helpers/templates.py new file mode 100644 index 0000000..44f469e --- /dev/null +++ b/futuramaapi/helpers/templates.py @@ -0,0 +1,27 @@ +from pathlib import Path +from typing import TYPE_CHECKING + +from fastapi.templating import Jinja2Templates +from jinja2 import pass_context + +if TYPE_CHECKING: + from fastapi import Request + from starlette.datastructures import URL + +TEMPLATES_PATH: Path = Path("templates") + + +@pass_context +def relative_path_for(context: dict, name: str, /, **path_params) -> str: + request: "Request" = context["request"] + http_url: "URL" = request.url_for(name, **path_params) + return http_url.path + + +def _build_templates(templates_dir: Path) -> Jinja2Templates: + _templates: Jinja2Templates = Jinja2Templates(directory=str(templates_dir)) + _templates.env.globals["relative_path_for"] = relative_path_for + return _templates + + +templates: Jinja2Templates = _build_templates(TEMPLATES_PATH) diff --git a/app/graph_ql/__init__.py b/futuramaapi/middlewares/__init__.py similarity index 100% rename from app/graph_ql/__init__.py rename to futuramaapi/middlewares/__init__.py diff --git 
a/futuramaapi/middlewares/cors.py b/futuramaapi/middlewares/cors.py new file mode 100644 index 0000000..c77fb32 --- /dev/null +++ b/futuramaapi/middlewares/cors.py @@ -0,0 +1,10 @@ +from fastapi.middleware.cors import CORSMiddleware as CORSMiddlewareBase + + +class CORSMiddleware(CORSMiddlewareBase): + def is_allowed_origin(self, origin: str) -> bool: + # Starlette restricts to have origin "*" with allow_credentials for ``fastapi.middleware.cors.CORSMiddleware``. + # But for FuturamaAPI it's fine if anyone can access API. + # Not a security issue at all. But if you have any suggestions you are free to create a task here: + # https://github.com/koldakov/futuramaapi/issues. + return True diff --git a/futuramaapi/middlewares/secure.py b/futuramaapi/middlewares/secure.py new file mode 100644 index 0000000..1d2eaca --- /dev/null +++ b/futuramaapi/middlewares/secure.py @@ -0,0 +1,64 @@ +import logging + +from starlette import status +from starlette.datastructures import URL +from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint +from starlette.requests import Request +from starlette.responses import RedirectResponse, Response +from starlette.types import Scope + +from futuramaapi.core import settings + +logger = logging.getLogger(__name__) + + +class HTTPSRedirectMiddleware(BaseHTTPMiddleware): + https_port: int = 443 + http_port: int = 80 + + def is_secure(self, headers: dict): + try: + host: str = headers["host"] + except KeyError: + logger.info("Host not found in headers") + return False + try: + proto: str = headers["x-forwarded-proto"] + except KeyError: + logger.info("x-forwarded-proto not found in headers") + return False + try: + port: str = headers["x-forwarded-port"] + except KeyError: + logger.info("x-forwarded-port not found in headers") + return False + + if host == settings.trusted_host and proto in ("https", "wss") and int(port) == self.https_port: + return True + return False + + def _fix_url(self, scope: Scope, /): + url = URL(scope=scope) + redirect_scheme = {"http": "https", "ws": "wss"}[url.scheme] + netloc = url.hostname if url.port in (self.http_port, self.https_port) else url.netloc + return url.replace(scheme=redirect_scheme, netloc=netloc) + + @staticmethod + def headers_to_dict(headers: list, /) -> dict: + return {h[0].decode(): h[1].decode() for h in headers} + + async def dispatch( + self, + request: Request, + call_next: RequestResponseEndpoint, + ) -> Response: + headers: dict = self.headers_to_dict(request.scope["headers"]) + if self.is_secure(headers): + return await call_next(request) + + url: URL = self._fix_url(request.scope) + return RedirectResponse( + url, + status_code=status.HTTP_301_MOVED_PERMANENTLY, + headers={h[0].decode(): h[1].decode() for h in request.scope["headers"]}, + ) diff --git a/app/graph_ql/tests/__init__.py b/futuramaapi/mixins/__init__.py similarity index 100% rename from app/graph_ql/tests/__init__.py rename to futuramaapi/mixins/__init__.py diff --git a/futuramaapi/mixins/pydantic.py b/futuramaapi/mixins/pydantic.py new file mode 100644 index 0000000..f76ed29 --- /dev/null +++ b/futuramaapi/mixins/pydantic.py @@ -0,0 +1,231 @@ +import json +import logging +from abc import ABC, abstractmethod +from copy import deepcopy +from datetime import UTC, datetime, timedelta +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Self + +import jwt +from fastapi import Request +from fastapi_pagination import Page +from fastapi_pagination.ext.sqlalchemy import paginate +from jwt.exceptions import 
ExpiredSignatureError, InvalidSignatureError, InvalidTokenError +from pydantic.main import IncEx +from sqlalchemy.ext.asyncio.session import AsyncSession +from starlette.templating import _TemplateResponse + +from futuramaapi.core import settings +from futuramaapi.helpers.pydantic import BaseModel +from futuramaapi.helpers.templates import templates +from futuramaapi.repositories.base import Base, FilterStatementKwargs, ModelAlreadyExistsError, ModelDoesNotExistError +from futuramaapi.routers.exceptions import ModelExistsError, ModelNotFoundError + +if TYPE_CHECKING: + from sqlalchemy import Select + +logger = logging.getLogger(__name__) + + +class _PydanticSanityCheck[Model: BaseModel]: # type: ignore[valid-type] + _required_methods: ClassVar[tuple[str, ...]] = ( + "model_validate", + "model_dump_json", + "model_dump", + ) + + @classmethod + @abstractmethod + def model_validate( + cls: type[Model], # type: ignore[name-defined] + obj: Any, + *, + strict: bool | None = None, + from_attributes: bool | None = None, + context: dict[str, Any] | None = None, + ) -> Model: # type: ignore[name-defined] + ... + + @abstractmethod + def model_dump_json( # noqa: PLR0913 + self, + *, + indent: int | None = None, + include: IncEx = None, + exclude: IncEx = None, + context: dict[str, Any] | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + serialize_as_any: bool = False, + ) -> str: ... + + @abstractmethod + def model_dump( # noqa: PLR0913 + self, + *, + mode: Literal["json", "python"] | str = "python", + include: IncEx = None, + exclude: IncEx = None, + context: dict[str, Any] | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + serialize_as_any: bool = False, + ) -> dict[str, Any]: ... + + def __init_subclass__(cls): + """ + Sanity check. 
+ """ + if not all(hasattr(cls, attr) for attr in cls._required_methods): + raise RuntimeError(f"Class {cls.__name__} should be inherited from ``pydantic.BaseModel``.") + + +class BaseModelDatabaseMixin[Model: BaseModel](ABC, _PydanticSanityCheck): # type: ignore[valid-type] + model: ClassVar[type[Base]] + + id: int + + @classmethod + async def count(cls, session: AsyncSession, /) -> int: + return await cls.model.count(session) + + @classmethod + async def get(cls, session: AsyncSession, id_: int, /) -> Self: + try: + obj: Base = await cls.model.get(session, id_) + except ModelDoesNotExistError as err: + logger.info( + "Model already exists", + extra={ + "id": id_, + "err": err, + }, + ) + raise ModelNotFoundError() from None + return cls.model_validate(obj) + + @classmethod + async def paginate( + cls, + session: AsyncSession, + /, + filter_params: FilterStatementKwargs | None = None, + ) -> Page[Model]: # type: ignore[name-defined] + if filter_params is None: + filter_params = FilterStatementKwargs( + offset=0, + limit=20, + ) + + statement: Select[tuple[Base]] = cls.model.get_filter_statement(filter_params) + return await paginate( + session, + statement, + ) + + @classmethod + async def create(cls, session: AsyncSession, data: BaseModel, /) -> Self: + try: + obj: Base = await cls.model.create(session, data) + except ModelAlreadyExistsError as err: + logger.info( + "Model already exists", + extra={ + "data": data.model_dump(), + "err": err, + }, + ) + raise ModelExistsError() from None + return cls.model_validate(obj) + + async def update(self, session: AsyncSession, data: BaseModel): + data_dict: dict[str, str] = data.to_dict(by_alias=False, reveal_secrets=True, exclude_unset=True) + obj: Base = await self.model.update(session, self.id, data_dict) + + updated: BaseModel = self.model_validate(obj) + for field in updated.model_fields_set: + val: Any = getattr(updated, field) + setattr(self, field, val) + + @classmethod + async def filter(cls, session: AsyncSession, kwargs: FilterStatementKwargs, /) -> list[Self]: + return [cls.model_validate(character) for character in await cls.model.filter(session, kwargs)] + + +class TokenBaseError(Exception): ... + + +class DecodedTokenError(TokenBaseError): ... + + +class BaseModelTokenMixin(ABC, _PydanticSanityCheck): + @staticmethod + def _get_payload(payload: dict, exp: datetime, /) -> dict: + cleaned_payload: dict = deepcopy(payload) + cleaned_payload.update( + { + "exp": exp, + } + ) + + return cleaned_payload + + def tokenize(self, exp: int, /, *, algorithm="HS256") -> str: + """Tokenizes the given model. + + Args: + exp (int): Expiration time in seconds. + algorithm (str): Tokenize algorithm. Default is HS256. + + Returns: + str: JWT. 
+ """ + exp_time: datetime = datetime.now(UTC) + timedelta(seconds=exp) + payload = json.loads(self.model_dump_json(by_alias=True)) + return jwt.encode( + self._get_payload(payload, exp_time), + settings.secret_key, + algorithm=algorithm, + ) + + @classmethod + def decode(cls, token: str, /, *, algorithm="HS256"): + try: + token_: dict = jwt.decode(token, key=settings.secret_key, algorithms=[algorithm]) + except (ExpiredSignatureError, InvalidSignatureError, InvalidTokenError): + raise DecodedTokenError() from None + return cls(**token_) + + +class BaseModelTemplateMixin(ABC, _PydanticSanityCheck): + template_name: ClassVar[str] + + def get_context(self) -> dict: + return self.model_dump() + + def get_response( + self, + request: Request, + /, + *, + template_name: str | None = None, + ) -> _TemplateResponse: + if template_name is None: + template_name = self.template_name + + return templates.TemplateResponse( + request, + template_name, + context=self.get_context(), + ) + + @classmethod + @abstractmethod + async def from_request(cls, session: AsyncSession, request: Request, /) -> Self: ... diff --git a/app/middlewares/__init__.py b/futuramaapi/repositories/__init__.py similarity index 100% rename from app/middlewares/__init__.py rename to futuramaapi/repositories/__init__.py diff --git a/futuramaapi/repositories/base.py b/futuramaapi/repositories/base.py new file mode 100644 index 0000000..d6f6268 --- /dev/null +++ b/futuramaapi/repositories/base.py @@ -0,0 +1,264 @@ +import logging +from collections.abc import Sequence +from enum import Enum +from typing import TYPE_CHECKING, Any, Literal, NamedTuple, Self +from uuid import UUID, uuid4 + +from asyncpg.exceptions import UniqueViolationError +from sqlalchemy import UUID as COLUMN_UUID +from sqlalchemy import Column, DateTime, Row, Select, select +from sqlalchemy.exc import IntegrityError, NoResultFound +from sqlalchemy.ext.asyncio.session import AsyncSession +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column +from sqlalchemy.orm.attributes import InstrumentedAttribute +from sqlalchemy.orm.strategy_options import Load +from sqlalchemy.sql import func +from sqlalchemy.sql.elements import BinaryExpression, UnaryExpression + +from futuramaapi.helpers.pydantic import BaseModel + +if TYPE_CHECKING: + from sqlalchemy.engine.result import Result + +logger = logging.getLogger(__name__) + + +class ModelBaseError(Exception): ... + + +class ModelDoesNotExistError(ModelBaseError): ... + + +class ModelAlreadyExistsError(ModelBaseError): ... + + +class ModelFieldError(ModelBaseError): ... + + +class FilterStatementKwargs(NamedTuple): + offset: int | None = None + limit: int | None = None + order_by: str | None = "id" + order_by_direction: Literal["asc", "desc"] = "asc" + extra: dict | None = None + + +class Base(DeclarativeBase): + __abstract__ = True + + negation: str = "!" 
+ + id: Mapped[int] = mapped_column(primary_key=True) + + created_at = Column( + DateTime( + timezone=True, + ), + server_default=func.now(), + nullable=False, + ) + uuid = Column( + COLUMN_UUID( + as_uuid=True, + ), + primary_key=False, + unique=True, + nullable=False, + default=uuid4, + ) + + order_by_direction: Literal["asc", "desc"] = "asc" + + @classmethod + async def count(cls, session: AsyncSession, /) -> int: + cursor: Result = await session.execute(func.count(cls.id)) + return cursor.scalar() + + @staticmethod + def get_select_in_load() -> list[Load]: + return [] + + @classmethod + def get_options(cls) -> list[Load]: + return [*cls.get_select_in_load()] + + @classmethod + def get_cond_list(cls, **kwargs) -> list[BinaryExpression]: + return [] + + @classmethod + async def get( + cls, + session: AsyncSession, + val: int | str | UUID, + /, + *, + field: InstrumentedAttribute | None = None, + ) -> Self: + options: list[Load] = cls.get_options() + if field is None: + field = cls.id + + statement: Select = select(cls).where(field == val) + if options: + statement = statement.options(*options) + + cursor: Result = await session.execute(statement) + try: + return cursor.scalars().one() + except NoResultFound as err: + raise ModelDoesNotExistError() from err + + @classmethod + async def get_or_none( + cls, + session: AsyncSession, + val: int | str | UUID, + /, + field: InstrumentedAttribute | None = None, + ) -> Self | None: + try: + return await cls.get(session, val, field=field) + except ModelDoesNotExistError: + return None + + @classmethod + def get_order_by( + cls, + *, + field_name: str | None = None, + direction: Literal["asc", "desc"] = "asc", + ) -> UnaryExpression: + field: InstrumentedAttribute + if field_name is None: + field = cls.id + else: + field = cls.__table__.c[field_name.lower()] + + if direction == "desc": + return field.desc() + return field.asc() + + @classmethod + def get_filter_statement( + cls, + kwargs: FilterStatementKwargs, + /, + ) -> Select[tuple[Self]]: + statement: Select[tuple[Base]] = select(cls) + statement = statement.order_by( + cls.get_order_by( + field_name=kwargs.order_by, + direction=kwargs.order_by_direction, + ) + ) + + cond_list: list = [] + if kwargs.extra is not None: + cond_list = cls.get_cond_list(**kwargs.extra) + if cond_list: + statement = statement.where(*cond_list) + options: list[Load] = cls.get_options() + if options: + statement = statement.options(*options) + if kwargs.offset is not None: + statement = statement.offset(kwargs.offset) + if kwargs.limit is not None: + statement = statement.limit(kwargs.limit) + return statement + + @classmethod + def get_binary_cond(cls, field: Column[str | Enum], value: str, /) -> BinaryExpression: + if value.startswith(cls.negation): + return field != value[1:] + + return field == value + + @classmethod + async def create( + cls, + session: AsyncSession, + data: BaseModel, + /, + *, + commit: bool = True, + extra_fields: dict[ + str, + Any, + ] + | None = None, + ) -> Self: + obj: Self = cls(**data.to_dict(by_alias=False, reveal_secrets=True)) + if extra_fields is not None: + for name, value in extra_fields.items(): + setattr(obj, name, value) + session.add(obj) + if commit is True: + try: + await session.commit() + except IntegrityError as err: + if err.orig.sqlstate == UniqueViolationError.sqlstate: + raise ModelAlreadyExistsError() from None + await session.rollback() + raise + return obj + + @classmethod + def validate_field(cls, field: str, value: Any, /) -> None: + try: + field_: 
InstrumentedAttribute = getattr(cls, field) + except AttributeError as err: + logger.exception( + "Field does not exist.", + extra={ + "data": { + "field": field, + "err": err, + "model": cls, + } + }, + ) + raise ModelFieldError() from None + if field_.nullable is False and value is None: + logger.exception( + "Attempt to assign None to non nullable field.", + extra={ + "data": { + "field": field, + "model": cls, + } + }, + ) + raise ModelFieldError() from None + + @classmethod + async def update( + cls, + session: AsyncSession, + id_: int, + data: dict, + /, + ) -> Self: + try: + obj: Self = await cls.get(session, id_) + except ModelDoesNotExistError: + raise + + for field, value in data.items(): + if value is not None: + setattr(obj, field, value) + + await session.commit() + + return obj + + @classmethod + async def filter( + cls, + session: AsyncSession, + kwargs: FilterStatementKwargs, + /, + ) -> Sequence[Row[tuple[Any, ...] | Any]]: + statement = cls.get_filter_statement(kwargs) + cursor: Result = await session.execute(statement) + return cursor.scalars().all() diff --git a/alembic/README b/futuramaapi/repositories/migrations/README similarity index 100% rename from alembic/README rename to futuramaapi/repositories/migrations/README diff --git a/app/repositories/__init__.py b/futuramaapi/repositories/migrations/__init__.py similarity index 100% rename from app/repositories/__init__.py rename to futuramaapi/repositories/migrations/__init__.py diff --git a/alembic/env.py b/futuramaapi/repositories/migrations/env.py similarity index 93% rename from alembic/env.py rename to futuramaapi/repositories/migrations/env.py index 4f8e26c..9e0a565 100644 --- a/alembic/env.py +++ b/futuramaapi/repositories/migrations/env.py @@ -1,14 +1,14 @@ import asyncio from logging.config import fileConfig +from alembic import context from sqlalchemy import pool from sqlalchemy.engine import Connection from sqlalchemy.ext.asyncio import async_engine_from_config -from alembic import context -from app.core import settings -from app.repositories import models # noqa: F401, do not remove. -from app.repositories.base import Base +from futuramaapi.core import settings +from futuramaapi.repositories import models # noqa: F401, do not remove. +from futuramaapi.repositories.base import Base # this is the Alembic Config object, which provides # access to the values within the .ini file in use. diff --git a/alembic/script.py.mako b/futuramaapi/repositories/migrations/script.py.mako similarity index 100% rename from alembic/script.py.mako rename to futuramaapi/repositories/migrations/script.py.mako diff --git a/alembic/versions/1353be8a56b8_initial_project_structure.py b/futuramaapi/repositories/migrations/versions/1353be8a56b8_initial_project_structure.py similarity index 100% rename from alembic/versions/1353be8a56b8_initial_project_structure.py rename to futuramaapi/repositories/migrations/versions/1353be8a56b8_initial_project_structure.py index c1d72f3..2f4fc4e 100644 --- a/alembic/versions/1353be8a56b8_initial_project_structure.py +++ b/futuramaapi/repositories/migrations/versions/1353be8a56b8_initial_project_structure.py @@ -4,12 +4,12 @@ Revises: d413d1284339 Create Date: 2023-12-02 18:33:01.171361 """ + from collections.abc import Sequence import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - from alembic import op +from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. 
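Filtering on `Base` subclasses is driven by `FilterStatementKwargs`; a sketch (not part of the patch) of one call site, assuming the `CharacterModel` and `session_manager` added further down in this diff and a configured database:

```python
# Sketch only: assumes CharacterModel and session_manager from later in this patch.
import asyncio

from futuramaapi.repositories.base import FilterStatementKwargs
from futuramaapi.repositories.models import CharacterModel
from futuramaapi.repositories.session import session_manager


async def list_characters() -> list:
    kwargs = FilterStatementKwargs(
        offset=0,
        limit=20,
        order_by="name",
        order_by_direction="desc",
        # A leading "!" negates a condition, per Base.negation / get_binary_cond.
        extra={"gender": "FEMALE", "species": "!ROBOT"},
    )
    async with session_manager.session() as session:
        return list(await CharacterModel.filter(session, kwargs))


if __name__ == "__main__":
    asyncio.run(list_characters())
```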
revision: str = "1353be8a56b8" diff --git a/alembic/versions/1b86ee33d1ba_add_broadcast_number_to_episode.py b/futuramaapi/repositories/migrations/versions/1b86ee33d1ba_add_broadcast_number_to_episode.py similarity index 100% rename from alembic/versions/1b86ee33d1ba_add_broadcast_number_to_episode.py rename to futuramaapi/repositories/migrations/versions/1b86ee33d1ba_add_broadcast_number_to_episode.py index b420ea5..a1936da 100644 --- a/alembic/versions/1b86ee33d1ba_add_broadcast_number_to_episode.py +++ b/futuramaapi/repositories/migrations/versions/1b86ee33d1ba_add_broadcast_number_to_episode.py @@ -4,10 +4,10 @@ Revises: c03e060df1b8 Create Date: 2023-12-21 21:57:04.032458 """ + from collections.abc import Sequence import sqlalchemy as sa - from alembic import op # revision identifiers, used by Alembic. diff --git a/alembic/versions/928d4358646c_add_image_field.py b/futuramaapi/repositories/migrations/versions/928d4358646c_add_image_field.py similarity index 95% rename from alembic/versions/928d4358646c_add_image_field.py rename to futuramaapi/repositories/migrations/versions/928d4358646c_add_image_field.py index 73b3652..de3144d 100644 --- a/alembic/versions/928d4358646c_add_image_field.py +++ b/futuramaapi/repositories/migrations/versions/928d4358646c_add_image_field.py @@ -4,14 +4,15 @@ Revises: 1353be8a56b8 Create Date: 2023-12-08 20:58:59.382849 """ + from collections.abc import Sequence import sqlalchemy as sa +from alembic import op from fastapi_storages import FileSystemStorage from fastapi_storages.integrations.sqlalchemy import ImageType -from alembic import op -from app.core import settings +from futuramaapi.core import settings # revision identifiers, used by Alembic. revision: str = "928d4358646c" diff --git a/app/repositories/tests/__init__.py b/futuramaapi/repositories/migrations/versions/__init__.py similarity index 100% rename from app/repositories/tests/__init__.py rename to futuramaapi/repositories/migrations/versions/__init__.py diff --git a/alembic/versions/c03e060df1b8_add_production_code_to_episode.py b/futuramaapi/repositories/migrations/versions/c03e060df1b8_add_production_code_to_episode.py similarity index 100% rename from alembic/versions/c03e060df1b8_add_production_code_to_episode.py rename to futuramaapi/repositories/migrations/versions/c03e060df1b8_add_production_code_to_episode.py index 190fb65..dc07e25 100644 --- a/alembic/versions/c03e060df1b8_add_production_code_to_episode.py +++ b/futuramaapi/repositories/migrations/versions/c03e060df1b8_add_production_code_to_episode.py @@ -4,10 +4,10 @@ Revises: 928d4358646c Create Date: 2023-12-21 20:12:27.108201 """ + from collections.abc import Sequence import sqlalchemy as sa - from alembic import op # revision identifiers, used by Alembic. 
diff --git a/alembic/versions/d413d1284339_initial_revision.py b/futuramaapi/repositories/migrations/versions/d413d1284339_initial_revision.py similarity index 99% rename from alembic/versions/d413d1284339_initial_revision.py rename to futuramaapi/repositories/migrations/versions/d413d1284339_initial_revision.py index 1484abe..ea24303 100644 --- a/alembic/versions/d413d1284339_initial_revision.py +++ b/futuramaapi/repositories/migrations/versions/d413d1284339_initial_revision.py @@ -4,6 +4,7 @@ Revises: Create Date: 2023-11-25 19:46:49.496715 """ + from collections.abc import Sequence revision: str = "d413d1284339" diff --git a/alembic/versions/ee5656c8dc7f_define_user_model.py b/futuramaapi/repositories/migrations/versions/ee5656c8dc7f_define_user_model.py similarity index 100% rename from alembic/versions/ee5656c8dc7f_define_user_model.py rename to futuramaapi/repositories/migrations/versions/ee5656c8dc7f_define_user_model.py index 2cfd326..ab5d552 100644 --- a/alembic/versions/ee5656c8dc7f_define_user_model.py +++ b/futuramaapi/repositories/migrations/versions/ee5656c8dc7f_define_user_model.py @@ -4,10 +4,10 @@ Revises: 1b86ee33d1ba Create Date: 2024-01-21 21:40:59.557432 """ + from collections.abc import Sequence import sqlalchemy as sa - from alembic import op revision: str = "ee5656c8dc7f" diff --git a/futuramaapi/repositories/models.py b/futuramaapi/repositories/models.py new file mode 100644 index 0000000..72ae71b --- /dev/null +++ b/futuramaapi/repositories/models.py @@ -0,0 +1,228 @@ +from enum import Enum + +from fastapi_storages import FileSystemStorage +from fastapi_storages.integrations.sqlalchemy import ImageType +from sqlalchemy import ( + VARCHAR, + Boolean, + Column, + Date, + ForeignKey, + Integer, + SmallInteger, +) +from sqlalchemy.dialects.postgresql import ENUM # TODO: engine agnostic. 
+from sqlalchemy.orm import Mapped, mapped_column, relationship, selectinload +from sqlalchemy.orm.strategy_options import Load +from sqlalchemy.sql.elements import BinaryExpression + +from futuramaapi.core import settings +from futuramaapi.repositories.base import Base + + +class SeasonModel(Base): + __tablename__ = "seasons" + + # Mappers + episodes: Mapped[list["EpisodeModel"]] = relationship( + back_populates="season", + ) + + @staticmethod + def get_select_in_load() -> list[Load]: + return [selectinload(SeasonModel.episodes)] + + +class EpisodeCharacterAssociationModel(Base): + __tablename__ = "episode_character_association" + + id = None + created_at = None + uuid = None + + episode_id: Mapped[int] = mapped_column( + ForeignKey("episodes.id"), + primary_key=True, + ) + character_id: Mapped[int] = mapped_column( + ForeignKey("characters.id"), + primary_key=True, + ) + + +class EpisodeModel(Base): + __tablename__ = "episodes" + + name = Column( + VARCHAR( + length=128, + ), + nullable=True, + ) + air_date = Column( + Date(), + nullable=True, + ) + duration = Column( + Integer, + nullable=True, + ) + production_code = Column( + VARCHAR( + length=8, + ), + nullable=True, + ) + broadcast_number = Column( + SmallInteger, + nullable=True, + ) + + # Mappers + season_id: Mapped[int] = mapped_column( + ForeignKey("seasons.id"), + ) + season: Mapped["SeasonModel"] = relationship( + back_populates="episodes", + ) + + characters: Mapped[list["CharacterModel"]] = relationship( + secondary="episode_character_association", + back_populates="episodes", + ) + + @staticmethod + def get_select_in_load() -> list[Load]: + return [selectinload(EpisodeModel.season)] + + +class CharacterModel(Base): + __tablename__ = "characters" + + class CharacterSpecies(Enum): + HUMAN = "HUMAN" + ROBOT = "ROBOT" + HEAD = "HEAD" + ALIEN = "ALIEN" + MUTANT = "MUTANT" + MONSTER = "MONSTER" + UNKNOWN = "UNKNOWN" + + class CharacterStatus(Enum): + ALIVE = "ALIVE" + DEAD = "DEAD" + UNKNOWN = "UNKNOWN" + + class CharacterGender(Enum): + MALE = "MALE" + FEMALE = "FEMALE" + UNKNOWN = "UNKNOWN" + + name = Column( + VARCHAR( + length=128, + ), + nullable=False, + ) + status = Column( + ENUM( + CharacterStatus, + ), + nullable=False, + ) + gender = Column( + ENUM( + CharacterGender, + ), + nullable=False, + ) + species = Column( + ENUM( + CharacterSpecies, + ), + nullable=False, + ) + image = Column( + ImageType( + storage=FileSystemStorage(path=settings.project_root / settings.static), + ), + ) + + # Mappers + episodes: Mapped[list["EpisodeModel"]] = relationship( + secondary="episode_character_association", + back_populates="characters", + ) + + @classmethod + def get_cond_list(cls, **kwargs) -> list[BinaryExpression]: + gender: str | None = kwargs.get("gender") + status: str | None = kwargs.get("status") + species: str | None = kwargs.get("species") + query: str | None = kwargs.get("query") + + cond_list = [] + if gender is not None: + gender = gender.upper() + cond_list.append(cls.get_binary_cond(cls.gender, gender)) + if status is not None: + status = status.upper() + cond_list.append(cls.get_binary_cond(cls.status, status)) + if species is not None: + species = species.upper() + cond_list.append(cls.get_binary_cond(cls.species, species)) + + if query is not None: + cond_list.append(cls.name.ilike(f"%{query.lower()}%")) + return cond_list + + +class UserModel(Base): + __tablename__ = "users" + + name = Column( + VARCHAR( + length=64, + ), + nullable=False, + ) + surname = Column( + VARCHAR( + length=64, + ), + nullable=False, + 
) + middle_name = Column( + VARCHAR( + length=64, + ), + nullable=True, + ) + email = Column( + VARCHAR( + length=320, + ), + nullable=False, + unique=True, + ) + username = Column( + VARCHAR( + length=64, + ), + nullable=False, + unique=True, + ) + password = Column( + VARCHAR( + length=128, + ), + nullable=False, + ) + is_confirmed = Column( + Boolean, + default=False, + ) + is_subscribed = Column( + Boolean, + default=True, + ) diff --git a/futuramaapi/repositories/session.py b/futuramaapi/repositories/session.py new file mode 100644 index 0000000..b7fc1d5 --- /dev/null +++ b/futuramaapi/repositories/session.py @@ -0,0 +1,69 @@ +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager +from typing import Any + +from pydantic import PostgresDsn +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine + +from futuramaapi.core import settings + + +class SessionManager: + def __init__(self, host: PostgresDsn, /, *, kwargs: dict[str, Any] | None = None) -> None: + if kwargs is None: + kwargs = {} + + self.engine: AsyncEngine | None = create_async_engine(str(host), **kwargs) + self._session_maker: async_sessionmaker[AsyncSession] | None = async_sessionmaker( + autocommit=False, + bind=self.engine, + expire_on_commit=False, + ) + + async def close(self) -> None: + if self.engine is None: + raise Exception("DatabaseSessionManager is not initialized") + + await self.engine.dispose() + + self.engine = None + self._session_maker = None + + @asynccontextmanager + async def connect(self) -> AsyncIterator[AsyncConnection]: + if self.engine is None: + raise RuntimeError("DatabaseSessionManager is not initialized") + + async with self.engine.begin() as connection: + try: + yield connection + except Exception: + await connection.rollback() + raise + + @asynccontextmanager + async def session(self) -> AsyncIterator[AsyncSession]: + if self._session_maker is None: + raise Exception("DatabaseSessionManager is not initialized") + + session = self._session_maker() + try: + yield session + except Exception: + await session.rollback() + raise + finally: + await session.close() + + +session_manager: SessionManager = SessionManager( + settings.database_url, + kwargs={ + "echo": True, + }, +) + + +async def get_async_session(): + async with session_manager.session() as session: + yield session diff --git a/futuramaapi/routers/__init__.py b/futuramaapi/routers/__init__.py new file mode 100644 index 0000000..083141e --- /dev/null +++ b/futuramaapi/routers/__init__.py @@ -0,0 +1,27 @@ +from fastapi import APIRouter + +from .callbacks import router as callbacks_router +from .characters import router as characters_router +from .episodes import router as episodes_router +from .graphql import router as graphql_router +from .notifications import router as notification_router +from .root import router as root_router +from .seasons import router as seasons_router +from .tokens import router as tokens_router +from .users import router as users_router + +__all__ = [ + "api_router", + "graphql_router", + "root_router", +] + +api_router = APIRouter(prefix="/api") + +api_router.include_router(callbacks_router) +api_router.include_router(characters_router) +api_router.include_router(episodes_router) +api_router.include_router(notification_router) +api_router.include_router(seasons_router) +api_router.include_router(tokens_router) +api_router.include_router(users_router) diff --git a/futuramaapi/routers/callbacks/__init__.py 
b/futuramaapi/routers/callbacks/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/callbacks/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/app/routers/callbacks.py b/futuramaapi/routers/callbacks/api.py similarity index 77% rename from app/routers/callbacks.py rename to futuramaapi/routers/callbacks/api.py index be2ab7e..10454e2 100644 --- a/app/routers/callbacks.py +++ b/futuramaapi/routers/callbacks/api.py @@ -1,19 +1,21 @@ from fastapi import APIRouter, BackgroundTasks, Depends, status from sqlalchemy.ext.asyncio.session import AsyncSession -from app.repositories.sessions import get_async_session -from app.services.callbacks import ( +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.callbacks.schemas import Character +from futuramaapi.routers.episodes.schemas import Episode +from futuramaapi.routers.seasons.schemas import Season + +from .schemas import ( + CallbackObjectResponse, CallbackRequest, CallbackResponse, - CharacterCallbackResponse, - EpisodeCallbackResponse, - SeasonCallbackResponse, - process_characters_callback, - process_episodes_callback, - process_seasons_callback, ) -router = APIRouter(prefix="/callbacks") +router = APIRouter( + prefix="/callbacks", + tags=["callbacks"], +) _characters_callbacks_router = APIRouter() @@ -23,7 +25,7 @@ status_code=status.HTTP_200_OK, ) def character_callback( - body: CharacterCallbackResponse, + body: CallbackObjectResponse, ): """Request to the provided callback URL.""" @@ -37,7 +39,7 @@ def character_callback( ) async def create_characters_callback_request( character_id: int, - character_request: CallbackRequest, + request: CallbackRequest, background_tasks: BackgroundTasks, session: AsyncSession = Depends(get_async_session), # noqa: B008 ) -> CallbackResponse: @@ -49,7 +51,7 @@ async def create_characters_callback_request( * Receive a delay after which the callback will be sent. * Receive a notification back to the API, as a callback. """ - return await process_characters_callback(character_id, character_request, session, background_tasks) + return await CallbackResponse.process(session, Character, request, character_id, background_tasks) _episodes_callbacks_router = APIRouter() @@ -60,7 +62,7 @@ async def create_characters_callback_request( status_code=status.HTTP_200_OK, ) def episodes_callback( - body: EpisodeCallbackResponse, + body: CallbackObjectResponse, ): """Request to the provided callback URL.""" @@ -74,7 +76,7 @@ def episodes_callback( ) async def create_episodes_callback_request( episode_id: int, - episode_request: CallbackRequest, + request: CallbackRequest, background_tasks: BackgroundTasks, session: AsyncSession = Depends(get_async_session), # noqa: B008 ) -> CallbackResponse: @@ -86,7 +88,7 @@ async def create_episodes_callback_request( * Receive a delay after which the callback will be sent. * Receive a notification back to the API, as a callback. """ - return await process_episodes_callback(episode_id, episode_request, session, background_tasks) + return await CallbackResponse.process(session, Episode, request, episode_id, background_tasks) # Season related endpoints. 
@@ -98,7 +100,7 @@ async def create_episodes_callback_request( status_code=status.HTTP_200_OK, ) def seasons_callback( - body: SeasonCallbackResponse, + body: CallbackObjectResponse, ): """Request to the provided callback URL.""" @@ -112,7 +114,7 @@ def seasons_callback( ) async def create_seasons_callback_request( season_id: int, - season_request: CallbackRequest, + request: CallbackRequest, background_tasks: BackgroundTasks, session: AsyncSession = Depends(get_async_session), # noqa: B008 ) -> CallbackResponse: @@ -124,4 +126,4 @@ async def create_seasons_callback_request( * Receive a delay after which the callback will be sent. * Receive a notification back to the API, as a callback. """ - return await process_seasons_callback(season_id, season_request, session, background_tasks) + return await CallbackResponse.process(session, Season, request, season_id, background_tasks) diff --git a/futuramaapi/routers/callbacks/schemas.py b/futuramaapi/routers/callbacks/schemas.py new file mode 100644 index 0000000..0222606 --- /dev/null +++ b/futuramaapi/routers/callbacks/schemas.py @@ -0,0 +1,118 @@ +from asyncio import sleep +from random import randint +from typing import Literal, Self + +from fastapi import BackgroundTasks, HTTPException +from httpx import AsyncClient, Response +from pydantic import Field, HttpUrl +from sqlalchemy.ext.asyncio.session import AsyncSession + +from futuramaapi.helpers.pydantic import BaseModel +from futuramaapi.routers.characters.schemas import Character +from futuramaapi.routers.episodes.schemas import Episode +from futuramaapi.routers.seasons.schemas import Season + +MIN_DELAY: int = 5 +MAX_DELAY: int = 10 + + +class DoesNotExist(BaseModel): + id: int = Field( + description="Requested Object ID.", + ) + detail: str = Field( + "Not found", + examples=[ + "Not found", + ], + ) + + +class CallbackObjectResponse(BaseModel): + # Can't use type even with noqa: A003, cause native type is being used for a arg typing below. 
+ kind: Literal["Character", "Episode", "Season"] = Field( + alias="type", + description="Requested Object type.", + ) + item: Character | Episode | Season | DoesNotExist + + @classmethod + async def from_item( + cls, + session: AsyncSession, + requested_object: type[Character | Episode | Season], + id_: int, + /, + ) -> Self: + item: Character | Episode | Season | DoesNotExist + try: + item = await requested_object.get(session, id_) + except HTTPException: + item = DoesNotExist( + id=id_, + ) + return cls( + kind=requested_object.__name__, + item=item, + ) + + async def send_callback(self, url: HttpUrl, /) -> None: + async with AsyncClient(http2=True) as client: + callback_response: Response = await client.post( + f"{url}", + json=self.to_dict(), + ) + callback_response.raise_for_status() + + +class CallbackRequest(BaseModel): + callback_url: HttpUrl + + +class CallbackResponse(BaseModel): + @staticmethod + def _generate_random_delay() -> int: + return randint(MIN_DELAY, MAX_DELAY) # noqa: S311 + + delay: int = Field( + default_factory=_generate_random_delay, + ge=MIN_DELAY, + le=MAX_DELAY, + description="Delay after which the callback will be sent.", + ) + + async def process_background_task( + self, + session: AsyncSession, + requested_object: type[Character | Episode | Season], + request: CallbackRequest, + id_: int, + /, + ) -> None: + await sleep(self.delay) + callback_response: CallbackObjectResponse = await CallbackObjectResponse.from_item( + session, + requested_object, + id_, + ) + await callback_response.send_callback(request.callback_url) + + @classmethod + async def process( # noqa: PLR0913 + cls, + session: AsyncSession, + requested_object: type[Character | Episode | Season], + request: CallbackRequest, + id_: int, + background_tasks: BackgroundTasks, + /, + ) -> Self: + response: Self = cls() + background_tasks.add_task( + response.process_background_task, + session, + requested_object, + request, + id_, + ) + return response diff --git a/futuramaapi/routers/characters/__init__.py b/futuramaapi/routers/characters/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/characters/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/app/routers/characters.py b/futuramaapi/routers/characters/api.py similarity index 52% rename from app/routers/characters.py rename to futuramaapi/routers/characters/api.py index 21b83e4..4c91938 100644 --- a/app/routers/characters.py +++ b/futuramaapi/routers/characters/api.py @@ -1,31 +1,29 @@ -from typing import Annotated +from typing import Annotated, Literal -from fastapi import APIRouter, Depends, Query, status +from fastapi import APIRouter, Depends, HTTPException, Query, status from fastapi_pagination import Page from sqlalchemy.ext.asyncio.session import AsyncSession -from app.repositories.base import OrderByDirection -from app.repositories.models import ( - Character as CharacterModel, -) -from app.repositories.models import ( - CharacterGenderFilter, - CharacterSpeciesFilter, - CharacterStatusFilter, -) -from app.repositories.sessions import get_async_session -from app.services.characters import ( - Character, - process_get_character, - process_get_characters, -) +from futuramaapi.repositories.base import FilterStatementKwargs +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelNotFoundError, NotFoundResponse -router = APIRouter(prefix="/characters") +from .schemas import Character + +router = 
APIRouter( + prefix="/characters", + tags=["characters"], +) @router.get( "/{character_id}", status_code=status.HTTP_200_OK, + responses={ + status.HTTP_404_NOT_FOUND: { + "model": NotFoundResponse, + }, + }, response_model=Character, name="character", ) @@ -42,7 +40,10 @@ async def get_character( Can be used to utilize this endpoint to obtain in-depth insights into a particular character from the Futurama universe. """ - return await process_get_character(character_id, session) + try: + return await Character.get(session, character_id) + except ModelNotFoundError: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) from None @router.get( @@ -52,20 +53,52 @@ async def get_character( name="characters", ) async def get_characters( # noqa: PLR0913 - gender: CharacterGenderFilter | None = None, + gender: Literal[ + "male", + "!male", + "female", + "!female", + "unknown", + "!unknown", + ] + | None = None, character_status: Annotated[ - CharacterStatusFilter | None, + Literal[ + "alive", + "!alive", + "dead", + "!dead", + "unknown", + "!unknown", + ] + | None, Query(alias="status"), ] = None, - species: CharacterSpeciesFilter | None = None, + species: Literal[ + "human", + "!human", + "robot", + "!robot", + "head", + "!head", + "alien", + "!alien", + "mutant", + "!mutant", + "monster", + "!monster", + "unknown", + "!unknown", + ] + | None = None, order_by: Annotated[ - CharacterModel.order_by | None, + Literal["id"] | None, Query(alias="orderBy"), - ] = CharacterModel.order_by.ID, + ] = "id", direction: Annotated[ - OrderByDirection | None, + Literal["asc", "desc"], Query(alias="orderByDirection"), - ] = OrderByDirection.ASC, + ] = "asc", query: Annotated[ str | None, Query( @@ -87,12 +120,16 @@ async def get_characters( # noqa: PLR0913 `/api/characters/?gender=!unknown&status=!unknown&species=alien`. Check query Parameters to more info. 
""" - return await process_get_characters( + return await Character.paginate( session, - gender=gender, - character_status=character_status, - species=species, - order_by=order_by, - direction=direction, - query=query, + filter_params=FilterStatementKwargs( + order_by=order_by, + order_by_direction=direction, + extra={ + "gender": gender, + "species": species, + "status": character_status, + "query": query, + }, + ), ) diff --git a/futuramaapi/routers/characters/schemas.py b/futuramaapi/routers/characters/schemas.py new file mode 100644 index 0000000..036d310 --- /dev/null +++ b/futuramaapi/routers/characters/schemas.py @@ -0,0 +1,31 @@ +from datetime import datetime +from typing import ClassVar + +from fastapi_storages import StorageImage +from pydantic import HttpUrl, field_validator + +from futuramaapi.core import settings +from futuramaapi.helpers.pydantic import BaseModel +from futuramaapi.mixins.pydantic import BaseModelDatabaseMixin +from futuramaapi.repositories.models import CharacterModel + + +class Character(BaseModel, BaseModelDatabaseMixin): + model: ClassVar[type[CharacterModel]] = CharacterModel + + id: int + name: str + gender: CharacterModel.CharacterGender + status: CharacterModel.CharacterStatus + species: CharacterModel.CharacterSpecies + created_at: datetime + image: HttpUrl | None = None + + @field_validator("image", mode="before") + @classmethod + def make_url(cls, value: StorageImage | str | None, /) -> HttpUrl | None: + if value is None: + return None + if isinstance(value, StorageImage): + return settings.build_url(path=value._name) + return HttpUrl(value) diff --git a/futuramaapi/routers/episodes/__init__.py b/futuramaapi/routers/episodes/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/episodes/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/app/routers/episodes.py b/futuramaapi/routers/episodes/api.py similarity index 69% rename from app/routers/episodes.py rename to futuramaapi/routers/episodes/api.py index 4ef1f8f..b6098ca 100644 --- a/app/routers/episodes.py +++ b/futuramaapi/routers/episodes/api.py @@ -1,20 +1,26 @@ -from fastapi import APIRouter, Depends, status +from fastapi import APIRouter, Depends, HTTPException, status from fastapi_pagination import Page from sqlalchemy.ext.asyncio.session import AsyncSession -from app.repositories.sessions import get_async_session -from app.services.episodes import ( - Episode, - process_get_episode, - process_get_episodes, -) +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelNotFoundError, NotFoundResponse + +from .schemas import Episode -router = APIRouter(prefix="/episodes") +router = APIRouter( + prefix="/episodes", + tags=["episodes"], +) @router.get( "/{episode_id}", status_code=status.HTTP_200_OK, + responses={ + status.HTTP_404_NOT_FOUND: { + "model": NotFoundResponse, + }, + }, response_model=Episode, name="episode", ) @@ -31,7 +37,10 @@ async def get_episode( Can be used to get in-depth information about a particular episode of Futurama. """ - return await process_get_episode(episode_id, session) + try: + return await Episode.get(session, episode_id) + except ModelNotFoundError: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) from None @router.get( @@ -52,4 +61,4 @@ async def get_episodes( and other relevant details. 
It's particularly useful for those who want to explore the entire catalog of Futurama episodes or implement features such as episode browsing on your site. """ - return await process_get_episodes(session) + return await Episode.paginate(session) diff --git a/futuramaapi/routers/episodes/schemas.py b/futuramaapi/routers/episodes/schemas.py new file mode 100644 index 0000000..8487cf7 --- /dev/null +++ b/futuramaapi/routers/episodes/schemas.py @@ -0,0 +1,41 @@ +from datetime import date, datetime +from typing import ClassVar + +from pydantic import Field, computed_field + +from futuramaapi.helpers.pydantic import BaseModel +from futuramaapi.mixins.pydantic import BaseModelDatabaseMixin +from futuramaapi.repositories.models import EpisodeModel + + +class EpisodeBase(BaseModel, BaseModelDatabaseMixin): + model: ClassVar[type[EpisodeModel]] = EpisodeModel + + id: int + name: str + broadcast_number: int = Field(alias="number") + production_code: str = Field( + examples=[ + "1ACV01", + ], + ) + + +class Episode(EpisodeBase): + class Season(BaseModel): + id: int + + air_date: date | None + duration: int | None + created_at: datetime + season: Season + + @computed_field( # type: ignore[misc] + examples=[ + "S01E01", + ], + return_type=str, + ) + @property + def broadcast_code(self) -> str: + return f"S{self.season.id:02d}E{self.broadcast_number:02d}" diff --git a/futuramaapi/routers/exceptions.py b/futuramaapi/routers/exceptions.py new file mode 100644 index 0000000..c3e1eb9 --- /dev/null +++ b/futuramaapi/routers/exceptions.py @@ -0,0 +1,17 @@ +from pydantic import Field + +from futuramaapi.helpers.pydantic import BaseModel + + +class ModelNotFoundError(Exception): ... + + +class ModelExistsError(Exception): ... + + +class NotFoundResponse(BaseModel): + detail: str = Field("Not Found") + + +class UnauthorizedResponse(BaseModel): + detail: str = Field("Unauthorized") diff --git a/futuramaapi/routers/graphql/__init__.py b/futuramaapi/routers/graphql/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/graphql/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/futuramaapi/routers/graphql/api.py b/futuramaapi/routers/graphql/api.py new file mode 100644 index 0000000..6a1efef --- /dev/null +++ b/futuramaapi/routers/graphql/api.py @@ -0,0 +1,14 @@ +import strawberry +from strawberry.fastapi import GraphQLRouter + +from .dependencies import get_context +from .schemas import Query + +schema = strawberry.Schema(Query) + +router = GraphQLRouter( + schema, + path="/graphql", + context_getter=get_context, + include_in_schema=False, +) diff --git a/futuramaapi/routers/graphql/context.py b/futuramaapi/routers/graphql/context.py new file mode 100644 index 0000000..1854541 --- /dev/null +++ b/futuramaapi/routers/graphql/context.py @@ -0,0 +1,19 @@ +from fastapi import Depends +from sqlalchemy.ext.asyncio.session import AsyncSession +from strawberry.fastapi import BaseContext + +from futuramaapi.repositories.session import get_async_session + + +class Context(BaseContext): + def __init__(self, session: AsyncSession): + self.session: AsyncSession = session + + super().__init__() + + @classmethod + async def from_dependency( + cls, + session: AsyncSession = Depends(get_async_session), # noqa: B008 + ): + return cls(session) diff --git a/futuramaapi/routers/graphql/conversion.py b/futuramaapi/routers/graphql/conversion.py new file mode 100644 index 0000000..4b4625f --- /dev/null +++ b/futuramaapi/routers/graphql/conversion.py @@ 
-0,0 +1,136 @@ +""" +Actually wanted to move this code to ``futuramaapi.routers.graphql.mixins.StrawberryDatabaseMixin``, but happened +as happened, in this case there won't be a need to pass type, cls and so on. +""" + +from abc import ABC, abstractmethod +from functools import singledispatch +from typing import TYPE_CHECKING, Any, cast + +from fastapi_storages.base import StorageImage +from strawberry.enum import EnumDefinition +from strawberry.type import ( + StrawberryList, + StrawberryOptional, + has_object_definition, +) +from strawberry.union import StrawberryUnion + +from futuramaapi.core import settings +from futuramaapi.repositories.base import Base + +if TYPE_CHECKING: + from strawberry.field import StrawberryField + + +@singledispatch +def _convert( + type_: Any, + data: Any, + /, +): + if has_object_definition(type_): + if hasattr(type(data), "_strawberry_type"): + type_ = type(data)._strawberry_type + if hasattr(type_, "from_model"): + return type_.from_model(data) + return _convert(type_, data) + + if isinstance(data, StorageImage): + if data is None: + return None + + return settings.build_url(path=data._name) + + return data + + +@_convert.register +def _(type_: StrawberryOptional, data: Any, /): + if data is None: + return data + + return _convert(type_.of_type, data) + + +@_convert.register +def _(type_: StrawberryUnion, data: Any, /): + for option_type in type_.types: + if hasattr(option_type, "_pydantic_type"): + source_type = option_type._pydantic_type + else: + source_type = cast(type, option_type) + if isinstance(data, source_type): + return _convert(option_type, data) + + +@_convert.register +def _(type_: EnumDefinition, data: Any, /): + return data + + +@_convert.register +def _(type_: StrawberryList, data: Any, /) -> list: + items: list = [] + for item in data: + items.append(_convert(type_.of_type, item)) + + return items + + +class ConverterBase(ABC): + @staticmethod + @abstractmethod + def to_strawberry[S]( # type: ignore[valid-type] + cls: type[S], # type: ignore[name-defined] + model_instance: Base, + /, + ) -> S: # type: ignore[name-defined] + ... + + @staticmethod + @abstractmethod + def get_edges[S]( # type: ignore[valid-type] + cls: type[S], # type: ignore[name-defined] + model_instance: list[Base], + /, + ) -> S: # type: ignore[name-defined] + ... 
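The conversion module above is built around `functools.singledispatch`: the unregistered `_convert` is the fallback for plain values, and each registered overload unwraps one Strawberry wrapper (optional, union, enum, list) and recurses into the inner type. Below is a self-contained sketch of that dispatch pattern using simplified stand-in wrappers rather than Strawberry's types.

```python
from dataclasses import dataclass
from functools import singledispatch
from typing import Any


@dataclass
class OptionalType:  # stand-in for StrawberryOptional
    of_type: Any


@dataclass
class ListType:  # stand-in for StrawberryList
    of_type: Any


@singledispatch
def convert(type_: Any, data: Any) -> Any:
    # Fallback: scalars and unknown wrappers pass through unchanged.
    return data


@convert.register
def _(type_: OptionalType, data: Any) -> Any:
    return None if data is None else convert(type_.of_type, data)


@convert.register
def _(type_: ListType, data: Any) -> list:
    return [convert(type_.of_type, item) for item in data]


print(convert(ListType(OptionalType(int)), [1, None, 3]))  # -> [1, None, 3]
```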
+ + +class ModelConverter(ConverterBase): + @staticmethod + def to_strawberry[S]( # type: ignore[valid-type] + cls: type[S], # type: ignore[name-defined] + model_instance: Base, + /, + ) -> S: # type: ignore[name-defined] + kwargs: dict = {} + + field: StrawberryField + for field in cls.__strawberry_definition__.fields: + data: Any = getattr(model_instance, field.python_name, None) + if field.init: + kwargs[field.python_name] = _convert( + field.type, + data, + ) + + return cls(**kwargs) + + @staticmethod + def get_edges[S]( # type: ignore[valid-type] + cls: type[S], # type: ignore[name-defined] + data: list[Base], + /, + ) -> list[S] | None: # type: ignore[name-defined] + field: StrawberryField = next(f for f in cls.__strawberry_definition__.fields if f.python_name == "edges") + if field.init: + return _convert( + field.type, + data, + ) + return None + + +converter: ConverterBase = ModelConverter() diff --git a/futuramaapi/routers/graphql/dependencies.py b/futuramaapi/routers/graphql/dependencies.py new file mode 100644 index 0000000..5509a0e --- /dev/null +++ b/futuramaapi/routers/graphql/dependencies.py @@ -0,0 +1,9 @@ +from fastapi import Depends + +from .context import Context + + +async def get_context( + context: Context = Depends(Context.from_dependency), # noqa: B008 +) -> Context: + return context diff --git a/futuramaapi/routers/graphql/mixins.py b/futuramaapi/routers/graphql/mixins.py new file mode 100644 index 0000000..70c4f3e --- /dev/null +++ b/futuramaapi/routers/graphql/mixins.py @@ -0,0 +1,55 @@ +from typing import ClassVar, Self, cast + +from fastapi_storages.base import StorageImage +from sqlalchemy.ext.asyncio.session import AsyncSession +from strawberry.field import StrawberryField + +from futuramaapi.core import settings +from futuramaapi.repositories.base import Base, FilterStatementKwargs, ModelDoesNotExistError + +from .conversion import ConverterBase, converter + + +class StrawberryDatabaseMixin: + model: ClassVar[type[Base]] + + converter: ClassVar[ConverterBase] = converter + + @classmethod + def get_fields(cls) -> list[StrawberryField]: + return cls.__strawberry_definition__.fields # type: ignore[attr-defined] + + @staticmethod + def to_img(field: StorageImage | None, /) -> str | None: + if field is None: + return None + + return settings.build_url(path=field._name) + + @classmethod + def from_model(cls, instance: Base, /) -> Self: + return cls.converter.to_strawberry(cls, instance) + + @classmethod + async def get(cls, session: AsyncSession, id_: int, /) -> Self | None: + try: + obj: Base = await cls.model.get( + session, + id_, + ) + except ModelDoesNotExistError: + return None + + return cls.from_model(obj) + + @classmethod + async def paginate(cls, session: AsyncSession, kwargs: FilterStatementKwargs, /) -> Self: + total: int = await cls.model.count(session) + edges: list[Base] = cast(list[Base], await cls.model.filter(session, kwargs)) + + return cls( + limit=kwargs.limit, # type: ignore[call-arg] + offset=kwargs.offset, # type: ignore[call-arg] + total=total, # type: ignore[call-arg] + edges=cls.converter.get_edges(cls, edges), # type: ignore[call-arg] + ) diff --git a/futuramaapi/routers/graphql/schemas.py b/futuramaapi/routers/graphql/schemas.py new file mode 100644 index 0000000..72adec3 --- /dev/null +++ b/futuramaapi/routers/graphql/schemas.py @@ -0,0 +1,218 @@ +from datetime import date, datetime +from enum import StrEnum +from typing import Any, ClassVar + +import strawberry +from strawberry.types import Info + +from futuramaapi.repositories.base 
import Base, FilterStatementKwargs +from futuramaapi.repositories.models import CharacterModel, EpisodeModel, SeasonModel + +from .mixins import StrawberryDatabaseMixin +from .validators import LimitsRule + + +@strawberry.type +class PageBase(StrawberryDatabaseMixin): + limit: int + offset: int + total: int + edges: list[Any] + + +@strawberry.type +class Character(StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = CharacterModel + + id: int + name: str + status: strawberry.enum(CharacterModel.CharacterStatus) # type: ignore[valid-type] + gender: strawberry.enum(CharacterModel.CharacterGender) # type: ignore[valid-type] + species: strawberry.enum(CharacterModel.CharacterSpecies) # type: ignore[valid-type] + image: str | None + + +@strawberry.type +class Characters(PageBase, StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = CharacterModel + + edges: list[Character] + + +@strawberry.type +class Episode(StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = EpisodeModel + + @strawberry.type + class SeasonEpisode(StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = SeasonModel + + id: int + + id: int + air_date: date | None + duration: int | None + created_at: datetime + season: SeasonEpisode + + +@strawberry.type +class Episodes(PageBase, StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = EpisodeModel + + edges: list[Episode] + + +@strawberry.type +class Season(StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = SeasonModel + + @strawberry.type + class EpisodeSeason(StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = EpisodeModel + + id: int + air_date: date | None + duration: int | None + created_at: datetime + + id: int + episodes: list[EpisodeSeason] + + +@strawberry.type +class Seasons(PageBase, StrawberryDatabaseMixin): + model: ClassVar[type[Base]] = SeasonModel + + edges: list[Season] + + +@strawberry.type +class Query: + @strawberry.enum + class GenderFilter(StrEnum): + male = "male" + not_male = "!male" + female = "female" + not_femail = "!female" + unknown = "unknown" + not_unknown = "!unknown" + + @strawberry.enum + class StatusFilter(StrEnum): + alive = "alive" + not_alive = "!alive" + dead = "dead" + not_dead = "!dead" + unknown = "unknown" + not_unknown = "!unknown" + + @strawberry.enum + class SpeciesFilter(StrEnum): + human = "human" + not_human = "!human" + robot = "robot" + not_robot = "!robot" + head = "head" + not_head = "!head" + alien = "alien" + not_alien = "!alien" + mutant = "mutant" + not_mutant = "!mutant" + monster = "monster" + not_monster = "!monster" + unknown = "unknown" + not_unknown = "!unknown" + + @strawberry.field() + async def character( + self, + info: Info, + character_id: int, + /, + ) -> Character | None: + return await Character.get(info.context.session, character_id) + + @strawberry.field( + extensions=[ + LimitsRule(), + ], + ) + async def characters( # noqa: PLR0913 + self, + info: Info, + /, + *, + limit: int | None = 50, + offset: int | None = 0, + gender: GenderFilter | None = None, + status: StatusFilter | None = None, + species: SpeciesFilter | None = None, + ) -> Characters: + kwargs: FilterStatementKwargs = FilterStatementKwargs( + offset=offset, + limit=limit, + extra={ + "gender": gender, + "species": species, + "status": status, + }, + ) + + return await Characters.paginate(info.context.session, kwargs) + + @strawberry.field() + async def episode( + self, + info: Info, + episode_id: int, + /, + ) -> Episode | None: + return await Episode.get(info.context.session, episode_id) + + @strawberry.field( 
+ extensions=[ + LimitsRule(), + ], + ) + async def episodes( + self, + info: Info, + *, + limit: int | None = 50, + offset: int | None = 0, + ) -> Episodes: + kwargs: FilterStatementKwargs = FilterStatementKwargs( + offset=offset, + limit=limit, + ) + + return await Episodes.paginate(info.context.session, kwargs) + + @strawberry.field() + async def season( + self, + info: Info, + season_id: int, + /, + ) -> Season | None: + return await Season.get(info.context.session, season_id) + + @strawberry.field( + extensions=[ + LimitsRule(), + ], + ) + async def seasons( + self, + info: Info, + *, + limit: int | None = 50, + offset: int | None = 0, + ) -> Seasons: + kwargs: FilterStatementKwargs = FilterStatementKwargs( + offset=offset, + limit=limit, + ) + + return await Seasons.paginate(info.context.session, kwargs) diff --git a/futuramaapi/routers/graphql/validators.py b/futuramaapi/routers/graphql/validators.py new file mode 100644 index 0000000..13e96e9 --- /dev/null +++ b/futuramaapi/routers/graphql/validators.py @@ -0,0 +1,26 @@ +from typing import Any + +from strawberry import Info +from strawberry.extensions.field_extension import AsyncExtensionResolver, FieldExtension + + +class LimitsRule(FieldExtension): + @staticmethod + def validate_range(name: str, value: int, min_: int, max_: int, /) -> None: + if not min_ <= value <= max_: + raise ValueError(f"{name} can be more than {min_} and less than {max_}") from None + + def validate_limits(self, kwargs: dict): + for limit in ["limit", "offset"]: + self.validate_range(limit, kwargs[limit], 0, 50) + + async def resolve_async( + self, + next_: AsyncExtensionResolver, + source: Any, + info: Info, + **kwargs, + ): + self.validate_limits(kwargs) + + return await next_(source, info, **kwargs) diff --git a/futuramaapi/routers/notifications/__init__.py b/futuramaapi/routers/notifications/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/notifications/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/app/routers/notifications.py b/futuramaapi/routers/notifications/api.py similarity index 58% rename from app/routers/notifications.py rename to futuramaapi/routers/notifications/api.py index 2942789..e8031f8 100644 --- a/app/routers/notifications.py +++ b/futuramaapi/routers/notifications/api.py @@ -1,12 +1,15 @@ -from fastapi import APIRouter, Depends, Request, status +from fastapi import APIRouter, Depends, HTTPException, Request, status from sqlalchemy.ext.asyncio.session import AsyncSession from sse_starlette.sse import EventSourceResponse -from app.repositories.sessions import get_async_session -from app.services.notifications import CharacterMove, process_character_sse +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelNotFoundError + +from .schemas import CharacterNotification router = APIRouter( prefix="/notifications", + tags=["notifications"], ) @@ -15,7 +18,7 @@ response_class=EventSourceResponse, responses={ status.HTTP_200_OK: { - "model": CharacterMove, + "model": CharacterNotification, } }, status_code=status.HTTP_200_OK, @@ -33,4 +36,10 @@ async def character_sse( It facilitates real-time updates on character path. Exercise caution when using this endpoint to ensure responsible and accurate data retrieval. 
""" - return await process_character_sse(character_id, request, session) + try: + return await CharacterNotification.from_request(character_id, request, session) + except ModelNotFoundError: + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail=f"Character with id={character_id} not found", + ) from None diff --git a/futuramaapi/routers/notifications/schemas.py b/futuramaapi/routers/notifications/schemas.py new file mode 100644 index 0000000..87cec79 --- /dev/null +++ b/futuramaapi/routers/notifications/schemas.py @@ -0,0 +1,57 @@ +from asyncio import sleep +from datetime import datetime +from random import randint + +from fastapi import Request +from pydantic import Field +from sqlalchemy.ext.asyncio.session import AsyncSession +from sse_starlette import EventSourceResponse, ServerSentEvent + +from futuramaapi.helpers.pydantic import BaseModel +from futuramaapi.routers.characters.schemas import Character + +MIN_COORDINATE: int = 0 +MAX_COORDINATE: int = 2**6 + + +class CharacterNotification(BaseModel): + class Notification(BaseModel): + time: datetime = Field(default_factory=datetime.now) + x: int = Field( + description="Character X coordinate", + ge=MIN_COORDINATE, + le=MAX_COORDINATE, + ) + y: int = Field( + description="Character Y coordinate", + ge=MIN_COORDINATE, + le=MAX_COORDINATE, + ) + + item: Character + notification: Notification + + @classmethod + async def get_move(cls, request: Request, character: Character, /): + while True: + if await request.is_disconnected(): + # Can be removed. Do not trust lib, force connection close. + break + + yield ServerSentEvent( + data=cls( + item=character, + notification=cls.Notification( + x=randint(MIN_COORDINATE, MAX_COORDINATE), # noqa: S311 + y=randint(MIN_COORDINATE, MAX_COORDINATE), # noqa: S311 + ), + ).model_dump() + ) + await sleep( + randint(1, 3), # noqa: S311 + ) + + @classmethod + async def from_request(cls, id_: int, request: Request, session: AsyncSession, /): + character: Character = await Character.get(session, id_) + return EventSourceResponse(cls.get_move(request, character)) diff --git a/futuramaapi/routers/root/__init__.py b/futuramaapi/routers/root/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/root/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/futuramaapi/routers/root/api.py b/futuramaapi/routers/root/api.py new file mode 100644 index 0000000..056b1a2 --- /dev/null +++ b/futuramaapi/routers/root/api.py @@ -0,0 +1,82 @@ +from fastapi import APIRouter, Depends, Request, Response, status +from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html +from fastapi.responses import FileResponse +from sqlalchemy.ext.asyncio.session import AsyncSession + +from futuramaapi.repositories.session import get_async_session + +from .schemas import About, Root + +router = APIRouter() + + +@router.get( + "/health", + tags=[ + "health", + ], + include_in_schema=False, +) +def health() -> Response: + return Response(status_code=status.HTTP_200_OK) + + +@router.get( + "/favicon.ico", + include_in_schema=False, +) +async def favicon(): + return FileResponse("favicon.ico") + + +@router.get( + "/swagger", + include_in_schema=False, + name="swagger", +) +async def get_swagger(): + return get_swagger_ui_html( + openapi_url="/openapi.json", + title="Documentation | Futurama API", + swagger_favicon_url="/favicon.ico", + ) + + +@router.get( + "/docs", + include_in_schema=False, + name="redoc_html", +) +async def 
get_redoc(): + return get_redoc_html( + openapi_url="/openapi.json", + title="Documentation | Futurama API", + redoc_favicon_url="/favicon.ico", + ) + + +@router.get( + "/", + include_in_schema=False, + status_code=status.HTTP_200_OK, + name="root", +) +async def get_root( + request: Request, + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> Response: + obj: Root = await Root.from_request(session, request) + return obj.get_response(request) + + +@router.get( + "/about", + include_in_schema=False, + name="about", +) +async def about( + request: Request, + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> Response: + obj: About = await About.from_request(session, request) + return obj.get_response(request) diff --git a/futuramaapi/routers/root/schemas.py b/futuramaapi/routers/root/schemas.py new file mode 100644 index 0000000..e6b93da --- /dev/null +++ b/futuramaapi/routers/root/schemas.py @@ -0,0 +1,40 @@ +from typing import ClassVar, Self + +from sqlalchemy.ext.asyncio import AsyncSession +from starlette.requests import Request + +from futuramaapi.helpers.pydantic import BaseModel, Field +from futuramaapi.mixins.pydantic import BaseModelTemplateMixin +from futuramaapi.repositories.base import FilterStatementKwargs +from futuramaapi.routers.characters.schemas import Character +from futuramaapi.routers.users.schemas import User + + +class Root(BaseModel, BaseModelTemplateMixin): + characters: list[Character] + user_count: int = Field(alias="user_count") + + template_name: ClassVar[str] = "index.html" + + @classmethod + async def from_request(cls, session: AsyncSession, request: Request, /) -> Self: + user_count: int = await User.count(session) + characters: list[Character] = await Character.filter( + session, + FilterStatementKwargs( + limit=6, + ), + ) + + return cls( + characters=characters, + user_count=user_count, + ) + + +class About(BaseModel, BaseModelTemplateMixin): + template_name: ClassVar[str] = "about.html" + + @classmethod + async def from_request(cls, session: AsyncSession, request: Request, /) -> Self: + return cls() diff --git a/futuramaapi/routers/seasons/__init__.py b/futuramaapi/routers/seasons/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/seasons/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/app/routers/seasons.py b/futuramaapi/routers/seasons/api.py similarity index 68% rename from app/routers/seasons.py rename to futuramaapi/routers/seasons/api.py index bf2d003..23422a2 100644 --- a/app/routers/seasons.py +++ b/futuramaapi/routers/seasons/api.py @@ -1,20 +1,26 @@ -from fastapi import APIRouter, Depends, status +from fastapi import APIRouter, Depends, HTTPException, status from fastapi_pagination import Page from sqlalchemy.ext.asyncio.session import AsyncSession -from app.repositories.sessions import get_async_session -from app.services.seasons import ( - Season, - process_get_season, - process_get_seasons, -) +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelNotFoundError, NotFoundResponse + +from .schemas import Season -router = APIRouter(prefix="/seasons") +router = APIRouter( + prefix="/seasons", + tags=["seasons"], +) @router.get( "/{season_id}", status_code=status.HTTP_200_OK, + responses={ + status.HTTP_404_NOT_FOUND: { + "model": NotFoundResponse, + }, + }, response_model=Season, name="season", ) @@ -29,7 +35,10 @@ async def get_season( Can be used to gain 
in-depth insights into a particular season of Futurama. """ - return await process_get_season(season_id, session) + try: + return await Season.get(session, season_id) + except ModelNotFoundError: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) from None @router.get( @@ -50,4 +59,4 @@ async def get_seasons( This endpoint is valuable for those interested in exploring the entirety of Futurama's seasons or implementing features like season browsing on your site. """ - return await process_get_seasons(session) + return await Season.paginate(session) diff --git a/futuramaapi/routers/seasons/schemas.py b/futuramaapi/routers/seasons/schemas.py new file mode 100644 index 0000000..2ca4a4d --- /dev/null +++ b/futuramaapi/routers/seasons/schemas.py @@ -0,0 +1,15 @@ +from typing import ClassVar + +from futuramaapi.helpers.pydantic import BaseModel +from futuramaapi.mixins.pydantic import BaseModelDatabaseMixin +from futuramaapi.repositories.models import SeasonModel +from futuramaapi.routers.episodes.schemas import EpisodeBase + + +class Season(BaseModel, BaseModelDatabaseMixin): + model: ClassVar[type[SeasonModel]] = SeasonModel + + class Episode(EpisodeBase): ... + + id: int + episodes: list[Episode] diff --git a/futuramaapi/routers/tokens/__init__.py b/futuramaapi/routers/tokens/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/tokens/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/app/routers/tokens.py b/futuramaapi/routers/tokens/api.py similarity index 55% rename from app/routers/tokens.py rename to futuramaapi/routers/tokens/api.py index c8a15cd..b8bc36b 100644 --- a/app/routers/tokens.py +++ b/futuramaapi/routers/tokens/api.py @@ -1,23 +1,17 @@ from typing import Annotated from fastapi import APIRouter, Depends, status -from sqlalchemy.ext.asyncio.session import AsyncSession -from app.repositories.sessions import get_async_session -from app.services.auth import oauth2_refresh_scheme -from app.services.security import ( - OAuth2PasswordRequestJson, - RefreshTokenData, - UnauthorizedResponse, -) -from app.services.tokens import ( - RefreshToken, - Token, - process_refresh_token_auth_user, - process_token_auth_user, -) +from futuramaapi.routers.exceptions import UnauthorizedResponse +from futuramaapi.routers.users.schemas import User -router = APIRouter(prefix="/tokens") +from .dependencies import from_form_data, refresh_token +from .schemas import UserToken + +router = APIRouter( + prefix="/tokens", + tags=["tokens"], +) @router.post( @@ -27,13 +21,12 @@ "model": UnauthorizedResponse, }, }, - response_model=Token, + response_model=UserToken, name="user_token_auth", ) async def token_auth_user( - form_data: Annotated[OAuth2PasswordRequestJson, Depends()], - session: AsyncSession = Depends(get_async_session), # noqa: B008 -) -> Token: + user: Annotated[User, Depends(from_form_data)], +) -> UserToken: """Authenticate user. JSON Web Token (JWT) authentication is a popular method for securing web applications and APIs. @@ -42,7 +35,7 @@ async def token_auth_user( Use a token in a response to get secured stored data of your user. 
""" - return await process_token_auth_user(session, form_data) + return UserToken.from_user(user) @router.post( @@ -52,15 +45,15 @@ async def token_auth_user( "model": UnauthorizedResponse, }, }, - response_model=RefreshToken, + response_model=UserToken, name="user_token_auth_refresh", ) async def refresh_token_auth_user( - token: Annotated[RefreshTokenData, Depends(oauth2_refresh_scheme)], -) -> RefreshToken: + token: Annotated[UserToken, Depends(refresh_token)], +) -> UserToken: """Refresh JWT. The Refresh JWT Token endpoint extends the lifespan of JSON Web Tokens (JWTs) without requiring user reauthentication. This API feature ensures uninterrupted access to secured resources. """ - return await process_refresh_token_auth_user(token) + return token diff --git a/futuramaapi/routers/tokens/dependencies.py b/futuramaapi/routers/tokens/dependencies.py new file mode 100644 index 0000000..bad8504 --- /dev/null +++ b/futuramaapi/routers/tokens/dependencies.py @@ -0,0 +1,42 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException, status +from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm +from sqlalchemy.ext.asyncio.session import AsyncSession + +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelNotFoundError +from futuramaapi.routers.users.schemas import User, UserPasswordError + +from .schemas import DecodedTokenError, UserToken, UserTokenRefreshRequest + +_oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/tokens/users/auth") + + +async def from_form_data( + form_data: Annotated[OAuth2PasswordRequestForm, Depends()], + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> User: + try: + user: User = await User.auth(session, form_data.username, form_data.password) + except (ModelNotFoundError, UserPasswordError): + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None + + return user + + +def refresh_token( + token: Annotated[str, Depends(_oauth2_scheme)], + data: UserTokenRefreshRequest, +) -> UserToken: + token_: UserToken = UserToken( + access_token=token, + refresh_token=data.refresh_token, + ) + + try: + token_.refresh() + except DecodedTokenError: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None + + return token_ diff --git a/futuramaapi/routers/tokens/schemas.py b/futuramaapi/routers/tokens/schemas.py new file mode 100644 index 0000000..e99f3bb --- /dev/null +++ b/futuramaapi/routers/tokens/schemas.py @@ -0,0 +1,84 @@ +from typing import TYPE_CHECKING, ClassVar, Literal, Self + +from pydantic import Field + +from futuramaapi.helpers.pydantic import BaseModel, BaseTokenModel +from futuramaapi.mixins.pydantic import BaseModelTokenMixin, DecodedTokenError + +if TYPE_CHECKING: + from futuramaapi.routers.users.schemas import User + + +class _DecodedTokenBase(BaseTokenModel): + type: Literal["access", "refresh"] + + +class DecodedUserToken(_DecodedTokenBase, BaseModelTokenMixin): + id: int + + @classmethod + def from_user(cls, user: "User", type_: Literal["access", "refresh"], /) -> Self: + return cls(type=type_, **user.model_dump()) + + @classmethod + def decode( + cls, + token: str, + /, + *, + algorithm="HS256", + allowed_type: Literal["access", "refresh"] = "access", + ) -> Self: + decoded_token: Self = super().decode(token, algorithm=algorithm) + if decoded_token.type != allowed_type: + raise DecodedTokenError() from None + + return decoded_token + + +class UserTokenRefreshRequest(BaseModel): + refresh_token: str + + 
+class UserToken(BaseModel): + access_token: str = Field( + alias="access_token", + description="Keep in mind, that the field is not in a camel case. That's the standard.", + ) + refresh_token: str = Field( + alias="refresh_token", + description="Keep in mind, that the field is not in a camel case. That's the standard.", + ) + + _default_access_seconds: ClassVar[int] = 15 * 60 + _default_refresh_seconds: ClassVar[int] = 5 * 24 * 60 * 60 + + @classmethod + def from_user( + cls, + user: "User", + /, + ) -> Self: + access: DecodedUserToken = DecodedUserToken.from_user(user, "access") + refresh: DecodedUserToken = DecodedUserToken.from_user(user, "refresh") + return cls( + access_token=access.tokenize(cls._default_access_seconds), + refresh_token=refresh.tokenize(cls._default_refresh_seconds), + ) + + def refresh(self) -> None: + try: + access: DecodedUserToken = DecodedUserToken.decode(self.access_token) + except DecodedTokenError: + raise + + try: + refresh: DecodedUserToken = DecodedUserToken.decode(self.refresh_token, allowed_type="refresh") + except DecodedTokenError: + raise + + access.refresh_nonce() + refresh.refresh_nonce() + + self.access_token = access.tokenize(self._default_access_seconds) + self.refresh_token = refresh.tokenize(self._default_refresh_seconds) diff --git a/futuramaapi/routers/users/__init__.py b/futuramaapi/routers/users/__init__.py new file mode 100644 index 0000000..9726651 --- /dev/null +++ b/futuramaapi/routers/users/__init__.py @@ -0,0 +1,5 @@ +from .api import router + +__all__ = [ + "router", +] diff --git a/futuramaapi/routers/users/api.py b/futuramaapi/routers/users/api.py new file mode 100644 index 0000000..7eb7b15 --- /dev/null +++ b/futuramaapi/routers/users/api.py @@ -0,0 +1,137 @@ +from typing import Annotated + +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio.session import AsyncSession + +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelExistsError, UnauthorizedResponse + +from .dependencies import from_signature, from_token +from .schemas import User, UserAlreadyActivatedError, UserCreateRequest, UserUpdateRequest + +router = APIRouter( + prefix="/users", + tags=["users"], +) + + +@router.post( + "", + status_code=status.HTTP_201_CREATED, + response_model=User, + name="user", +) +async def create_user( + data: UserCreateRequest, + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> User: + """Create User. + + The user add endpoint is an API function allowing the creation of new user accounts. + It receives user details via HTTP requests, validates the information, + and stores it in the system's database. + This endpoint is essential for user registration and onboarding. + + Please note that currently endpoint is not protected. + However, if there are a lot of spam requests, the endpoint will be blocked or limited. + """ + try: + return await User.create(session, data) + except ModelExistsError: + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail="User already exists.", + ) from None + + +@router.get( + "/me", + responses={ + status.HTTP_401_UNAUTHORIZED: { + "model": UnauthorizedResponse, + }, + }, + name="user_me", +) +async def get_me( + user: Annotated[User, Depends(from_token)], +) -> User: + """Get user details. + + Retrieve authenticated user profile information, including username, email, and account details. 
+ Personalize user experiences within the application using the JSON response containing user-specific data. + """ + return user + + +@router.get( + "/activate", + responses={ + status.HTTP_401_UNAUTHORIZED: { + "model": UnauthorizedResponse, + } + }, + status_code=status.HTTP_200_OK, + name="activate_user", +) +async def activate_user( + user: User = Depends(from_signature), # noqa: B008 + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> None: + """Activate user.""" + try: + await user.activate(session) + except UserAlreadyActivatedError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User already activated.", + ) from None + + +@router.put( + "", + responses={ + status.HTTP_401_UNAUTHORIZED: { + "model": UnauthorizedResponse, + }, + }, + name="update_user", +) +async def update_user( + user: Annotated[User, Depends(from_token)], + data: UserUpdateRequest, + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> User: + """Update user details. + + This endpoint is crucial for users to manage and maintain accurate profile information, + often including authentication and authorization checks for security. + """ + await user.update(session, data) + return user + + +@router.post( + "/confirmations/resend", + responses={ + status.HTTP_401_UNAUTHORIZED: { + "model": UnauthorizedResponse, + }, + }, + status_code=status.HTTP_200_OK, + name="resend_user_confirmation", +) +async def resend_user_confirmation( + user: Annotated[User, Depends(from_token)], +) -> None: + """Resend user confirmation. + + If the confirmation message is not delivered or got lost, user can request another message. + """ + try: + await user.send_confirmation_email() + except UserAlreadyActivatedError: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User already activated.", + ) from None diff --git a/futuramaapi/routers/users/dependencies.py b/futuramaapi/routers/users/dependencies.py new file mode 100644 index 0000000..fd9fb44 --- /dev/null +++ b/futuramaapi/routers/users/dependencies.py @@ -0,0 +1,47 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException, status +from fastapi.security import OAuth2PasswordBearer +from sqlalchemy.ext.asyncio.session import AsyncSession + +from futuramaapi.mixins.pydantic import DecodedTokenError +from futuramaapi.repositories.session import get_async_session +from futuramaapi.routers.exceptions import ModelNotFoundError +from futuramaapi.routers.tokens.schemas import DecodedUserToken +from futuramaapi.routers.users.schemas import DecodedUserSignature, User + +_oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/tokens/users/auth") + + +async def from_token( + token: Annotated[str, Depends(_oauth2_scheme)], + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> User: + try: + decoded_token: DecodedUserToken = DecodedUserToken.decode(token, allowed_type="access") + except DecodedTokenError: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None + + try: + user: User = await User.get(session, decoded_token.id) + except ModelNotFoundError: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None + + return user + + +async def from_signature( + sig: str, + session: AsyncSession = Depends(get_async_session), # noqa: B008 +) -> User: + try: + decoded_signature: DecodedUserSignature = DecodedUserSignature.decode(sig) + except DecodedTokenError: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None + + 
try: + user: User = await User.get(session, decoded_signature.id) + except ModelNotFoundError: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) from None + + return user diff --git a/futuramaapi/routers/users/schemas.py b/futuramaapi/routers/users/schemas.py new file mode 100644 index 0000000..4e81008 --- /dev/null +++ b/futuramaapi/routers/users/schemas.py @@ -0,0 +1,188 @@ +from datetime import datetime +from typing import ClassVar, Self + +from pydantic import EmailStr, Field, HttpUrl, SecretStr, field_validator, model_validator +from sqlalchemy.ext.asyncio.session import AsyncSession + +from futuramaapi.core import feature_flags, settings +from futuramaapi.helpers.pydantic import BaseModel, BaseTokenModel +from futuramaapi.mixins.pydantic import BaseModelDatabaseMixin, BaseModelTokenMixin +from futuramaapi.repositories.base import ModelDoesNotExistError +from futuramaapi.repositories.models import UserModel +from futuramaapi.routers.exceptions import ModelNotFoundError + + +class UserBase(BaseModel): + model: ClassVar[type[UserModel]] = UserModel + + name: str = Field( + min_length=1, + max_length=64, + ) + surname: str = Field( + min_length=1, + max_length=64, + ) + middle_name: str | None = Field( + default=None, + min_length=1, + max_length=64, + ) + email: EmailStr + username: str = Field( + min_length=5, + max_length=64, + ) + password: SecretStr = Field( + min_length=8, + max_length=128, + ) + is_subscribed: bool = Field( + default=True, + ) + + +class UserCreateRequest(UserBase): + @field_validator("password", mode="after") + @classmethod + def hash_password(cls, value: SecretStr, /) -> SecretStr: + return SecretStr(cls.hasher.encode(value.get_secret_value())) + + +class UserBaseError(Exception): ... + + +class UserPasswordError(UserBaseError): ... + + +class UserAlreadyActivatedError(UserBaseError): ... 
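A standalone sketch of the `field_validator` pattern used by `UserCreateRequest` above (and by `UserUpdateRequest` just below): the plaintext password is replaced by its hash while the request model is being validated, so plaintext never reaches the persistence layer. The hasher here is a `hashlib` stand-in for illustration only, not the project's `hasher` helper, and is not suitable for real password storage.

```python
import hashlib

from pydantic import BaseModel, SecretStr, field_validator


class SignupSketch(BaseModel):
    username: str
    password: SecretStr

    @field_validator("password", mode="after")
    @classmethod
    def hash_password(cls, value: SecretStr) -> SecretStr:
        # Replace the secret's plaintext with a digest during validation.
        digest = hashlib.sha256(value.get_secret_value().encode()).hexdigest()
        return SecretStr(digest)


request = SignupSketch(username="fry", password="pizza delivery")
print(request.password.get_secret_value())  # the stored hash, not the plaintext
```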
+ + +class UserUpdateRequest(BaseModel): + name: str | None = Field( + min_length=1, + max_length=64, + default=None, + ) + surname: str | None = Field( + min_length=1, + max_length=64, + default=None, + ) + middle_name: str | None = Field( + default=None, + min_length=1, + max_length=64, + ) + password: SecretStr | None = Field( + default=None, + min_length=8, + max_length=128, + ) + is_subscribed: bool | None = None + + @field_validator("password", mode="after") + @classmethod + def hash_password(cls, value: SecretStr | None, /) -> SecretStr | None: + if value is None: + return None + return SecretStr(cls.hasher.encode(value.get_secret_value())) + + +class DecodedUserSignature(BaseTokenModel, BaseModelTokenMixin): + id: int + + +class UserActivateRequest(BaseModel): + is_confirmed: bool + + +class ConfirmationBody(BaseModel): + class _User(BaseModel): + id: int + name: str + surname: str + + user: _User + url: HttpUrl = HttpUrl.build( + scheme="https", + host=settings.trusted_host, + ) + + @property + def signature(self) -> str: + return DecodedUserSignature(id=self.user.id).tokenize(3 * 24 * 60 * 60) + + @model_validator(mode="after") + def build_confirmation_url(self) -> Self: + self.url = HttpUrl.build( + scheme=self.url.scheme, + host=self.url.host, + path="api/users/activate", + query=f"sig={self.signature}", + ) + return self + + @classmethod + def from_user(cls, user: "User", /) -> Self: + return cls( + user=user.model_dump(), + ) + + +class User(UserBase, BaseModelDatabaseMixin): + id: int + is_confirmed: bool + created_at: datetime + + def verify_password(self, password: str, /): + if not self.hasher.verify(password, self.password.get_secret_value()): + raise UserPasswordError() from None + + @classmethod + async def from_username(cls, session: AsyncSession, username: str, /) -> Self: + try: + obj: UserModel = await cls.model.get(session, username, field=UserModel.username) + except ModelDoesNotExistError: + raise ModelNotFoundError() from None + return cls.model_validate(obj) + + @classmethod + async def auth(cls, session: AsyncSession, username: str, password: str, /) -> Self: + try: + user: User = await User.from_username(session, username) + except ModelNotFoundError: + raise + + try: + user.verify_password(password) + except UserPasswordError: + raise + + return user + + async def activate(self, session: AsyncSession, /) -> None: + if self.is_confirmed is True: + raise UserAlreadyActivatedError() from None + + await self.update(session, UserActivateRequest(is_confirmed=True)) + + async def send_confirmation_email(self) -> None: + if self.is_confirmed is True: + raise UserAlreadyActivatedError() from None + + if feature_flags.activate_users is False: + return + + await settings.email.send( + [self.email], + "FuturamaAPI - Account Activation", + ConfirmationBody.from_user(self), + "emails/confirmation.html", + ) + + @classmethod + async def create(cls, session: AsyncSession, data: BaseModel, /) -> Self: + user: Self = await super().create(session, data) + await user.send_confirmation_email() + return user diff --git a/install-dependencies.sh b/install-dependencies.sh deleted file mode 100644 index f3abe4f..0000000 --- a/install-dependencies.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -apt-get update -apt-get install --assume-yes --no-install-recommends \ - make - -apt-get clean -rm -rf /var/lib/apt/lists/* diff --git a/locale/en_US/LC_MESSAGES/messages.po b/locale/en_US/LC_MESSAGES/messages.po deleted file mode 100644 index d4a22c7..0000000 --- 
a/locale/en_US/LC_MESSAGES/messages.po +++ /dev/null @@ -1,229 +0,0 @@ -# English (United States) translations for FuturamaAPI. -# Copyright (C) 2023 FuturamaAPI -# This file is distributed under the same license as the FuturamaAPI -# project. -# FIRST AUTHOR , 2023. -# -msgid "" -msgstr "" -"Project-Id-Version: FuturamaAPI 0.0.1\n" -"Report-Msgid-Bugs-To: coldie322@gmail.com\n" -"POT-Creation-Date: 2024-03-13 21:39+0100\n" -"PO-Revision-Date: 2023-11-24 14:14+0100\n" -"Last-Translator: FULL NAME \n" -"Language: en_US\n" -"Language-Team: en_US \n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.13.1\n" - -#: app/routers/root.py:52 templates/base.html:89 -msgid "FB00003" -msgstr "Documentation" - -#: app/routers/root.py:52 templates/about.html:5 templates/base.html:8 -#: templates/base.html:25 templates/base.html:77 -msgid "FB00001" -msgstr "Futurama API" - -#: app/services/notifications.py:18 -msgid "FB00007" -msgstr "Character name" - -#: app/services/notifications.py:20 -msgid "FB00008" -msgstr "Character X coordinate" - -#: app/services/notifications.py:25 -msgid "FB00009" -msgstr "Character Y coordinate" - -#: app/services/users.py:130 -msgid "FuturamaAPI - Account Activation" -msgstr "" - -#: templates/about.html:5 templates/about.html:13 templates/base.html:54 -msgid "FB00012" -msgstr "About" - -#: templates/about.html:14 -msgid "What is this?" -msgstr "" - -#: templates/about.html:16 -msgid "" -"\n" -" Welcome to\n" -" Hulurama, a REST API and GraphQL API dedicated to " -"celebrating the iconic TV show Futurama.\n" -" Created with the intention of providing a learning playground for " -"enthusiasts like you,\n" -" FuturamaAPI opens the door for exploration and experimentation with " -"new technologies.\n" -" Dive into the world of Futurama, study, and try out the latest in web" -" development.\n" -" " -msgstr "" - -#: templates/about.html:28 -msgid "Check our" -msgstr "" - -#: templates/about.html:29 -msgid "documentation" -msgstr "" - -#: templates/about.html:29 -msgid "to get started." -msgstr "" - -#: templates/about.html:31 -msgid "Key Features" -msgstr "" - -#: templates/about.html:32 -msgid "" -"\n" -"
\n" -"

Comprehensive Access

\n" -"

\n" -" Explore detailed information about characters, seasons, episodes, " -"and more.\n" -"

\n" -"

Real-time Updates

\n" -"

\n" -" Experience the excitement of Server Sent Events (SSE) as " -"FuturamaAPI\n" -" keeps you in the loop with the latest happenings.\n" -"

\n" -"

Documentation

\n" -"

Get started quickly by checking out our documentation for a " -"seamless learning experience.

\n" -"
\n" -" " -msgstr "" - -#: templates/about.html:47 -msgid "Why?" -msgstr "" - -#: templates/about.html:48 -msgid "" -"\n" -"

\n" -" FuturamaAPI was born out of the idea that even lesser-known TV shows " -"deserve dedicated projects.\n" -" It's a space to have fun with Futurama and simultaneously push the " -"boundaries of technology.\n" -" Embrace the challenge and discover the possibilities with FastAPI, " -"SSE, WebSockets, GraphQL, Hypercorn,\n" -" HTTP/2.0, and more.\n" -"

\n" -" " -msgstr "" - -#: templates/about.html:56 -msgid "Technical Stack?" -msgstr "" - -#: templates/about.html:57 -msgid "" -"\n" -"

Futurama is powered by a robust technical stack, including

\n" -"
    \n" -"
  • Python
  • \n" -"
  • FastAPI
  • \n" -"
  • Docker
  • \n" -"
  • Hypercorn
  • \n" -"
  • PostgreSQL + SQLAlchemy (async)
  • \n" -"
  • Alembic for Migrations
  • \n" -"
  • Strawberry for GraphQL
  • \n" -"
\n" -" " -msgstr "" - -#: templates/about.html:69 -msgid "Can I contribute?" -msgstr "" - -#: templates/about.html:70 -msgid "" -"\n" -"

\n" -" Absolutely! Feel free to contribute to the project.\n" -" Here is the link to get started.\n" -" Whether you're a seasoned developer or just starting, your " -"contributions are valuable.\n" -"

\n" -" " -msgstr "" - -#: templates/about.html:77 -msgid "Design?" -msgstr "" - -#: templates/about.html:78 -msgid "" -"\n" -"

\n" -" Admittedly, design is not my strong suit. If you have HTML skills and" -" a passion for design,\n" -" please consider creating a\n" -" pull request for this project.\n" -" Any help is greatly appreciated.\n" -"

\n" -" " -msgstr "" - -#: templates/about.html:86 -msgid "Who are you?" -msgstr "" - -#: templates/about.html:87 -msgid "" -"\n" -"

\n" -" I'm Ivan " -"Koldakov,\n" -" a Python developer and enthusiast.\n" -" Connect with me on LinkedIn\n" -" to stay in the loop and share your thoughts.\n" -"

\n" -" " -msgstr "" - -#: templates/base.html:8 -msgid "FB00002" -msgstr "Welcome" - -#: templates/base.html:62 -msgid "FB00011" -msgstr "GraphQL" - -#: templates/base.html:95 -#, python-format -msgid "" -"Join our community today! Already, %(user_count)s have registered, and " -"we're excited to welcome even more members." -msgstr "" - -#: templates/index.html:31 -msgid "FB00004" -msgstr "Gender" - -#: templates/index.html:33 -msgid "FB00005" -msgstr "Status" - -#: templates/index.html:47 -msgid "FB00006" -msgstr "View" diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..a660a3a --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2004 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "aiosmtplib" +version = "2.0.2" +description = "asyncio SMTP client" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "aiosmtplib-2.0.2-py3-none-any.whl", hash = "sha256:1e631a7a3936d3e11c6a144fb8ffd94bb4a99b714f2cb433e825d88b698e37bc"}, + {file = "aiosmtplib-2.0.2.tar.gz", hash = "sha256:138599a3227605d29a9081b646415e9e793796ca05322a78f69179f0135016a3"}, +] + +[package.extras] +docs = ["sphinx (>=5.3.0,<6.0.0)", "sphinx_autodoc_typehints (>=1.7.0,<2.0.0)"] +uvloop = ["uvloop (>=0.14,<0.15)", "uvloop (>=0.14,<0.15)", "uvloop (>=0.17,<0.18)"] + +[[package]] +name = "alembic" +version = "1.13.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, + {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.4.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "asyncpg" +version = "0.29.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, + {file = 
"asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, + {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, + {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, + {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, + {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"}, + {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"}, + {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"}, + {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"}, + {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"}, + {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"}, + {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"}, + {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"}, + {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"}, + {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"}, + {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, + {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, + {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, + {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, +] + +[package.extras] +docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "boto3" +version = "1.34.117" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.117-py3-none-any.whl", hash = "sha256:1506589e30566bbb2f4997b60968ff7d4ef8a998836c31eedd36437ac3b7408a"}, + {file = "boto3-1.34.117.tar.gz", hash = "sha256:c8a383b904d6faaf7eed0c06e31b423db128e4c09ce7bd2afc39d1cd07030a51"}, +] + +[package.dependencies] +botocore = ">=1.34.117,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + 
+[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.34.117" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.117-py3-none-any.whl", hash = "sha256:26a431997f882bcdd1e835f44c24b2a1752b1c4e5183c2ce62999ce95d518d6c"}, + {file = "botocore-1.34.117.tar.gz", hash = "sha256:4637ca42e6c51aebc4d9a2d92f97bf4bdb042e3f7985ff31a659a11e4c170e73"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.20.9)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.1.1" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, + {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "fastapi" +version = "0.111.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.111.0-py3-none-any.whl", hash = "sha256:97ecbf994be0bcbdadedf88c3150252bed7b2087075ac99735403b1b76cc8fc0"}, + {file = "fastapi-0.111.0.tar.gz", hash = "sha256:b9db9dd147c91cb8b769f7183535773d8741dd46f9dc6676cd82eab510228cd7"}, +] + +[package.dependencies] +email_validator = ">=2.0.0" +fastapi-cli = ">=0.0.2" +httpx = ">=0.23.0" +jinja2 = ">=2.11.2" +orjson = ">=3.2.1" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +python-multipart = ">=0.0.7" +starlette = ">=0.37.2,<0.38.0" +typing-extensions = ">=4.8.0" +ujson = ">=4.0.1,<4.0.2 || >4.0.2,<4.1.0 || >4.1.0,<4.2.0 || >4.2.0,<4.3.0 || >4.3.0,<5.0.0 || >5.0.0,<5.1.0 || >5.1.0" +uvicorn = {version = ">=0.12.0", extras = ["standard"]} + +[package.extras] +all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-cli" +version = "0.0.4" +description = "Run and manage FastAPI apps from 
the command line with FastAPI CLI. 🚀" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_cli-0.0.4-py3-none-any.whl", hash = "sha256:a2552f3a7ae64058cdbb530be6fa6dbfc975dc165e4fa66d224c3d396e25e809"}, + {file = "fastapi_cli-0.0.4.tar.gz", hash = "sha256:e2e9ffaffc1f7767f488d6da34b6f5a377751c996f397902eb6abb99a67bde32"}, +] + +[package.dependencies] +typer = ">=0.12.3" + +[package.extras] +standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"] + +[[package]] +name = "fastapi-mail" +version = "1.4.1" +description = "Simple lightweight mail library for FastApi" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "fastapi_mail-1.4.1-py3-none-any.whl", hash = "sha256:fa5ef23b2dea4d3ba4587f4bbb53f8f15274124998fb4e40629b3b636c76c398"}, + {file = "fastapi_mail-1.4.1.tar.gz", hash = "sha256:9095b713bd9d3abb02fe6d7abb637502aaf680b52e177d60f96273ef6bc8bb70"}, +] + +[package.dependencies] +aiosmtplib = ">=2.0,<3.0" +blinker = ">=1.5,<2.0" +email-validator = ">=2.0,<3.0" +httpx = {version = ">=0.23,<0.24", extras = ["httpx"], optional = true, markers = "extra == \"httpx\""} +Jinja2 = ">=3.0,<4.0" +pydantic = ">=2.0,<3.0" +pydantic_settings = ">=2.0,<3.0" +starlette = ">=0.24,<1.0" + +[package.extras] +httpx = ["httpx[httpx] (>=0.23,<0.24)"] +redis = ["redis[redis] (>=4.3,<5.0)"] + +[[package]] +name = "fastapi-pagination" +version = "0.12.24" +description = "FastAPI pagination" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "fastapi_pagination-0.12.24-py3-none-any.whl", hash = "sha256:a639df7301a89414244c6763bb97cff043815cb839070b8a38c58c007cf75d48"}, + {file = "fastapi_pagination-0.12.24.tar.gz", hash = "sha256:c9c6508e0182aab679a13b1de261d4923e3b530b410500dcb271638ff714fb14"}, +] + +[package.dependencies] +fastapi = ">=0.93.0" +pydantic = ">=1.9.1" +typing-extensions = ">=4.8.0,<5.0.0" + +[package.extras] +all = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)", "beanie (>=1.25.0)", "bunnet (>=1.1.0,<2.0.0)", "databases (>=0.6.0)", "django (<5.0.0)", "mongoengine (>=0.23.1,<0.29.0)", "motor (>=2.5.1,<4.0.0)", "orm (>=0.3.1)", "ormar (>=0.11.2)", "piccolo (>=0.89,<0.122)", "pony (>=0.7.16,<0.8.0)", "scylla-driver (>=3.25.6,<4.0.0)", "sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.15)", "tortoise-orm (>=0.16.18,<0.21.0)"] +asyncpg = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)"] +beanie = ["beanie (>=1.25.0)"] +bunnet = ["bunnet (>=1.1.0,<2.0.0)"] +databases = ["databases (>=0.6.0)"] +django = ["databases (>=0.6.0)", "django (<5.0.0)"] +mongoengine = ["mongoengine (>=0.23.1,<0.29.0)"] +motor = ["motor (>=2.5.1,<4.0.0)"] +orm = ["databases (>=0.6.0)", "orm (>=0.3.1)"] +ormar = ["ormar (>=0.11.2)"] +piccolo = ["piccolo (>=0.89,<0.122)"] +scylla-driver = ["scylla-driver (>=3.25.6,<4.0.0)"] +sqlalchemy = ["SQLAlchemy (>=1.3.20)", "sqlakeyset (>=2.0.1680321678,<3.0.0)"] +sqlmodel = ["sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.15)"] +tortoise = ["tortoise-orm (>=0.16.18,<0.21.0)"] + +[[package]] +name = "fastapi-storages" +version = "0.3.0" +description = "FastAPI Storages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_storages-0.3.0-py3-none-any.whl", hash = "sha256:91adb41a80fdef2a84c0f8244c27ade7ff8bd5db9b7fa95c496c06c03e192477"}, + {file = "fastapi_storages-0.3.0.tar.gz", hash = "sha256:f784335fff9cd163b783e842da04c6d9ed1b306fce8995fda109b170d6d453df"}, +] + +[package.dependencies] +boto3 = ">=1.25,<2.0" + +[package.extras] +full = ["peewee (>=3)", "pillow (>=9.4,<10.0)", 
"sqlalchemy (>=1.4)"] + +[[package]] +name = "filelock" +version = "3.14.0" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = 
"greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = 
"sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = 
"0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "hypercorn" +version = "0.16.0" +description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hypercorn-0.16.0-py3-none-any.whl", hash = "sha256:929e45c4acde3fbf7c58edf55336d30a009d2b4cb1f1eb96e6a515d61b663f58"}, + {file = "hypercorn-0.16.0.tar.gz", hash = "sha256:3b17d1dcf4992c1f262d9f9dd799c374125d0b9a8e40e1e2d11e2938b0adfe03"}, +] + +[package.dependencies] +h11 = "*" +h2 = ">=3.1.0" +priority = "*" +uvloop = {version = "*", optional = true, markers = "platform_system != \"Windows\" and extra == \"uvloop\""} +wsproto = ">=0.14.0" + +[package.extras] +docs = ["pydata_sphinx_theme", "sphinxcontrib_mermaid"] +h3 = ["aioquic (>=0.9.0,<1.0)"] +trio = ["exceptiongroup (>=1.1.0)", "trio (>=0.22.0)"] +uvloop = ["uvloop"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "identify" +version = "2.5.36" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, +] + +[[package]] +name = "orjson" +version = "3.10.3" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, + {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, + {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, + {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, + {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, + {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, + {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, + {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, + {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, + {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, + {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, + {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, + {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, + {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, + {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, + {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, + {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, + {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, + {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, + {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, + {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, + {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, + {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, + {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, + {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, + {file = "orjson-3.10.3.tar.gz", hash = 
"sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = 
"sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "priority" +version = "2.0.0" +description = "A pure-Python implementation of the HTTP/2 priority tree" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"}, + {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"}, +] + +[[package]] +name = "pydantic" +version = "2.7.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.2-py3-none-any.whl", hash = "sha256:834ab954175f94e6e68258537dc49402c4a5e9d0409b9f1b86b7e934a8372de7"}, + {file = "pydantic-2.7.2.tar.gz", hash = "sha256:71b2945998f9c9b7919a45bde9a50397b289937d215ae141c1d0903ba7149fd7"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} +pydantic-core = "2.18.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.3" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:744697428fcdec6be5670460b578161d1ffe34743a5c15656be7ea82b008197c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b40c05ced1ba4218b14986fe6f283d22e1ae2ff4c8e28881a70fb81fbfcda7"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a9a75622357076efb6b311983ff190fbfb3c12fc3a853122b34d3d358126c"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2e253af04ceaebde8eb201eb3f3e3e7e390f2d275a88300d6a1959d710539e2"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:855ec66589c68aa367d989da5c4755bb74ee92ccad4fdb6af942c3612c067e34"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d3e42bb54e7e9d72c13ce112e02eb1b3b55681ee948d748842171201a03a98a"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6ac9ffccc9d2e69d9fba841441d4259cb668ac180e51b30d3632cd7abca2b9b"}, + {file = "pydantic_core-2.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c56eca1686539fa0c9bda992e7bd6a37583f20083c37590413381acfc5f192d6"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:17954d784bf8abfc0ec2a633108207ebc4fa2df1a0e4c0c3ccbaa9bb01d2c426"}, + {file = "pydantic_core-2.18.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:98ed737567d8f2ecd54f7c8d4f8572ca7c7921ede93a2e52939416170d357812"}, + {file = "pydantic_core-2.18.3-cp310-none-win32.whl", hash = "sha256:9f9e04afebd3ed8c15d67a564ed0a34b54e52136c6d40d14c5547b238390e779"}, + 
{file = "pydantic_core-2.18.3-cp310-none-win_amd64.whl", hash = "sha256:45e4ffbae34f7ae30d0047697e724e534a7ec0a82ef9994b7913a412c21462a0"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9ebe8231726c49518b16b237b9fe0d7d361dd221302af511a83d4ada01183ab"}, + {file = "pydantic_core-2.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b8e20e15d18bf7dbb453be78a2d858f946f5cdf06c5072453dace00ab652e2b2"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d9ff283cd3459fa0bf9b0256a2b6f01ac1ff9ffb034e24457b9035f75587cb"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f7ef5f0ebb77ba24c9970da18b771711edc5feaf00c10b18461e0f5f5949231"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73038d66614d2e5cde30435b5afdced2b473b4c77d4ca3a8624dd3e41a9c19be"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6afd5c867a74c4d314c557b5ea9520183fadfbd1df4c2d6e09fd0d990ce412cd"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd7df92f28d351bb9f12470f4c533cf03d1b52ec5a6e5c58c65b183055a60106"}, + {file = "pydantic_core-2.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80aea0ffeb1049336043d07799eace1c9602519fb3192916ff525b0287b2b1e4"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaee40f25bba38132e655ffa3d1998a6d576ba7cf81deff8bfa189fb43fd2bbe"}, + {file = "pydantic_core-2.18.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9128089da8f4fe73f7a91973895ebf2502539d627891a14034e45fb9e707e26d"}, + {file = "pydantic_core-2.18.3-cp311-none-win32.whl", hash = "sha256:fec02527e1e03257aa25b1a4dcbe697b40a22f1229f5d026503e8b7ff6d2eda7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_amd64.whl", hash = "sha256:58ff8631dbab6c7c982e6425da8347108449321f61fe427c52ddfadd66642af7"}, + {file = "pydantic_core-2.18.3-cp311-none-win_arm64.whl", hash = "sha256:3fc1c7f67f34c6c2ef9c213e0f2a351797cda98249d9ca56a70ce4ebcaba45f4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f0928cde2ae416a2d1ebe6dee324709c6f73e93494d8c7aea92df99aab1fc40f"}, + {file = "pydantic_core-2.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bee9bb305a562f8b9271855afb6ce00223f545de3d68560b3c1649c7c5295e9"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e862823be114387257dacbfa7d78547165a85d7add33b446ca4f4fae92c7ff5c"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a36f78674cbddc165abab0df961b5f96b14461d05feec5e1f78da58808b97e7"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba905d184f62e7ddbb7a5a751d8a5c805463511c7b08d1aca4a3e8c11f2e5048"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fdd362f6a586e681ff86550b2379e532fee63c52def1c666887956748eaa326"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b214b7ee3bd3b865e963dbed0f8bc5375f49449d70e8d407b567af3222aae4"}, + {file = "pydantic_core-2.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:691018785779766127f531674fa82bb368df5b36b461622b12e176c18e119022"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:60e4c625e6f7155d7d0dcac151edf5858102bc61bf959d04469ca6ee4e8381bd"}, + {file = "pydantic_core-2.18.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4e651e47d981c1b701dcc74ab8fec5a60a5b004650416b4abbef13db23bc7be"}, + {file = "pydantic_core-2.18.3-cp312-none-win32.whl", hash = "sha256:ffecbb5edb7f5ffae13599aec33b735e9e4c7676ca1633c60f2c606beb17efc5"}, + {file = "pydantic_core-2.18.3-cp312-none-win_amd64.whl", hash = "sha256:2c8333f6e934733483c7eddffdb094c143b9463d2af7e6bd85ebcb2d4a1b82c6"}, + {file = "pydantic_core-2.18.3-cp312-none-win_arm64.whl", hash = "sha256:7a20dded653e516a4655f4c98e97ccafb13753987434fe7cf044aa25f5b7d417"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:eecf63195be644b0396f972c82598cd15693550f0ff236dcf7ab92e2eb6d3522"}, + {file = "pydantic_core-2.18.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c44efdd3b6125419c28821590d7ec891c9cb0dff33a7a78d9d5c8b6f66b9702"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e59fca51ffbdd1638b3856779342ed69bcecb8484c1d4b8bdb237d0eb5a45e2"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70cf099197d6b98953468461d753563b28e73cf1eade2ffe069675d2657ed1d5"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63081a49dddc6124754b32a3774331467bfc3d2bd5ff8f10df36a95602560361"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370059b7883485c9edb9655355ff46d912f4b03b009d929220d9294c7fd9fd60"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a64faeedfd8254f05f5cf6fc755023a7e1606af3959cfc1a9285744cc711044"}, + {file = "pydantic_core-2.18.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19d2e725de0f90d8671f89e420d36c3dd97639b98145e42fcc0e1f6d492a46dc"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:67bc078025d70ec5aefe6200ef094576c9d86bd36982df1301c758a9fff7d7f4"}, + {file = "pydantic_core-2.18.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf952c3f4100e203cbaf8e0c907c835d3e28f9041474e52b651761dc248a3c0"}, + {file = "pydantic_core-2.18.3-cp38-none-win32.whl", hash = "sha256:9a46795b1f3beb167eaee91736d5d17ac3a994bf2215a996aed825a45f897558"}, + {file = "pydantic_core-2.18.3-cp38-none-win_amd64.whl", hash = "sha256:200ad4e3133cb99ed82342a101a5abf3d924722e71cd581cc113fe828f727fbc"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:304378b7bf92206036c8ddd83a2ba7b7d1a5b425acafff637172a3aa72ad7083"}, + {file = "pydantic_core-2.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c826870b277143e701c9ccf34ebc33ddb4d072612683a044e7cce2d52f6c3fef"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e201935d282707394f3668380e41ccf25b5794d1b131cdd96b07f615a33ca4b1"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5560dda746c44b48bf82b3d191d74fe8efc5686a9ef18e69bdabccbbb9ad9442"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b32c2a1f8032570842257e4c19288eba9a2bba4712af542327de9a1204faff8"}, 
+ {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:929c24e9dea3990bc8bcd27c5f2d3916c0c86f5511d2caa69e0d5290115344a9"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a8376fef60790152564b0eab376b3e23dd6e54f29d84aad46f7b264ecca943"}, + {file = "pydantic_core-2.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dccf3ef1400390ddd1fb55bf0632209d39140552d068ee5ac45553b556780e06"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41dbdcb0c7252b58fa931fec47937edb422c9cb22528f41cb8963665c372caf6"}, + {file = "pydantic_core-2.18.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:666e45cf071669fde468886654742fa10b0e74cd0fa0430a46ba6056b24fb0af"}, + {file = "pydantic_core-2.18.3-cp39-none-win32.whl", hash = "sha256:f9c08cabff68704a1b4667d33f534d544b8a07b8e5d039c37067fceb18789e78"}, + {file = "pydantic_core-2.18.3-cp39-none-win_amd64.whl", hash = "sha256:4afa5f5973e8572b5c0dcb4e2d4fda7890e7cd63329bd5cc3263a25c92ef0026"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:77319771a026f7c7d29c6ebc623de889e9563b7087911b46fd06c044a12aa5e9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:df11fa992e9f576473038510d66dd305bcd51d7dd508c163a8c8fe148454e059"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d531076bdfb65af593326ffd567e6ab3da145020dafb9187a1d131064a55f97c"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33ce258e4e6e6038f2b9e8b8a631d17d017567db43483314993b3ca345dcbbb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f9cd7f5635b719939019be9bda47ecb56e165e51dd26c9a217a433e3d0d59a9"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cd4a032bb65cc132cae1fe3e52877daecc2097965cd3914e44fbd12b00dae7c5"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f2718430098bcdf60402136c845e4126a189959d103900ebabb6774a5d9fdb"}, + {file = "pydantic_core-2.18.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0037a92cf0c580ed14e10953cdd26528e8796307bb8bb312dc65f71547df04d"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b95a0972fac2b1ff3c94629fc9081b16371dad870959f1408cc33b2f78ad347a"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a62e437d687cc148381bdd5f51e3e81f5b20a735c55f690c5be94e05da2b0d5c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b367a73a414bbb08507da102dc2cde0fa7afe57d09b3240ce82a16d608a7679c"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ecce4b2360aa3f008da3327d652e74a0e743908eac306198b47e1c58b03dd2b"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4435b8d83f0c9561a2a9585b1de78f1abb17cb0cef5f39bf6a4b47d19bafe3"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:616221a6d473c5b9aa83fa8982745441f6a4a62a66436be9445c65f241b86c94"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:7e6382ce89a92bc1d0c0c5edd51e931432202b9080dc921d8d003e616402efd1"}, + {file = "pydantic_core-2.18.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff58f379345603d940e461eae474b6bbb6dab66ed9a851ecd3cb3709bf4dcf6a"}, + {file = "pydantic_core-2.18.3.tar.gz", hash = "sha256:432e999088d85c8f36b9a3f769a8e2b57aabd817bbb729a90d1fe7f18f6f1f39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.2.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"}, + {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"}, +] + +[package.dependencies] +pydantic = ">=2.3.0" +python-dotenv = ">=0.21.0" + +[package.extras] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pydash" +version = "8.0.1" +description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydash-8.0.1-py3-none-any.whl", hash = "sha256:60265bab97fd00d5afd27dfbff9b6abfa22d0a965e166476b9066d84cd44c940"}, + {file = "pydash-8.0.1.tar.gz", hash = "sha256:a24619643d3c054bfd56a9ae1cb7bd00e9774eaf369d7bb8d62b3daa2462bdbd"}, +] + +[package.dependencies] +typing-extensions = ">3.10,<4.6.0 || >4.6.0" + +[package.extras] +dev = ["build", "coverage", "furo", "invoke", "mypy", "pytest", "pytest-cov", "pytest-mypy-testing", "ruff", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pytest" +version = "8.2.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, + {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.9" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"}, + {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"}, +] + +[package.extras] +dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest 
(==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.30" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"}, + {file = 
"SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash 
= "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"}, + {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"}, + {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sse-starlette" +version = "2.1.0" +description = "SSE plugin for Starlette" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sse_starlette-2.1.0-py3-none-any.whl", hash = "sha256:ea92bcb366c12482c1e23cab6b5afed19eb1320efe9ddfba8a0cf1f7f73ffba9"}, + {file = "sse_starlette-2.1.0.tar.gz", hash = "sha256:ffff6e7d948f925f347e662be77af5783a6b93efce15d42c03004dcd7d6d91d3"}, +] + +[package.dependencies] +anyio = "*" +starlette = "*" +uvicorn = "*" + +[package.extras] +examples = ["fastapi"] + +[[package]] +name = "starlette" +version = "0.37.2" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, + {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "strawberry-graphql" +version = "0.230.0" +description = "A library for creating GraphQL APIs" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "strawberry_graphql-0.230.0-py3-none-any.whl", hash = "sha256:83700255420102d7192a31223e165f83b140191f7800d18314e533dc73b723f8"}, + {file = "strawberry_graphql-0.230.0.tar.gz", hash = "sha256:34672bab677ce35fe92837dc6c2f194b018daa5b11cf330e71104bec18dc2922"}, +] + +[package.dependencies] +graphql-core = ">=3.2.0,<3.3.0" +python-dateutil = ">=2.7.0,<3.0.0" +typing-extensions = ">=4.5.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.7.4.post0,<4.0.0)"] +asgi = ["python-multipart (>=0.0.7)", "starlette (>=0.18.0)"] +chalice = ["chalice (>=1.22,<2.0)"] +channels = ["asgiref (>=3.2,<4.0)", "channels (>=3.0.5)"] +cli = ["graphlib_backport", "libcst (>=0.4.7)", "pygments (>=2.3,<3.0)", "rich (>=12.0.0)", "typer (>=0.7.0)"] +debug = ["libcst (>=0.4.7)", "rich (>=12.0.0)"] +debug-server = ["libcst (>=0.4.7)", "pygments (>=2.3,<3.0)", "python-multipart (>=0.0.7)", "rich (>=12.0.0)", "starlette (>=0.18.0)", "typer (>=0.7.0)", "uvicorn (>=0.11.6)"] +django = ["Django (>=3.2)", "asgiref (>=3.2,<4.0)"] +fastapi = ["fastapi (>=0.65.2)", "python-multipart (>=0.0.7)"] +flask = ["flask (>=1.1)"] +litestar = ["litestar (>=2)"] +opentelemetry = ["opentelemetry-api (<2)", "opentelemetry-sdk (<2)"] +pydantic = ["pydantic (>1.6.1)"] +pyinstrument = ["pyinstrument (>=4.0.0)"] +quart = ["quart (>=0.19.3)"] +sanic = ["sanic (>=20.12.2)"] +starlite = ["starlite (>=1.48.0)"] + +[[package]] +name = "typer" +version = "0.12.3" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, + {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "typing-extensions" +version = "4.12.1" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, + {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, +] + +[[package]] +name = "ujson" +version = "5.10.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + 
{file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.30.0" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.30.0-py3-none-any.whl", hash = "sha256:78fa0b5f56abb8562024a59041caeb555c86e48d0efdd23c3fe7de7a4075bdab"}, + {file = "uvicorn-0.30.0.tar.gz", hash = "sha256:f678dec4fa3a39706bbf49b9ec5fc40049d42418716cea52b53f07828a60aa37"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.19.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = 
"uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "virtualenv" +version = "20.26.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = 
">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "watchfiles" +version = "0.22.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"}, + {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"}, + {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"}, + {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"}, + {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"}, + {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"}, + {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"}, + {file = 
"watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"}, + {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"}, + {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"}, + {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"}, + {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"}, + {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"}, + {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"}, + {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"}, + {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"}, + {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"}, + {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"}, + {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d47e9ef1a94cc7a536039e46738e17cce058ac1593b2eccdede8bf72e45f372a"}, + {file = "watchfiles-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:28f393c1194b6eaadcdd8f941307fc9bbd7eb567995232c830f6aef38e8a6e88"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd64f3a4db121bc161644c9e10a9acdb836853155a108c2446db2f5ae1778c3d"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2abeb79209630da981f8ebca30a2c84b4c3516a214451bfc5f106723c5f45843"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc382083afba7918e32d5ef12321421ef43d685b9a67cc452a6e6e18920890e"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d048ad5d25b363ba1d19f92dcf29023988524bee6f9d952130b316c5802069cb"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:103622865599f8082f03af4214eaff90e2426edff5e8522c8f9e93dc17caee13"}, + {file = "watchfiles-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e1f3cf81f1f823e7874ae563457828e940d75573c8fbf0ee66818c8b6a9099"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8597b6f9dc410bdafc8bb362dac1cbc9b4684a8310e16b1ff5eee8725d13dcd6"}, + {file = "watchfiles-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b04a2cbc30e110303baa6d3ddce8ca3664bc3403be0f0ad513d1843a41c97d1"}, + {file = "watchfiles-0.22.0-cp38-none-win32.whl", hash = "sha256:b610fb5e27825b570554d01cec427b6620ce9bd21ff8ab775fc3a32f28bba63e"}, + {file = "watchfiles-0.22.0-cp38-none-win_amd64.whl", hash = "sha256:fe82d13461418ca5e5a808a9e40f79c1879351fcaeddbede094028e74d836e86"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3973145235a38f73c61474d56ad6199124e7488822f3a4fc97c72009751ae3b0"}, + {file = "watchfiles-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:280a4afbc607cdfc9571b9904b03a478fc9f08bbeec382d648181c695648202f"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0d883351a34c01bd53cfa75cd0292e3f7e268bacf2f9e33af4ecede7e21d1d"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9165bcab15f2b6d90eedc5c20a7f8a03156b3773e5fb06a790b54ccecdb73385"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc1b9b56f051209be458b87edb6856a449ad3f803315d87b2da4c93b43a6fe72"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc1fc25a1dedf2dd952909c8e5cb210791e5f2d9bc5e0e8ebc28dd42fed7562"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc92d2d2706d2b862ce0568b24987eba51e17e14b79a1abcd2edc39e48e743c8"}, + {file = "watchfiles-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97b94e14b88409c58cdf4a8eaf0e67dfd3ece7e9ce7140ea6ff48b0407a593ec"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96eec15e5ea7c0b6eb5bfffe990fc7c6bd833acf7e26704eb18387fb2f5fd087"}, + {file = "watchfiles-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:28324d6b28bcb8d7c1041648d7b63be07a16db5510bea923fc80b91a2a6cbed6"}, + {file = "watchfiles-0.22.0-cp39-none-win32.whl", hash = "sha256:8c3e3675e6e39dc59b8fe5c914a19d30029e36e9f99468dddffd432d8a7b1c93"}, + {file = "watchfiles-0.22.0-cp39-none-win_amd64.whl", hash = 
"sha256:25c817ff2a86bc3de3ed2df1703e3d24ce03479b27bb4527c57e722f8554d971"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, + {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, + {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, + {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, + {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websockets" +version = "12.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = 
"websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[[package]] +name = "wsproto" +version = "1.2.0" 
+description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "65edde0001bccd4ea92cd760138a136e9122364ebf3ecc276ea4e423d3270c52" diff --git a/pyproject.toml b/pyproject.toml index d3d66f2..7012d3e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,38 @@ -[project] -name = "FuturamaAPI" -requires-python = ">= 3.12" - -[tool.pytest.ini_options] -addopts = "-ra -p configs.plugins.env_vars" -pythonpath = [ - "." -] +[tool.poetry] +name = "futuramaapi" +version = "1.0.0" +description = "Inspired REST and GraphQL API based on TV show Futurama" +authors = ["Ivan Koldakov "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +fastapi = "^0.111.0" +hypercorn = {extras = ["uvloop"], version = "^0.16.0"} +sqlalchemy = "^2.0.29" +pydantic-settings = "^2.2.1" +alembic = "^1.13.1" +pydantic = {extras = ["email"], version = "^2.7.1"} +fastapi-storages = "^0.3.0" +asyncpg = "^0.29.0" +pillow = "^10.3.0" +fastapi-pagination = "^0.12.24" +anyio = "^4.3.0" +sse-starlette = "^2.1.0" +pydash = "^8.0.1" +pyjwt = "^2.8.0" +fastapi-mail = {extras = ["httpx"], version = "^1.4.1"} +strawberry-graphql = "^0.230.0" + +[tool.poetry.group.dev.dependencies] +pre-commit = "^3.7.0" + +[tool.poetry.group.test.dependencies] +pytest = "^8.2.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" [tool.ruff] # Exclude a variety of commonly ignored directories. @@ -42,7 +68,7 @@ exclude = [ fix = true show-fixes = true -show-source = true +output-format = "full" line-length = 120 indent-width = 4 @@ -111,7 +137,7 @@ line-ending = "auto" # enabled. 
# docstring-code-line-length = "dynamic" -[tool.ruff.mccabe] +[tool.ruff.lint.mccabe] max-complexity = 10 [tool.ruff.lint.extend-per-file-ignores] @@ -125,7 +151,6 @@ follow_imports = "skip" ignore_missing_imports = true check_untyped_defs = true files = [ - "app", - "alembic", - "configs", + "futuramaapi", + "tests", ] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index bea8587..0000000 --- a/requirements.txt +++ /dev/null @@ -1,72 +0,0 @@ -aiosmtplib==2.0.2 -alembic==1.12.1 -annotated-types==0.6.0 -anyio==3.7.1 -asyncpg==0.29.0 -Babel==2.13.1 -blinker==1.7.0 -boto3==1.33.10 -botocore==1.33.10 -certifi==2023.11.17 -cffi==1.16.0 -cfgv==3.4.0 -click==8.1.7 -cryptography==42.0.4 -distlib==0.3.7 -dnspython==2.5.0 -ecdsa==0.18.0 -email-validator==2.1.0.post1 -fastapi==0.109.2 -fastapi-mail==1.4.1 -fastapi-pagination==0.12.13 -fastapi-storages==0.2.1 -filelock==3.13.1 -graphql-core==3.2.3 -greenlet==3.0.1 -h11==0.14.0 -h2==4.1.0 -hpack==4.0.0 -httpcore==0.16.3 -httpx==0.23.3 -Hypercorn==0.15.0 -hyperframe==6.0.1 -identify==2.5.32 -idna==3.4 -iniconfig==2.0.0 -Jinja2==3.1.3 -jmespath==1.0.1 -Mako==1.3.0 -MarkupSafe==2.1.3 -nodeenv==1.8.0 -packaging==23.2 -pillow==10.2.0 -platformdirs==4.0.0 -pluggy==1.3.0 -pre-commit==3.5.0 -priority==2.0.0 -pyasn1==0.5.1 -pycparser==2.21 -pydantic==2.5.1 -pydantic-settings==2.1.0 -pydantic_core==2.14.3 -pytest==7.4.3 -python-dateutil==2.8.2 -python-dotenv==1.0.1 -python-jose==3.3.0 -python-multipart==0.0.9 -PyYAML==6.0.1 -rfc3986==1.5.0 -rsa==4.9 -s3transfer==0.8.2 -setuptools==69.0.2 -six==1.16.0 -sniffio==1.3.0 -SQLAlchemy==2.0.23 -sse-starlette==1.8.2 -starlette==0.36.3 -strawberry-graphql==0.217.1 -typing_extensions==4.8.0 -urllib3==2.0.7 -uvloop==0.19.0 -virtualenv==20.24.7 -wsproto==1.2.0 diff --git a/static/intro-image.jpg b/static/intro-image.jpg new file mode 100644 index 0000000..07d63e3 Binary files /dev/null and b/static/intro-image.jpg differ diff --git a/templates/about.html b/templates/about.html index 5641cfb..3dd855e 100644 --- a/templates/about.html +++ b/templates/about.html @@ -2,7 +2,7 @@ {% set active_page = "about" %} -{% block titile %}{{ _("FB00012") }}{% endblock %} | {{ _("FB00001") }} +{% block titile %}About{% endblock %} | Futurama API {% block main_info %}{% endblock %} @@ -10,10 +10,9 @@
- {{ _("FB00012") }}
- {{ _("What is this?") }}
+ About
+ What is this?
- {% trans %} Welcome to {{ _("What is this?") }} Created with the intention of providing a learning playground for enthusiasts like you, FuturamaAPI opens the door for exploration and experimentation with new technologies. Dive into the world of Futurama, study, and try out the latest in web development. - {% endtrans %}

- {{ _("Check our") }} - {{ _("documentation") }} {{ _("to get started.") }} + Check our + documentation to get started.

- {{ _("Key Features") }}
- {% trans %}
+
+ Or you can start with Swagger Playground.
+
+ Key Features
Comprehensive Access

@@ -43,18 +43,14 @@

Real-time Updates

Documentation

Get started quickly by checking out our documentation for a seamless learning experience.

- {% endtrans %}
- {{ _("Why?") }}
- {% trans %}
+ Why?
FuturamaAPI was born out of the idea that even lesser-known TV shows deserve dedicated projects. It's a space to have fun with Futurama and simultaneously push the boundaries of technology. Embrace the challenge and discover the possibilities with FastAPI, SSE, WebSockets, GraphQL, Hypercorn, HTTP/2.0, and more.

- {% endtrans %}
- {{ _("Technical Stack?") }}
- {% trans %}
+ Technical Stack?
Futurama is powered by a robust technical stack, including

  • Python
  • @@ -65,32 +61,25 @@

    {{ _("Technical Stack?") }}

  • Alembic for Migrations
  • Strawberry for GraphQL
- {% endtrans %}
- {{ _("Can I contribute?") }}
- {% trans %}
+ Can I contribute?
Absolutely! Feel free to contribute to the project. Here is the link to get started. Whether you're a seasoned developer or just starting, your contributions are valuable.

- {% endtrans %}
- {{ _("Design?") }}
- {% trans %}
+ Design?
Admittedly, design is not my strong suit. If you have HTML skills and a passion for design, please consider creating a pull request for this project. Any help is greatly appreciated.

- {% endtrans %}
- {{ _("Who are you?") }}
- {% trans %}
+ Who are you?
I'm Ivan Koldakov, a Python developer and enthusiast. Connect with me on LinkedIn to stay in the loop and share your thoughts.

- {% endtrans %}
{% endblock %} diff --git a/templates/base.html b/templates/base.html index d102ec3..2098453 100644 --- a/templates/base.html +++ b/templates/base.html @@ -5,7 +5,7 @@ - {% block titile %}{{ _("FB00002") }}{% endblock %} | {{ _("FB00001") }} + {% block title %}Welcome{% endblock %} | Futurama API {% block extra_styles %}{% endblock %} @@ -22,7 +22,7 @@ class="navbar-brand" href="{{ relative_path_for('root') }}" > - {{ _( "FB00001" ) }} + Futurama API