diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..5f4c751baf --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,26 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python +{ + "name": "Python 3", + "service": "python-3.12-faststream-studio", + "dockerComposeFile": [ + "./docker-compose.yaml" + ], + "shutdownAction": "stopCompose", + "forwardPorts": [ + "kafka-faststream:9092" + ], + "updateContentCommand": "bash .devcontainer/setup.sh", + "workspaceFolder": "/workspaces/faststream" + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "pip3 install --user -r requirements.txt", + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" +} diff --git a/.devcontainer/docker-compose.yaml b/.devcontainer/docker-compose.yaml new file mode 100644 index 0000000000..09e8724f22 --- /dev/null +++ b/.devcontainer/docker-compose.yaml @@ -0,0 +1,33 @@ +version: '3' + +services: + python-3.12-faststream-studio: # nosemgrep + image: mcr.microsoft.com/devcontainers/python:3.12 + container_name: python-3.12-faststream-studio + volumes: + - ../:/workspaces/faststream:cached + command: sleep infinity + network_mode: "host" + + kafka-faststream: # nosemgrep + image: bitnami/kafka:3.5.0 + container_name: kafka-faststream + ports: + - 9092:9092 + environment: + KAFKA_ENABLE_KRAFT: "true" + KAFKA_CFG_NODE_ID: "1" + KAFKA_CFG_PROCESS_ROLES: "broker,controller" + KAFKA_CFG_CONTROLLER_LISTENER_NAMES: "CONTROLLER" + KAFKA_CFG_LISTENERS: "PLAINTEXT://:9092,CONTROLLER://:9093" + KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT" + KAFKA_CFG_ADVERTISED_LISTENERS: "PLAINTEXT://127.0.0.1:9092" + KAFKA_BROKER_ID: "1" + KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: "1@kafka-faststream:9093" + ALLOW_PLAINTEXT_LISTENER: "true" + networks: + - faststream-network + +networks: + faststream-network: + name: "${USER}-faststream-network" \ No newline at end of file diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh new file mode 100644 index 0000000000..fb0b3f4fc4 --- /dev/null +++ b/.devcontainer/setup.sh @@ -0,0 +1,7 @@ +pip install uv + +uv venv .venv + +uv pip install -e ".[dev]" + +source .venv/bin/activate \ No newline at end of file diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml deleted file mode 100644 index a72abdca78..0000000000 --- a/.github/dependabot.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# To get started with Dependabot version updates, you'll need to specify which -# package ecosystems to update and where the package manifests are located. 
-# Please see the documentation for all configuration options: -# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates - -version: 2 -updates: - # GitHub Actions - - package-ecosystem: "github-actions" - directory: "/" - schedule: - interval: "weekly" - # Python - - package-ecosystem: "pip" # See documentation for possible values - directory: "/" # Location of package manifests - schedule: - interval: "weekly" diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..b1cf7f5185 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + - package-ecosystem: "devcontainers" + directory: "/" + schedule: + interval: weekly diff --git a/.github/workflows/pr_tests.yaml b/.github/workflows/pr_tests.yaml index 785c19c690..e7a87d2562 100644 --- a/.github/workflows/pr_tests.yaml +++ b/.github/workflows/pr_tests.yaml @@ -82,7 +82,7 @@ jobs: run: pip install --pre "pydantic>=2.0.0b2,<3.0.0" - run: mkdir coverage - name: Test - run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not confluent and not rabbit and not redis)) or (not nats and not kafka and not confluent and not rabbit and not redis)" + run: bash scripts/test.sh -vv -m "(slow and (not nats and not kafka and not confluent and not rabbit and not redis)) or (not nats and not kafka and not confluent and not rabbit and not redis)" env: COVERAGE_FILE: coverage/.coverage.${{ runner.os }}-py${{ matrix.python-version }}-${{ matrix.pydantic-version }} CONTEXT: ${{ runner.os }}-py${{ matrix.python-version }}-${{ matrix.pydantic-version }} @@ -269,7 +269,7 @@ jobs: run: pip install .[optionals,testing] - run: mkdir coverage - name: Test - run: bash scripts/test.sh -m "(slow and confluent) or confluent" + run: bash scripts/test.sh -vv -m "(slow and confluent) or confluent" env: COVERAGE_FILE: coverage/.coverage.confluent-py CONTEXT: confluent-py diff --git a/docs/create_api_docs.py b/docs/create_api_docs.py index 883d3ef787..1d3eb224e5 100644 --- a/docs/create_api_docs.py +++ b/docs/create_api_docs.py @@ -22,6 +22,8 @@ PUBLIC_API_FILES = [ + "faststream/opentelemetry/__init__.py", + "faststream/asgi/__init__.py", "faststream/asyncapi/__init__.py", "faststream/__init__.py", "faststream/nats/__init__.py", diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 393fce9d16..f37ab4e794 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -41,6 +41,7 @@ search: - [FastAPI Plugin](getting-started/integrations/fastapi/index.md) - [Django](getting-started/integrations/django/index.md) - [CLI commands](getting-started/cli/index.md) + - [ASGI](getting-started/asgi.md) - [OpenTelemetry](getting-started/opentelemetry/index.md) - [Logging](getting-started/logging.md) - [Config Management](getting-started/config/index.md) @@ -120,6 +121,12 @@ search: - [Response](public_api/faststream/Response.md) - [TestApp](public_api/faststream/TestApp.md) - [apply_types](public_api/faststream/apply_types.md) + - asgi + - [AsgiFastStream](public_api/faststream/asgi/AsgiFastStream.md) + - [AsgiResponse](public_api/faststream/asgi/AsgiResponse.md) + - [get](public_api/faststream/asgi/get.md) + - [make_asyncapi_asgi](public_api/faststream/asgi/make_asyncapi_asgi.md) + - [make_ping_asgi](public_api/faststream/asgi/make_ping_asgi.md) - asyncapi - 
[get_app_schema](public_api/faststream/asyncapi/get_app_schema.md) - [get_asyncapi_html](public_api/faststream/asyncapi/get_asyncapi_html.md) @@ -131,6 +138,7 @@ search: - [KafkaRouter](public_api/faststream/confluent/KafkaRouter.md) - [TestApp](public_api/faststream/confluent/TestApp.md) - [TestKafkaBroker](public_api/faststream/confluent/TestKafkaBroker.md) + - [TopicPartition](public_api/faststream/confluent/TopicPartition.md) - kafka - [KafkaBroker](public_api/faststream/kafka/KafkaBroker.md) - [KafkaPublisher](public_api/faststream/kafka/KafkaPublisher.md) @@ -164,6 +172,9 @@ search: - [StreamSource](public_api/faststream/nats/StreamSource.md) - [TestApp](public_api/faststream/nats/TestApp.md) - [TestNatsBroker](public_api/faststream/nats/TestNatsBroker.md) + - opentelemetry + - [TelemetryMiddleware](public_api/faststream/opentelemetry/TelemetryMiddleware.md) + - [TelemetrySettingsProvider](public_api/faststream/opentelemetry/TelemetrySettingsProvider.md) - rabbit - [ExchangeType](public_api/faststream/rabbit/ExchangeType.md) - [RabbitBroker](public_api/faststream/rabbit/RabbitBroker.md) @@ -200,6 +211,23 @@ search: - [apply_types](api/faststream/apply_types.md) - app - [FastStream](api/faststream/app/FastStream.md) + - asgi + - [AsgiFastStream](api/faststream/asgi/AsgiFastStream.md) + - [AsgiResponse](api/faststream/asgi/AsgiResponse.md) + - [get](api/faststream/asgi/get.md) + - [make_asyncapi_asgi](api/faststream/asgi/make_asyncapi_asgi.md) + - [make_ping_asgi](api/faststream/asgi/make_ping_asgi.md) + - app + - [AsgiFastStream](api/faststream/asgi/app/AsgiFastStream.md) + - factories + - [make_asyncapi_asgi](api/faststream/asgi/factories/make_asyncapi_asgi.md) + - [make_ping_asgi](api/faststream/asgi/factories/make_ping_asgi.md) + - handlers + - [get](api/faststream/asgi/handlers/get.md) + - response + - [AsgiResponse](api/faststream/asgi/response/AsgiResponse.md) + - websocket + - [WebSocketClose](api/faststream/asgi/websocket/WebSocketClose.md) - asyncapi - [get_app_schema](api/faststream/asyncapi/get_app_schema.md) - [get_asyncapi_html](api/faststream/asyncapi/get_asyncapi_html.md) @@ -214,6 +242,7 @@ search: - [get_response_schema](api/faststream/asyncapi/message/get_response_schema.md) - [parse_handler_params](api/faststream/asyncapi/message/parse_handler_params.md) - proto + - [AsyncAPIApplication](api/faststream/asyncapi/proto/AsyncAPIApplication.md) - [AsyncAPIProto](api/faststream/asyncapi/proto/AsyncAPIProto.md) - schema - [Channel](api/faststream/asyncapi/schema/Channel.md) @@ -429,6 +458,7 @@ search: - [KafkaRouter](api/faststream/confluent/KafkaRouter.md) - [TestApp](api/faststream/confluent/TestApp.md) - [TestKafkaBroker](api/faststream/confluent/TestKafkaBroker.md) + - [TopicPartition](api/faststream/confluent/TopicPartition.md) - broker - [KafkaBroker](api/faststream/confluent/broker/KafkaBroker.md) - broker @@ -441,8 +471,6 @@ search: - [AsyncConfluentConsumer](api/faststream/confluent/client/AsyncConfluentConsumer.md) - [AsyncConfluentProducer](api/faststream/confluent/client/AsyncConfluentProducer.md) - [BatchBuilder](api/faststream/confluent/client/BatchBuilder.md) - - [MsgToSend](api/faststream/confluent/client/MsgToSend.md) - - [TopicPartition](api/faststream/confluent/client/TopicPartition.md) - [check_msg_error](api/faststream/confluent/client/check_msg_error.md) - [create_topics](api/faststream/confluent/client/create_topics.md) - config @@ -496,8 +524,11 @@ search: - [KafkaRoute](api/faststream/confluent/router/KafkaRoute.md) - 
[KafkaRouter](api/faststream/confluent/router/KafkaRouter.md) - schemas + - [TopicPartition](api/faststream/confluent/schemas/TopicPartition.md) - params - [ConsumerConnectionParams](api/faststream/confluent/schemas/params/ConsumerConnectionParams.md) + - partition + - [TopicPartition](api/faststream/confluent/schemas/partition/TopicPartition.md) - security - [parse_security](api/faststream/confluent/security/parse_security.md) - subscriber diff --git a/docs/docs/en/api/faststream/confluent/client/MsgToSend.md b/docs/docs/en/api/faststream/asgi/AsgiFastStream.md similarity index 72% rename from docs/docs/en/api/faststream/confluent/client/MsgToSend.md rename to docs/docs/en/api/faststream/asgi/AsgiFastStream.md index 2891c52db0..49a94bd574 100644 --- a/docs/docs/en/api/faststream/confluent/client/MsgToSend.md +++ b/docs/docs/en/api/faststream/asgi/AsgiFastStream.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.confluent.client.MsgToSend +::: faststream.asgi.AsgiFastStream diff --git a/docs/docs/en/api/faststream/asgi/AsgiResponse.md b/docs/docs/en/api/faststream/asgi/AsgiResponse.md new file mode 100644 index 0000000000..4814f18557 --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/AsgiResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.AsgiResponse diff --git a/docs/docs/en/api/faststream/asgi/app/AsgiFastStream.md b/docs/docs/en/api/faststream/asgi/app/AsgiFastStream.md new file mode 100644 index 0000000000..9d58b9576c --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/app/AsgiFastStream.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.app.AsgiFastStream diff --git a/docs/docs/en/api/faststream/asgi/factories/make_asyncapi_asgi.md b/docs/docs/en/api/faststream/asgi/factories/make_asyncapi_asgi.md new file mode 100644 index 0000000000..e96de51b01 --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/factories/make_asyncapi_asgi.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.factories.make_asyncapi_asgi diff --git a/docs/docs/en/api/faststream/asgi/factories/make_ping_asgi.md b/docs/docs/en/api/faststream/asgi/factories/make_ping_asgi.md new file mode 100644 index 0000000000..fb163d02a1 --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/factories/make_ping_asgi.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.factories.make_ping_asgi diff --git a/docs/docs/en/api/faststream/asgi/get.md b/docs/docs/en/api/faststream/asgi/get.md new file mode 100644 index 0000000000..044c05ed81 --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/get.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.get diff --git a/docs/docs/en/api/faststream/asgi/handlers/get.md b/docs/docs/en/api/faststream/asgi/handlers/get.md new file mode 100644 index 0000000000..8f3c04a050 --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/handlers/get.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.handlers.get diff --git 
a/docs/docs/en/api/faststream/asgi/make_asyncapi_asgi.md b/docs/docs/en/api/faststream/asgi/make_asyncapi_asgi.md new file mode 100644 index 0000000000..5e57a1a2db --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/make_asyncapi_asgi.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.make_asyncapi_asgi diff --git a/docs/docs/en/api/faststream/asgi/make_ping_asgi.md b/docs/docs/en/api/faststream/asgi/make_ping_asgi.md new file mode 100644 index 0000000000..5c24aaef19 --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/make_ping_asgi.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.make_ping_asgi diff --git a/docs/docs/en/api/faststream/asgi/response/AsgiResponse.md b/docs/docs/en/api/faststream/asgi/response/AsgiResponse.md new file mode 100644 index 0000000000..037739b09d --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/response/AsgiResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.response.AsgiResponse diff --git a/docs/docs/en/api/faststream/asgi/websocket/WebSocketClose.md b/docs/docs/en/api/faststream/asgi/websocket/WebSocketClose.md new file mode 100644 index 0000000000..130ee9a59a --- /dev/null +++ b/docs/docs/en/api/faststream/asgi/websocket/WebSocketClose.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asgi.websocket.WebSocketClose diff --git a/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIApplication.md b/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIApplication.md new file mode 100644 index 0000000000..da1715119d --- /dev/null +++ b/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIApplication.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.asyncapi.proto.AsyncAPIApplication diff --git a/docs/docs/en/api/faststream/confluent/client/TopicPartition.md b/docs/docs/en/api/faststream/confluent/TopicPartition.md similarity index 70% rename from docs/docs/en/api/faststream/confluent/client/TopicPartition.md rename to docs/docs/en/api/faststream/confluent/TopicPartition.md index ab43818b7d..9b5e09bdf9 100644 --- a/docs/docs/en/api/faststream/confluent/client/TopicPartition.md +++ b/docs/docs/en/api/faststream/confluent/TopicPartition.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.confluent.client.TopicPartition +::: faststream.confluent.TopicPartition diff --git a/docs/docs/en/api/faststream/confluent/schemas/TopicPartition.md b/docs/docs/en/api/faststream/confluent/schemas/TopicPartition.md new file mode 100644 index 0000000000..0c52345b4e --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/schemas/TopicPartition.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.schemas.TopicPartition diff --git a/docs/docs/en/api/faststream/confluent/schemas/partition/TopicPartition.md b/docs/docs/en/api/faststream/confluent/schemas/partition/TopicPartition.md new file mode 100644 index 0000000000..11e0bc2b3c --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/schemas/partition/TopicPartition.md @@ -0,0 
+1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+  boost: 0.5
+---
+
+::: faststream.confluent.schemas.partition.TopicPartition
diff --git a/docs/docs/en/confluent/security.md b/docs/docs/en/confluent/security.md
index 3ced3d17fc..7a1d91e411 100644
--- a/docs/docs/en/confluent/security.md
+++ b/docs/docs/en/confluent/security.md
@@ -54,7 +54,7 @@ This chapter discusses the security options available in **FastStream** and how
 ### 4. SASLOAuthBearer Object with SSL/TLS
 
-**Purpose:** The `SASLOAuthBearer` is used for authentication using the Oauth sasl.mechanism. While using it you additionaly need to provide necessary `sasl.oauthbearer.*` values in config and provide it to `KafkaBroker`, eg. `sasl.oauthbearer.client.id`, `sasl.oauthbearer.client.secret`. Full list is available in the [confluent doc](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md){.external-link target="_blank"}
+**Purpose:** The `SASLOAuthBearer` is used for authentication using the Oauth sasl.mechanism. While using it you additionally need to provide necessary `sasl.oauthbearer.*` values in config and provide it to `KafkaBroker`, eg. `sasl.oauthbearer.client.id`, `sasl.oauthbearer.client.secret`. Full list is available in the [confluent doc](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md){.external-link target="_blank"}
 
 **Usage:**
diff --git a/docs/docs/en/getting-started/asgi.md b/docs/docs/en/getting-started/asgi.md
new file mode 100644
index 0000000000..9006c37e23
--- /dev/null
+++ b/docs/docs/en/getting-started/asgi.md
@@ -0,0 +1,144 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+  boost: 10
+---
+
+# ASGI Support
+
+Often you need not just to run your application to consume messages, but to make it an actual part of your services ecosystem with *Prometheus metrics*, K8S *liveness* and *readiness probes*, *traces* and other observability features.
+
+Unfortunately, such functionality can't be implemented by broker features alone, and you have to provide several **HTTP** endpoints in your app.
+
+Of course, you can use **FastStream** as a part of any **ASGI** framework ([integrations](./integrations/frameworks/index.md){.internal-link}), but the fewer the dependencies, the better, right?
+
+## AsgiFastStream
+
+Fortunately, we have built-in **ASGI** support. It is very limited but good enough to provide you with basic functionality for metrics and healthcheck endpoint implementation.
+
+Let's take a look at the following example:
+
+```python linenums="1" hl_lines="2 5" title="main.py"
+from faststream.nats import NatsBroker
+from faststream.asgi import AsgiFastStream
+
+broker = NatsBroker()
+app = AsgiFastStream(broker)
+```
+
+This simple example allows you to run the app using regular **ASGI** servers:
+
+```shell
+uvicorn main:app
+```
+
+It does nothing but launch the app itself as an **ASGI lifespan**.
+
+### ASGI Routes
+
+It doesn't look very helpful, so let's add some **HTTP** endpoints.
+
+First, we have already written a wrapper on top of the broker to make a ready-to-use **ASGI** healthcheck endpoint for you:
+
+```python linenums="1" hl_lines="2 9"
+from faststream.nats import NatsBroker
+from faststream.asgi import AsgiFastStream, make_ping_asgi
+
+broker = NatsBroker()
+
+app = AsgiFastStream(
+    broker,
+    asgi_routes=[
+        ("/health", make_ping_asgi(broker, timeout=5.0)),
+    ]
+)
+```
+
+!!! note
+    This `/health` endpoint calls the `#!python broker.ping()` method and returns **HTTP 204** or **HTTP 500** statuses.
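+
+For instance, nothing stops you from mounting the same factory several times, e.g. for separate probes (a minimal sketch; the probe paths and timeout values here are arbitrary choices, not a FastStream convention):
+
+```python linenums="1"
+from faststream.nats import NatsBroker
+from faststream.asgi import AsgiFastStream, make_ping_asgi
+
+broker = NatsBroker()
+
+app = AsgiFastStream(
+    broker,
+    asgi_routes=[
+        # both probes reuse the same factory, only the timeout differs
+        ("/internal/alive", make_ping_asgi(broker, timeout=1.0)),
+        ("/internal/ready", make_ping_asgi(broker, timeout=5.0)),
+    ]
+)
+```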
+
+### Custom ASGI Routes
+
+**AsgiFastStream** is able to call any **ASGI**-compatible callable objects, so you can use any endpoints from other libraries if they are compatible with the protocol.
+
+If you want to write your own simple **HTTP** endpoint, you can use our `#!python @get` decorator as in the following example:
+
+```python linenums="1" hl_lines="2 6-8 12"
+from faststream.nats import NatsBroker
+from faststream.asgi import AsgiFastStream, AsgiResponse, get
+
+broker = NatsBroker()
+
+@get
+async def liveness_ping(scope):
+    return AsgiResponse(b"", status_code=200)
+
+app = AsgiFastStream(
+    broker,
+    asgi_routes=[("/health", liveness_ping)]
+)
+```
+
+Or you can write the **ASGI** endpoint yourself (note that a raw **ASGI** callable has to send the response itself instead of returning it):
+
+```python
+async def liveness_ping(scope, receive, send):
+    response = AsgiResponse(b"", status_code=200)
+    await response(scope, receive, send)
+```
+
+!!! tip
+    You do not need to set up all routes using the `asgi_routes=[]` parameter.
+    You can also use the `#!python app.mount("/health", asgi_endpoint)` method.
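+
+For example (a sketch reusing the `liveness_ping` handler defined above):
+
+```python linenums="1"
+app = AsgiFastStream(broker)
+app.mount("/health", liveness_ping)
+```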
+
+### AsyncAPI Documentation
+
+You can also host your **AsyncAPI** documentation in the same process, without running a separate [`#!shell faststream docs serve ...`](./asyncapi/hosting.md){.internal-link} command, in the same container and runtime.
+
+Just create an `AsgiFastStream` object with a special option:
+
+```python linenums="1" hl_lines="8"
+from faststream.nats import NatsBroker
+from faststream.asgi import AsgiFastStream
+
+broker = NatsBroker()
+
+app = AsgiFastStream(
+    broker,
+    asyncapi_path="/docs",
+)
+```
+
+Now, your **AsyncAPI HTML** representation can be found at the `/docs` URL.
+
+## Other ASGI Compatibility
+
+Moreover, our wrappers can be used as ready-to-use endpoints for other **ASGI** frameworks. This can be very helpful when you are running **FastStream** in the same runtime as any other **ASGI** framework.
+
+Just follow the example below in such cases:
+
+```python linenums="1" hl_lines="6 19-20"
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+from faststream import FastStream
+from faststream.nats import NatsBroker
+from faststream.asgi import make_ping_asgi, make_asyncapi_asgi
+
+broker = NatsBroker()
+
+@asynccontextmanager
+async def start_broker(app):
+    """Start the broker with the app."""
+    async with broker:
+        await broker.start()
+        yield
+
+app = FastAPI(lifespan=start_broker)
+
+app.mount("/health", make_ping_asgi(broker, timeout=5.0))
+app.mount("/asyncapi", make_asyncapi_asgi(FastStream(broker)))
+```
diff --git a/docs/docs/navigation_template.txt b/docs/docs/navigation_template.txt
index 431bb4eb59..da4e20e45e 100644
--- a/docs/docs/navigation_template.txt
+++ b/docs/docs/navigation_template.txt
@@ -41,6 +41,7 @@ search:
 - [FastAPI Plugin](getting-started/integrations/fastapi/index.md)
 - [Django](getting-started/integrations/django/index.md)
 - [CLI commands](getting-started/cli/index.md)
+ - [ASGI](getting-started/asgi.md)
 - [OpenTelemetry](getting-started/opentelemetry/index.md)
 - [Logging](getting-started/logging.md)
 - [Config Management](getting-started/config/index.md)
diff --git a/docs/docs_src/getting_started/subscription/confluent/real_testing.py b/docs/docs_src/getting_started/subscription/confluent/real_testing.py
index 19d061c095..43973935b9 100644
--- a/docs/docs_src/getting_started/subscription/confluent/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/confluent/real_testing.py
@@ -10,7 +10,7 @@ async def test_handle():
     async with TestKafkaBroker(broker, with_real=True) as br:
         await br.publish({"name": "John", "user_id": 1}, topic="test-topic-confluent")
 
-        await handle.wait_call(timeout=10)
+        await handle.wait_call(timeout=30)
 
         handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
 
     assert handle.mock is None
@@ -20,6 +20,6 @@ async def test_validation_error():
     async with TestKafkaBroker(broker, with_real=True) as br:
         with pytest.raises(ValidationError):
             await br.publish("wrong message", topic="test-confluent-wrong-fields")
-            await wrong_handle.wait_call(timeout=10)
+            await wrong_handle.wait_call(timeout=30)
 
         wrong_handle.mock.assert_called_once_with("wrong message")
diff --git a/faststream/__about__.py b/faststream/__about__.py
index 1f865fa60f..0666ad8c80 100644
--- a/faststream/__about__.py
+++ b/faststream/__about__.py
@@ -1,6 +1,6 @@
 """Simple and fast framework to create message brokers based microservices."""
 
-__version__ = "0.5.15"
+__version__ = "0.5.16"
 
 SERVICE_NAME = f"faststream-{__version__}"
diff --git
a/faststream/_compat.py b/faststream/_compat.py index ba9d1731f0..7370f2df71 100644 --- a/faststream/_compat.py +++ b/faststream/_compat.py @@ -45,8 +45,6 @@ def json_dumps(*a: Any, **kw: Any) -> bytes: try: from fastapi import __version__ as FASTAPI_VERSION # noqa: N812 - HAS_FASTAPI = True - major, minor, *_ = map(int, FASTAPI_VERSION.split(".")) FASTAPI_V2 = major > 0 or minor > 100 FASTAPI_V106 = major > 0 or minor >= 106 @@ -70,7 +68,7 @@ def raise_fastapi_validation_error(errors: List[Any], body: AnyDict) -> Never: raise RequestValidationError(errors, ROUTER_VALIDATION_ERROR_MODEL) # type: ignore[misc] except ImportError: - HAS_FASTAPI = False + pass JsonSchemaValue = Mapping[str, Any] diff --git a/faststream/app.py b/faststream/app.py index 0c5c801c4d..e9172db2bb 100644 --- a/faststream/app.py +++ b/faststream/app.py @@ -16,6 +16,7 @@ from typing_extensions import ParamSpec from faststream._compat import ExceptionGroup +from faststream.asyncapi.proto import AsyncAPIApplication from faststream.cli.supervisors.utils import set_exit from faststream.exceptions import ValidationError from faststream.log.logging import logger @@ -48,7 +49,7 @@ ) -class FastStream: +class FastStream(AsyncAPIApplication): """A class representing a FastStream application.""" _on_startup_calling: List["AsyncFunc"] diff --git a/faststream/asgi/__init__.py b/faststream/asgi/__init__.py new file mode 100644 index 0000000000..d9006a2207 --- /dev/null +++ b/faststream/asgi/__init__.py @@ -0,0 +1,12 @@ +from faststream.asgi.app import AsgiFastStream +from faststream.asgi.factories import make_asyncapi_asgi, make_ping_asgi +from faststream.asgi.handlers import get +from faststream.asgi.response import AsgiResponse + +__all__ = ( + "AsgiFastStream", + "make_ping_asgi", + "make_asyncapi_asgi", + "AsgiResponse", + "get", +) diff --git a/faststream/asgi/app.py b/faststream/asgi/app.py new file mode 100644 index 0000000000..ac09a500bf --- /dev/null +++ b/faststream/asgi/app.py @@ -0,0 +1,141 @@ +import traceback +from contextlib import asynccontextmanager +from typing import TYPE_CHECKING, Any, AsyncIterator, Optional, Sequence, Tuple, Union + +import anyio + +from faststream.app import FastStream +from faststream.asgi.factories import make_asyncapi_asgi +from faststream.asgi.response import AsgiResponse +from faststream.asgi.websocket import WebSocketClose +from faststream.log.logging import logger + +if TYPE_CHECKING: + from faststream.asgi.types import ASGIApp, Receive, Scope, Send + from faststream.asyncapi.schema import ( + Contact, + ContactDict, + ExternalDocs, + ExternalDocsDict, + License, + LicenseDict, + Tag, + TagDict, + ) + from faststream.broker.core.usecase import BrokerUsecase + from faststream.types import ( + AnyDict, + AnyHttpUrl, + Lifespan, + LoggerProto, + ) + + +class AsgiFastStream(FastStream): + def __init__( + self, + broker: Optional["BrokerUsecase[Any, Any]"] = None, + /, + asgi_routes: Sequence[Tuple[str, "ASGIApp"]] = (), + asyncapi_path: Optional[str] = None, + # regular broker args + logger: Optional["LoggerProto"] = logger, + lifespan: Optional["Lifespan"] = None, + # AsyncAPI args, + title: str = "FastStream", + version: str = "0.1.0", + description: str = "", + terms_of_service: Optional["AnyHttpUrl"] = None, + license: Optional[Union["License", "LicenseDict", "AnyDict"]] = None, + contact: Optional[Union["Contact", "ContactDict", "AnyDict"]] = None, + tags: Optional[Sequence[Union["Tag", "TagDict", "AnyDict"]]] = None, + external_docs: Optional[ + Union["ExternalDocs", 
"ExternalDocsDict", "AnyDict"] + ] = None, + identifier: Optional[str] = None, + ) -> None: + super().__init__( + broker=broker, + logger=logger, + lifespan=lifespan, + title=title, + version=version, + description=description, + terms_of_service=terms_of_service, + license=license, + contact=contact, + tags=tags, + external_docs=external_docs, + identifier=identifier, + ) + + self.routes = list(asgi_routes) + if asyncapi_path: + self.mount(asyncapi_path, make_asyncapi_asgi(self)) + + def mount(self, path: str, route: "ASGIApp") -> None: + self.routes.append((path, route)) + + async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None: + if scope["type"] == "lifespan": + await self.lifespan(scope, receive, send) + return + + if scope["type"] == "http": + for path, app in self.routes: + if scope["path"] == path: + await app(scope, receive, send) + return + + await self.not_found(scope, receive, send) + return + + @asynccontextmanager + async def start_lifespan_context(self) -> AsyncIterator[None]: + async with anyio.create_task_group() as tg, self.lifespan_context(): + tg.start_soon(self._startup) + try: + yield + finally: + await self._shutdown() + tg.cancel_scope.cancel() + + async def lifespan(self, scope: "Scope", receive: "Receive", send: "Send") -> None: + """Handle ASGI lifespan messages to start and shutdown the app.""" + started = False + await receive() # handle `lifespan.startup` event + + try: + async with self.start_lifespan_context(): + await send({"type": "lifespan.startup.complete"}) + started = True + await receive() # handle `lifespan.shutdown` event + + except BaseException: + exc_text = traceback.format_exc() + if started: + await send({"type": "lifespan.shutdown.failed", "message": exc_text}) + else: + await send({"type": "lifespan.startup.failed", "message": exc_text}) + raise + + else: + await send({"type": "lifespan.shutdown.complete"}) + + async def not_found(self, scope: "Scope", receive: "Receive", send: "Send") -> None: + not_found_msg = "FastStream doesn't support regular HTTP protocol." 
+        if scope["type"] == "websocket":
+            websocket_close = WebSocketClose(
+                code=1000,
+                reason=not_found_msg,
+            )
+            await websocket_close(scope, receive, send)
+            return
+
+        response = AsgiResponse(
+            body=not_found_msg.encode(),
+            status_code=404,
+        )
+
+        await response(scope, receive, send)
diff --git a/faststream/asgi/factories.py b/faststream/asgi/factories.py
new file mode 100644
index 0000000000..54c6723e88
--- /dev/null
+++ b/faststream/asgi/factories.py
@@ -0,0 +1,71 @@
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Optional,
+)
+
+from faststream.asgi.handlers import get
+from faststream.asgi.response import AsgiResponse
+from faststream.asyncapi import get_app_schema
+from faststream.asyncapi.site import (
+    ASYNCAPI_CSS_DEFAULT_URL,
+    ASYNCAPI_JS_DEFAULT_URL,
+    get_asyncapi_html,
+)
+
+if TYPE_CHECKING:
+    from faststream.asgi.types import ASGIApp, Scope
+    from faststream.asyncapi.proto import AsyncAPIApplication
+    from faststream.broker.core.usecase import BrokerUsecase
+
+
+def make_ping_asgi(
+    broker: "BrokerUsecase[Any, Any]",
+    /,
+    timeout: Optional[float] = None,
+) -> "ASGIApp":
+    healthy_response = AsgiResponse(b"", 204)
+    unhealthy_response = AsgiResponse(b"", 500)
+
+    @get
+    async def ping(scope: "Scope") -> AsgiResponse:
+        if await broker.ping(timeout):
+            return healthy_response
+        else:
+            return unhealthy_response
+
+    return ping
+
+
+def make_asyncapi_asgi(
+    app: "AsyncAPIApplication",
+    sidebar: bool = True,
+    info: bool = True,
+    servers: bool = True,
+    operations: bool = True,
+    messages: bool = True,
+    schemas: bool = True,
+    errors: bool = True,
+    expand_message_examples: bool = True,
+    title: str = "FastStream",
+    asyncapi_js_url: str = ASYNCAPI_JS_DEFAULT_URL,
+    asyncapi_css_url: str = ASYNCAPI_CSS_DEFAULT_URL,
+) -> "ASGIApp":
+    return AsgiResponse(
+        get_asyncapi_html(
+            get_app_schema(app),
+            sidebar=sidebar,
+            info=info,
+            servers=servers,
+            operations=operations,
+            messages=messages,
+            schemas=schemas,
+            errors=errors,
+            expand_message_examples=expand_message_examples,
+            title=title,
+            asyncapi_js_url=asyncapi_js_url,
+            asyncapi_css_url=asyncapi_css_url,
+        ).encode("utf-8"),
+        200,
+        {"Content-Type": "text/html; charset=utf-8"},
+    )
diff --git a/faststream/asgi/handlers.py b/faststream/asgi/handlers.py
new file mode 100644
index 0000000000..e14234cdf6
--- /dev/null
+++ b/faststream/asgi/handlers.py
@@ -0,0 +1,47 @@
+from functools import wraps
+from typing import (
+    TYPE_CHECKING,
+    Sequence,
+)
+
+from faststream.asgi.response import AsgiResponse
+
+if TYPE_CHECKING:
+    from faststream.asgi.types import ASGIApp, Receive, Scope, Send, UserApp
+
+
+def get(func: "UserApp") -> "ASGIApp":
+    methods = ("GET", "HEAD")
+
+    method_not_allowed_response = _get_method_not_allowed_response(methods)
+    error_response = AsgiResponse(body=b"Internal Server Error", status_code=500)
+
+    @wraps(func)
+    async def asgi_wrapper(
+        scope: "Scope",
+        receive: "Receive",
+        send: "Send",
+    ) -> None:
+        if scope["method"] not in methods:
+            response: ASGIApp = method_not_allowed_response
+
+        else:
+            try:
+                response = await func(scope)
+            except Exception:
+                response = error_response
+
+        await response(scope, receive, send)
+        return
+
+    return asgi_wrapper
+
+
+def _get_method_not_allowed_response(methods: Sequence[str]) -> AsgiResponse:
+    return AsgiResponse(
+        body=b"Method Not Allowed",
+        status_code=405,
+        headers={
+            "Allow": ", ".join(methods),
+        },
+    )
diff --git a/faststream/asgi/response.py b/faststream/asgi/response.py
new file mode 100644
index 0000000000..cfc9d37d59
--- 
/dev/null +++ b/faststream/asgi/response.py @@ -0,0 +1,60 @@ +from typing import TYPE_CHECKING, List, Mapping, Optional, Tuple + +if TYPE_CHECKING: + from faststream.asgi.types import Receive, Scope, Send + + +class AsgiResponse: + def __init__( + self, + body: bytes, + status_code: int, + headers: Optional[Mapping[str, str]] = None, + ) -> None: + self.status_code = status_code + self.body = body + self.raw_headers = _get_response_headers(body, headers, status_code) + + async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None: + prefix = "websocket." if (scope["type"] == "websocket") else "" + await send( + { + "type": f"{prefix}http.response.start", + "status": self.status_code, + "headers": self.raw_headers, + } + ) + await send( + { + "type": f"{prefix}http.response.body", + "body": self.body, + } + ) + + +def _get_response_headers( + body: bytes, + headers: Optional[Mapping[str, str]], + status_code: int, +) -> List[Tuple[bytes, bytes]]: + if headers is None: + raw_headers: List[Tuple[bytes, bytes]] = [] + populate_content_length = True + + else: + raw_headers = [ + (k.lower().encode("latin-1"), v.encode("latin-1")) + for k, v in headers.items() + ] + keys = [h[0] for h in raw_headers] + populate_content_length = b"content-length" not in keys + + if ( + body + and populate_content_length + and not (status_code < 200 or status_code in (204, 304)) + ): + content_length = str(len(body)) + raw_headers.append((b"content-length", content_length.encode("latin-1"))) + + return raw_headers diff --git a/faststream/asgi/types.py b/faststream/asgi/types.py new file mode 100644 index 0000000000..df9d96b098 --- /dev/null +++ b/faststream/asgi/types.py @@ -0,0 +1,8 @@ +from typing import Any, Awaitable, Callable, MutableMapping + +Scope = MutableMapping[str, Any] +Message = MutableMapping[str, Any] +Receive = Callable[[], Awaitable[Message]] +Send = Callable[[Message], Awaitable[None]] +ASGIApp = Callable[[Scope, Receive, Send], Awaitable[None]] +UserApp = Callable[[Scope], Awaitable[ASGIApp]] diff --git a/faststream/asgi/websocket.py b/faststream/asgi/websocket.py new file mode 100644 index 0000000000..4a7fdf45de --- /dev/null +++ b/faststream/asgi/websocket.py @@ -0,0 +1,19 @@ +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from faststream.asgi.types import Receive, Scope, Send + + +class WebSocketClose: + def __init__( + self, + code: int, + reason: Optional[str], + ) -> None: + self.code = code + self.reason = reason or "" + + async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None: + await send( + {"type": "websocket.close", "code": self.code, "reason": self.reason} + ) diff --git a/faststream/asyncapi/generate.py b/faststream/asyncapi/generate.py index aeb2daaeed..daec95ec00 100644 --- a/faststream/asyncapi/generate.py +++ b/faststream/asyncapi/generate.py @@ -1,6 +1,6 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Union +from typing import TYPE_CHECKING, Any, Dict, List -from faststream._compat import DEF_KEY, HAS_FASTAPI +from faststream._compat import DEF_KEY from faststream.asyncapi.schema import ( Channel, Components, @@ -13,15 +13,12 @@ from faststream.constants import ContentTypes if TYPE_CHECKING: - from faststream.app import FastStream + from faststream.asyncapi.proto import AsyncAPIApplication from faststream.broker.core.usecase import BrokerUsecase from faststream.broker.types import ConnectionType, MsgType - if HAS_FASTAPI: - from faststream.broker.fastapi.router import StreamRouter - -def 
get_app_schema(app: Union["FastStream", "StreamRouter[Any]"]) -> Schema:
+def get_app_schema(app: "AsyncAPIApplication") -> Schema:
     """Get the application schema."""
     broker = app.broker
     if broker is None:  # pragma: no cover
diff --git a/faststream/asyncapi/proto.py b/faststream/asyncapi/proto.py
index 8628a8b837..81a76da837 100644
--- a/faststream/asyncapi/proto.py
+++ b/faststream/asyncapi/proto.py
@@ -1,27 +1,50 @@
 from abc import abstractmethod
-from typing import TYPE_CHECKING, Dict, Optional, Protocol
-
-from typing_extensions import Annotated, Doc
+from typing import TYPE_CHECKING, Any, Dict, Optional, Protocol, Sequence, Union
 
 if TYPE_CHECKING:
+    from faststream.asyncapi.schema import (
+        Contact,
+        ContactDict,
+        ExternalDocs,
+        ExternalDocsDict,
+        License,
+        LicenseDict,
+        Tag,
+        TagDict,
+    )
     from faststream.asyncapi.schema.channels import Channel
+    from faststream.broker.core.usecase import BrokerUsecase
+    from faststream.types import (
+        AnyDict,
+        AnyHttpUrl,
+    )
+
+
+class AsyncAPIApplication(Protocol):
+    broker: Optional["BrokerUsecase[Any, Any]"]
+
+    title: str
+    version: str
+    description: str
+    terms_of_service: Optional["AnyHttpUrl"]
+    license: Optional[Union["License", "LicenseDict", "AnyDict"]]
+    contact: Optional[Union["Contact", "ContactDict", "AnyDict"]]
+    asyncapi_tags: Optional[Sequence[Union["Tag", "TagDict", "AnyDict"]]]
+    external_docs: Optional[Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]]
+    identifier: Optional[str]
 
 
 class AsyncAPIProto(Protocol):
     """A class representing an asynchronous API operation."""
 
-    title_: Annotated[
-        Optional[str],
-        Doc("AsyncAPI object title."),
-    ]
-    description_: Annotated[
-        Optional[str],
-        Doc("AsyncAPI object description."),
-    ]
-    include_in_schema: Annotated[
-        bool,
-        Doc("Whetever to include operation in AsyncAPI schema or not."),
-    ]
+    title_: Optional[str]
+    """AsyncAPI object title."""
+
+    description_: Optional[str]
+    """AsyncAPI object description."""
+
+    include_in_schema: bool
+    """Whether to include operation in AsyncAPI schema or not."""
 
     @property
     @abstractmethod
diff --git a/faststream/asyncapi/site.py b/faststream/asyncapi/site.py
index fcc0aefea6..9b11565c6a 100644
--- a/faststream/asyncapi/site.py
+++ b/faststream/asyncapi/site.py
@@ -10,6 +10,13 @@
     from faststream.asyncapi.schema import Schema
 
+ASYNCAPI_JS_DEFAULT_URL = "https://unpkg.com/@asyncapi/react-component@1.0.0-next.54/browser/standalone/index.js"
+
+ASYNCAPI_CSS_DEFAULT_URL = (
+    "https://unpkg.com/@asyncapi/react-component@1.0.0-next.54/styles/default.min.css"
+)
+
+
 def get_asyncapi_html(
     schema: "Schema",
     sidebar: bool = True,
@@ -21,8 +28,8 @@ def get_asyncapi_html(
     errors: bool = True,
     expand_message_examples: bool = True,
     title: str = "FastStream",
-    asyncapi_js_url: str = "https://unpkg.com/@asyncapi/react-component@1.0.0-next.47/browser/standalone/index.js",
-    asyncapi_css_url: str = "https://unpkg.com/@asyncapi/react-component@1.0.0-next.46/styles/default.min.css",
+    asyncapi_js_url: str = ASYNCAPI_JS_DEFAULT_URL,
+    asyncapi_css_url: str = ASYNCAPI_CSS_DEFAULT_URL,
 ) -> str:
     """Generate HTML for displaying an AsyncAPI document."""
     schema_json = schema.to_json()
diff --git a/faststream/broker/core/logging.py b/faststream/broker/core/logging.py
index 86091b0308..06412bf7f2 100644
--- a/faststream/broker/core/logging.py
+++ b/faststream/broker/core/logging.py
@@ -1,12 +1,12 @@
 import logging
 from abc import abstractmethod
-from inspect import Parameter
-from typing import TYPE_CHECKING, Any, Optional, Union, cast
+from typing import
TYPE_CHECKING, Any, Optional from typing_extensions import Annotated, Doc from faststream.broker.core.abc import ABCBroker from faststream.broker.types import MsgType +from faststream.types import EMPTY if TYPE_CHECKING: from faststream.types import AnyDict, LoggerProto @@ -34,7 +34,7 @@ def __init__( Doc("Logger object to use if `logger` is not set."), ], logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), ], log_level: Annotated[ @@ -47,8 +47,8 @@ def __init__( ], **kwargs: Any, ) -> None: - if logger is not Parameter.empty: - self.logger = cast(Optional["LoggerProto"], logger) + if logger is not EMPTY: + self.logger = logger self.use_custom = True else: self.logger = default_logger diff --git a/faststream/broker/core/usecase.py b/faststream/broker/core/usecase.py index d8aae6064c..e790d5a8a0 100644 --- a/faststream/broker/core/usecase.py +++ b/faststream/broker/core/usecase.py @@ -87,7 +87,7 @@ def __init__( Doc("Logger object to use if `logger` is not set."), ], logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), ], log_level: Annotated[ diff --git a/faststream/broker/fastapi/context.py b/faststream/broker/fastapi/context.py index efb554166b..25edd313b7 100644 --- a/faststream/broker/fastapi/context.py +++ b/faststream/broker/fastapi/context.py @@ -1,10 +1,10 @@ import logging -from inspect import Parameter from typing import Any, Callable, Optional from fastapi import params from typing_extensions import Annotated +from faststream.types import EMPTY from faststream.utils.context import ContextRepo as CR from faststream.utils.context.types import resolve_context_by_name @@ -12,7 +12,7 @@ def Context( # noqa: N802 name: str, *, - default: Any = Parameter.empty, + default: Any = EMPTY, initial: Optional[Callable[..., Any]] = None, ) -> Any: """Get access to objects of the Context.""" diff --git a/faststream/broker/fastapi/router.py b/faststream/broker/fastapi/router.py index 2d4153f4f0..afd070b3a5 100644 --- a/faststream/broker/fastapi/router.py +++ b/faststream/broker/fastapi/router.py @@ -30,6 +30,7 @@ from starlette.responses import JSONResponse, Response from starlette.routing import BaseRoute, _DefaultLifespan +from faststream.asyncapi.proto import AsyncAPIApplication from faststream.asyncapi.site import get_asyncapi_html from faststream.broker.fastapi.get_dependant import get_fastapi_dependant from faststream.broker.fastapi.route import StreamRoute @@ -80,6 +81,7 @@ async def after_processed( class StreamRouter( APIRouter, # type: ignore[misc] + AsyncAPIApplication, Generic[MsgType], ): """A class to route streams.""" diff --git a/faststream/broker/message.py b/faststream/broker/message.py index dbe89b089d..be590b4eab 100644 --- a/faststream/broker/message.py +++ b/faststream/broker/message.py @@ -1,7 +1,6 @@ import json from contextlib import suppress from dataclasses import dataclass, field -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -18,6 +17,7 @@ from faststream._compat import dump_json, json_loads from faststream.constants import ContentTypes +from faststream.types import EMPTY if TYPE_CHECKING: from faststream.types import AnyDict, DecodedMessage, SendableMessage @@ -68,9 +68,7 @@ def decode_message(message: "StreamMessage[Any]") -> "DecodedMessage": body: Any = getattr(message, "body", message) m: DecodedMessage = body - if ( - 
content_type := getattr(message, "content_type", Parameter.empty) - ) is not Parameter.empty: + if (content_type := getattr(message, "content_type", EMPTY)) is not EMPTY: content_type = cast(Optional[str], content_type) if not content_type: diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py index c5eab13fb2..98d2725064 100644 --- a/faststream/broker/subscriber/usecase.py +++ b/faststream/broker/subscriber/usecase.py @@ -259,7 +259,9 @@ def __call__( dependencies: Iterable["Depends"] = (), ) -> Any: if (options := self._call_options) is None: - raise SetupError("You can't create subscriber directly.") + raise SetupError( + "You can't create subscriber directly. Please, use `add_call` at first." + ) total_deps = (*options.dependencies, *dependencies) total_middlewares = (*options.middlewares, *middlewares) diff --git a/faststream/confluent/__init__.py b/faststream/confluent/__init__.py index 9566997b78..a7961b35b3 100644 --- a/faststream/confluent/__init__.py +++ b/faststream/confluent/__init__.py @@ -2,6 +2,7 @@ from faststream.confluent.broker import KafkaBroker from faststream.confluent.response import KafkaResponse from faststream.confluent.router import KafkaPublisher, KafkaRoute, KafkaRouter +from faststream.confluent.schemas import TopicPartition from faststream.confluent.testing import TestKafkaBroker from faststream.testing.app import TestApp @@ -12,6 +13,7 @@ "KafkaRoute", "KafkaPublisher", "KafkaResponse", + "TopicPartition", "TestKafkaBroker", "TestApp", ) diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index 8d83719740..0239981133 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -1,7 +1,5 @@ import logging -from asyncio import AbstractEventLoop from functools import partial -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -27,13 +25,13 @@ from faststream.confluent.client import ( AsyncConfluentConsumer, AsyncConfluentProducer, - _missing, ) from faststream.confluent.config import ConfluentConfig from faststream.confluent.publisher.producer import AsyncConfluentFastProducer from faststream.confluent.schemas.params import ConsumerConnectionParams from faststream.confluent.security import parse_security from faststream.exceptions import NOT_CONNECTED_YET +from faststream.types import EMPTY from faststream.utils.data import filter_by_dict if TYPE_CHECKING: @@ -112,7 +110,6 @@ def __init__( """ ), ] = 9 * 60 * 1000, - loop: Optional[AbstractEventLoop] = None, client_id: Annotated[ Optional[str], Doc( @@ -127,20 +124,24 @@ def __init__( ] = SERVICE_NAME, allow_auto_create_topics: Annotated[ bool, - Doc(""" + Doc( + """ Allow automatic topic creation on the broker when subscribing to or assigning non-existent topics. - """), + """ + ), ] = True, config: Annotated[ Optional[ConfluentConfig], - Doc(""" + Doc( + """ Extra configuration for the confluent-kafka-python producer/consumer. See `confluent_kafka.Config `_. - """), + """ + ), ] = None, # publisher args acks: Annotated[ - Union[Literal[0, 1, -1, "all"], object], + Literal[0, 1, -1, "all"], Doc( """ One of ``0``, ``1``, ``all``. The number of acknowledgments @@ -169,7 +170,7 @@ def __init__( :data:`True` defaults to ``acks=all``. 
""" ), - ] = _missing, + ] = EMPTY, compression_type: Annotated[ Optional[Literal["gzip", "snappy", "lz4", "zstd"]], Doc( @@ -301,9 +302,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -359,7 +360,6 @@ def __init__( metadata_max_age_ms=metadata_max_age_ms, allow_auto_create_topics=allow_auto_create_topics, connections_max_idle_ms=connections_max_idle_ms, - loop=loop, # publisher args acks=acks, compression_type=compression_type, @@ -411,12 +411,12 @@ async def _close( async def connect( self, bootstrap_servers: Annotated[ - Union[str, Iterable[str], object], + Union[str, Iterable[str]], Doc("Kafka addresses to connect."), - ] = Parameter.empty, + ] = EMPTY, **kwargs: Any, ) -> Callable[..., AsyncConfluentConsumer]: - if bootstrap_servers is not Parameter.empty: + if bootstrap_servers is not EMPTY: kwargs["bootstrap_servers"] = bootstrap_servers return await super().connect(**kwargs) diff --git a/faststream/confluent/broker/logging.py b/faststream/confluent/broker/logging.py index 4fead65305..758e4285ba 100644 --- a/faststream/confluent/broker/logging.py +++ b/faststream/confluent/broker/logging.py @@ -1,10 +1,10 @@ import logging -from inspect import Parameter from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Tuple, Union from faststream.broker.core.usecase import BrokerUsecase from faststream.confluent.client import AsyncConfluentConsumer from faststream.log.logging import get_broker_logger +from faststream.types import EMPTY if TYPE_CHECKING: import confluent_kafka @@ -27,7 +27,7 @@ class KafkaLoggingBroker( def __init__( self, *args: Any, - logger: Union["LoggerProto", object, None] = Parameter.empty, + logger: Optional["LoggerProto"] = EMPTY, log_level: int = logging.INFO, log_fmt: Optional[str] = None, **kwargs: Any, diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py index 10c2584111..4a2844e1b5 100644 --- a/faststream/confluent/broker/registrator.py +++ b/faststream/confluent/broker/registrator.py @@ -35,6 +35,7 @@ AsyncAPIBatchPublisher, AsyncAPIDefaultPublisher, ) + from faststream.confluent.schemas import TopicPartition from faststream.confluent.subscriber.asyncapi import ( AsyncAPIBatchSubscriber, AsyncAPIDefaultSubscriber, @@ -63,6 +64,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -259,17 +262,6 @@ def subscriber( Literal[True], Doc("Whether to consume messages in batches or not."), ], - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. 
- """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -341,6 +333,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -537,17 +531,6 @@ def subscriber( Literal[False], Doc("Whether to consume messages in batches or not."), ] = False, - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. - """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -619,6 +602,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -815,17 +800,6 @@ def subscriber( bool, Doc("Whether to consume messages in batches or not."), ] = False, - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. - """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -900,6 +874,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -1096,17 +1072,6 @@ def subscriber( bool, Doc("Whether to consume messages in batches or not."), ] = False, - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. 
- """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -1179,8 +1144,9 @@ def subscriber( subscriber = super().subscriber( create_subscriber( *topics, + polling_interval=polling_interval, + partitions=partitions, batch=batch, - batch_timeout_ms=batch_timeout_ms, max_records=max_records, group_id=group_id, connection_data={ diff --git a/faststream/confluent/client.py b/faststream/confluent/client.py index 5777556d8a..5357e41a3a 100644 --- a/faststream/confluent/client.py +++ b/faststream/confluent/client.py @@ -1,4 +1,4 @@ -import asyncio +import logging from time import time from typing import ( TYPE_CHECKING, @@ -6,86 +6,25 @@ Dict, Iterable, List, - NamedTuple, Optional, + Sequence, Tuple, Union, ) +import anyio from confluent_kafka import Consumer, KafkaError, KafkaException, Message, Producer from confluent_kafka.admin import AdminClient, NewTopic -from pydantic import BaseModel -from typing_extensions import Annotated, Doc from faststream.confluent.config import ConfluentConfig -from faststream.log import logger +from faststream.exceptions import SetupError +from faststream.log import logger as faststream_logger +from faststream.types import EMPTY from faststream.utils.functions import call_or_await if TYPE_CHECKING: - from faststream.types import LoggerProto - -_missing = object() - -ADMINCLIENT_CONFIG_PARAMS = ( - "allow.auto.create.topics", - "bootstrap.servers", - "client.id", - "request.timeout.ms", - "metadata.max.age.ms", - "security.protocol", - "connections.max.idle.ms", - "sasl.mechanism", - "sasl.username", - "sasl.password", -) - - -class MsgToSend(BaseModel): - """A Pydantic model representing a message to be sent to Kafka. - - Attributes: - timestamp (int): The timestamp of the message. - key (Optional[Union[str, bytes]]): The key of the message, can be a string or bytes. - value (Optional[Union[str, bytes]]): The value of the message, can be a string or bytes. - headers (List[Tuple[str, bytes]]): A list of headers associated with the message. 
- """ - - timestamp: int - key: Optional[Union[str, bytes]] - value: Optional[Union[str, bytes]] - headers: List[Tuple[str, bytes]] - - -class BatchBuilder: - """A helper class to build a batch of messages to send to Kafka.""" - - def __init__(self) -> None: - """Initializes a new BatchBuilder instance.""" - self._builder: List[MsgToSend] = [] - - def append( - self, - *, - timestamp: Optional[int] = None, - key: Optional[Union[str, bytes]] = None, - value: Optional[Union[str, bytes]] = None, - headers: Optional[List[Tuple[str, bytes]]] = None, - ) -> None: - """Appends a message to the batch with optional timestamp, key, value, and headers.""" - if timestamp is None: - timestamp = round(time() * 1000) - - if key is None and value is None: - raise KafkaException( - KafkaError(40, reason="Both key and value can't be None") - ) - - if headers is None: - headers = [] - - self._builder.append( - MsgToSend(timestamp=timestamp, key=key, value=value, headers=headers) - ) + from faststream.confluent.schemas import TopicPartition + from faststream.types import AnyDict, LoggerProto class AsyncConfluentProducer: @@ -94,12 +33,12 @@ class AsyncConfluentProducer: def __init__( self, *, - loop: Optional[asyncio.AbstractEventLoop] = None, + logger: Optional["LoggerProto"], bootstrap_servers: Union[str, List[str]] = "localhost", client_id: Optional[str] = None, metadata_max_age_ms: int = 300000, request_timeout_ms: int = 40000, - acks: Any = _missing, + acks: Any = EMPTY, compression_type: Optional[str] = None, partitioner: str = "consistent_random", max_request_size: int = 1048576, @@ -115,10 +54,6 @@ def __init__( sasl_plain_password: Optional[str] = None, sasl_plain_username: Optional[str] = None, config: Optional[ConfluentConfig] = None, - logger: Annotated[ - Union["LoggerProto", None, object], - Doc("User specified logger to pass into Context and log service messages."), - ] = logger, ) -> None: self.logger = logger @@ -132,7 +67,7 @@ def __init__( if compression_type is None: compression_type = "none" - if acks is _missing or acks == "all": + if acks is EMPTY or acks == "all": acks = -1 config_from_params = { @@ -165,14 +100,11 @@ def __init__( } ) - self.producer = Producer(self.config, logger=self.logger) - # self.producer.init_transactions() - self.loop = loop or asyncio.get_event_loop() - self.loop.run_in_executor(None, self.producer.list_topics) + self.producer = Producer(self.config, logger=self.logger) # type: ignore[call-arg] async def stop(self) -> None: """Stop the Kafka producer and flush remaining messages.""" - self.producer.flush() + await call_or_await(self.producer.flush) async def send( self, @@ -184,48 +116,41 @@ async def send( headers: Optional[List[Tuple[str, Union[str, bytes]]]] = None, ) -> None: """Sends a single message to a Kafka topic.""" - kwargs = { - k: v - for k, v in { - "value": value, - "key": key, - "partition": partition, - "headers": headers, - }.items() - if v is not None + kwargs: AnyDict = { + "value": value, + "key": key, + "headers": headers, } + + if partition is not None: + kwargs["partition"] = partition + if timestamp_ms is not None: kwargs["timestamp"] = timestamp_ms - self.producer.produce( - topic, - **kwargs, - ) - self.producer.poll(0) - def create_batch(self) -> BatchBuilder: - """Creates a batch for sending multiple messages. + # should be sync to prevent segfault + self.producer.produce(topic, **kwargs) + self.producer.poll(0) - Returns: - BatchBuilder: An instance of BatchBuilder for building message batches. 
- """ + def create_batch(self) -> "BatchBuilder": + """Creates a batch for sending multiple messages.""" return BatchBuilder() async def send_batch( - self, batch: BatchBuilder, topic: str, *, partition: Optional[int] + self, batch: "BatchBuilder", topic: str, *, partition: Optional[int] ) -> None: """Sends a batch of messages to a Kafka topic.""" - tasks = [ - self.send( - topic=topic, - partition=partition, - timestamp_ms=msg.timestamp, - key=msg.key, - value=msg.value, - headers=msg.headers, # type: ignore[arg-type] - ) - for msg in batch._builder - ] - await asyncio.gather(*tasks) + async with anyio.create_task_group() as tg: + for msg in batch._builder: + tg.start_soon( + self.send, + topic, + msg["value"], + msg["key"], + partition, + msg["timestamp_ms"], + msg["headers"], + ) async def ping( self, @@ -247,44 +172,14 @@ async def ping( return False -class TopicPartition(NamedTuple): - """A named tuple representing a Kafka topic and partition.""" - - topic: str - partition: int - - -def create_topics( - topics: List[str], - config: Dict[str, Optional[Union[str, int, float, bool, Any]]], - logger: Union["LoggerProto", None, object] = logger, -) -> None: - """Creates Kafka topics using the provided configuration.""" - admin_client = AdminClient( - {x: config[x] for x in ADMINCLIENT_CONFIG_PARAMS if x in config} - ) - - fs = admin_client.create_topics( - [NewTopic(topic, num_partitions=1, replication_factor=1) for topic in topics] - ) - - for topic, f in fs.items(): - try: - f.result() # The result itself is None - except Exception as e: # noqa: PERF203 - if "TOPIC_ALREADY_EXISTS" not in str(e): - logger.warning(f"Failed to create topic {topic}: {e}") # type: ignore[union-attr] - else: - logger.info(f"Topic `{topic}` created.") # type: ignore[union-attr] - - class AsyncConfluentConsumer: """An asynchronous Python Kafka client for consuming messages using the "confluent-kafka" package.""" def __init__( self, *topics: str, - loop: Optional[asyncio.AbstractEventLoop] = None, + partitions: Sequence["TopicPartition"], + logger: Optional["LoggerProto"], bootstrap_servers: Union[str, List[str]] = "localhost", client_id: Optional[str] = "confluent-kafka-consumer", group_id: Optional[str] = None, @@ -311,17 +206,13 @@ def __init__( sasl_plain_password: Optional[str] = None, sasl_plain_username: Optional[str] = None, config: Optional[ConfluentConfig] = None, - logger: Annotated[ - Union["LoggerProto", None, object], - Doc("User specified logger to pass into Context and log service messages."), - ] = logger, ) -> None: self.logger = logger self.config: Dict[str, Any] = {} if config is None else dict(config) if group_id is None: - group_id = "confluent-kafka-consumer-group" + group_id = "faststream-consumer-group" if isinstance(bootstrap_servers, Iterable) and not isinstance( bootstrap_servers, str @@ -329,6 +220,7 @@ def __init__( bootstrap_servers = ",".join(bootstrap_servers) self.topics = list(topics) + self.partitions = partitions if not isinstance(partition_assignment_strategy, str): partition_assignment_strategy = ",".join( @@ -339,7 +231,7 @@ def __init__( ) config_from_params = { "allow.auto.create.topics": allow_auto_create_topics, - # "topic.metadata.refresh.interval.ms": 1000, + "topic.metadata.refresh.interval.ms": 1000, "bootstrap.servers": bootstrap_servers, "client.id": client_id, "group.id": group_id, @@ -348,7 +240,7 @@ def __init__( "fetch.max.bytes": fetch_max_bytes, "fetch.min.bytes": fetch_min_bytes, "max.partition.fetch.bytes": max_partition_fetch_bytes, - # 
"request.timeout.ms": request_timeout_ms, + # "request.timeout.ms": 1000, # producer only "fetch.error.backoff.ms": retry_backoff_ms, "auto.offset.reset": auto_offset_reset, "enable.auto.commit": enable_auto_commit, @@ -363,6 +255,7 @@ def __init__( "connections.max.idle.ms": connections_max_idle_ms, "isolation.level": isolation_level, } + self.allow_auto_create_topics = allow_auto_create_topics self.config = {**self.config, **config_from_params} if sasl_mechanism in ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"]: @@ -374,21 +267,35 @@ def __init__( } ) - self.loop = loop or asyncio.get_event_loop() + self.consumer = Consumer(self.config, logger=self.logger) # type: ignore[call-arg] - if allow_auto_create_topics: - self.loop.run_in_executor( - None, create_topics, self.topics, self.config, logger - ) - else: - logger.warning( # type: ignore[union-attr] - "Auto create topics is disabled. Make sure the topics exist." - ) - self.consumer = Consumer(self.config, logger=self.logger) + @property + def topics_to_create(self) -> List[str]: + return list({*self.topics, *(p.topic for p in self.partitions)}) async def start(self) -> None: """Starts the Kafka consumer and subscribes to the specified topics.""" - self.consumer.subscribe(self.topics) + if self.allow_auto_create_topics: + await call_or_await( + create_topics, self.topics_to_create, self.config, self.logger + ) + + elif self.logger: + self.logger.log( + logging.WARNING, + "Auto create topics is disabled. Make sure the topics exist.", + ) + + if self.topics: + await call_or_await(self.consumer.subscribe, self.topics) + + elif self.partitions: + await call_or_await( + self.consumer.assign, [p.to_confluent() for p in self.partitions] + ) + + else: + raise SetupError("You must provide either `topics` or `partitions` option.") async def commit(self, asynchronous: bool = True) -> None: """Commits the offsets of all messages returned by the last poll operation.""" @@ -407,27 +314,31 @@ async def stop(self) -> None: # No offset stored issue is not a problem - https://github.com/confluentinc/confluent-kafka-python/issues/295#issuecomment-355907183 if "No offset stored" in str(e): pass - else: - raise e + elif self.logger: + self.logger.log( + logging.ERROR, + "Consumer closing error occurred.", + exc_info=e, + ) # Wrap calls to async to make method cancelable by timeout await call_or_await(self.consumer.close) - async def getone(self, timeout_ms: int = 1000) -> Optional[Message]: + async def getone(self, timeout: float = 0.1) -> Optional[Message]: """Consumes a single message from Kafka.""" - msg = await call_or_await(self.consumer.poll, timeout_ms / 1000) + msg = await call_or_await(self.consumer.poll, timeout) return check_msg_error(msg) async def getmany( self, - timeout_ms: int = 0, + timeout: float = 0.1, max_records: Optional[int] = 10, ) -> Tuple[Message, ...]: """Consumes a batch of messages from Kafka and groups them by topic and partition.""" raw_messages: List[Optional[Message]] = await call_or_await( - self.consumer.consume, + self.consumer.consume, # type: ignore[arg-type] num_messages=max_records or 10, - timeout=timeout_ms / 1000, + timeout=timeout, ) return tuple(x for x in map(check_msg_error, raw_messages) if x is not None) @@ -439,3 +350,74 @@ def check_msg_error(msg: Optional[Message]) -> Optional[Message]: return None return msg + + +class BatchBuilder: + """A helper class to build a batch of messages to send to Kafka.""" + + def __init__(self) -> None: + """Initializes a new BatchBuilder instance.""" + self._builder: 
List[AnyDict] = []
+
+    def append(
+        self,
+        *,
+        timestamp: Optional[int] = None,
+        key: Optional[Union[str, bytes]] = None,
+        value: Optional[Union[str, bytes]] = None,
+        headers: Optional[List[Tuple[str, bytes]]] = None,
+    ) -> None:
+        """Appends a message to the batch with optional timestamp, key, value, and headers."""
+        if key is None and value is None:
+            raise KafkaException(
+                KafkaError(40, reason="Both key and value can't be None")
+            )
+
+        self._builder.append(
+            {
+                "timestamp_ms": timestamp or round(time() * 1000),
+                "key": key,
+                "value": value,
+                "headers": headers or [],
+            }
+        )
+
+
+def create_topics(
+    topics: List[str],
+    config: Dict[str, Optional[Union[str, int, float, bool, Any]]],
+    logger_: Optional["LoggerProto"] = None,
+) -> None:
+    """Creates Kafka topics using the provided configuration."""
+    logger_ = logger_ or faststream_logger
+
+    admin_client = AdminClient(
+        {x: config[x] for x in ADMINCLIENT_CONFIG_PARAMS if x in config}
+    )
+
+    fs = admin_client.create_topics(
+        [NewTopic(topic, num_partitions=1, replication_factor=1) for topic in topics]
+    )
+
+    for topic, f in fs.items():
+        try:
+            f.result()  # The result itself is None
+        except Exception as e:  # noqa: PERF203
+            if "TOPIC_ALREADY_EXISTS" not in str(e):
+                logger_.log(logging.WARNING, f"Failed to create topic {topic}: {e}")
+            else:
+                logger_.log(logging.INFO, f"Topic `{topic}` created.")
+
+
+ADMINCLIENT_CONFIG_PARAMS = (
+    "allow.auto.create.topics",
+    "bootstrap.servers",
+    "client.id",
+    "request.timeout.ms",
+    "metadata.max.age.ms",
+    "security.protocol",
+    "connections.max.idle.ms",
+    "sasl.mechanism",
+    "sasl.username",
+    "sasl.password",
+)
diff --git a/faststream/confluent/fastapi/fastapi.py b/faststream/confluent/fastapi/fastapi.py
index b1f5c83590..8017680250 100644
--- a/faststream/confluent/fastapi/fastapi.py
+++ b/faststream/confluent/fastapi/fastapi.py
@@ -1,5 +1,4 @@
 import logging
-from inspect import Parameter
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -29,6 +28,7 @@
 from faststream.broker.fastapi.router import StreamRouter
 from faststream.broker.utils import default_filter
 from faststream.confluent.broker.broker import KafkaBroker as KB
+from faststream.types import EMPTY
 
 if TYPE_CHECKING:
     from enum import Enum
@@ -50,6 +50,7 @@
         AsyncAPIBatchPublisher,
         AsyncAPIDefaultPublisher,
     )
+    from faststream.confluent.schemas import TopicPartition
     from faststream.confluent.subscriber.asyncapi import (
         AsyncAPIBatchSubscriber,
         AsyncAPIDefaultSubscriber,
@@ -125,9 +126,9 @@ def __init__(
         ] = None,
         # logging args
         logger: Annotated[
-            Union["LoggerProto", None, object],
+            Optional["LoggerProto"],
             Doc("User specified logger to pass into Context and log service messages."),
-        ] = Parameter.empty,
+        ] = EMPTY,
         log_level: Annotated[
             int,
             Doc("Service messages log level."),
@@ -412,6 +413,8 @@ def subscriber(
         topics: Annotated[
             str,
             Doc("Kafka topics to consume messages from."),
         ],
+        partitions: Sequence["TopicPartition"] = (),
+        polling_interval: float = 0.1,
         group_id: Annotated[
             Optional[str],
             Doc(
                 """
@@ -608,17 +611,6 @@
             Literal[False],
             Doc("Whether to consume messages in batches or not."),
         ] = False,
-        batch_timeout_ms: Annotated[
-            int,
-            Doc(
-                """
-                Milliseconds spent waiting if
-                data is not available in the buffer. If 0, returns immediately
-                with any records that are available currently in the buffer,
-                else returns empty.
- """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -813,6 +805,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -1009,17 +1003,6 @@ def subscriber( Literal[True], Doc("Whether to consume messages in batches or not."), ], - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. - """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -1200,6 +1183,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -1396,17 +1381,6 @@ def subscriber( bool, Doc("Whether to consume messages in batches or not."), ] = False, - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. - """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -1604,6 +1578,8 @@ def subscriber( str, Doc("Kafka topics to consume messages from."), ], + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -1800,17 +1776,6 @@ def subscriber( bool, Doc("Whether to consume messages in batches or not."), ] = False, - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. 
- """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -2001,8 +1966,13 @@ def subscriber( "AsyncAPIDefaultSubscriber", ]: subscriber = super().subscriber( - topics[0], # path + ( # path + next(iter(topics), "") + or getattr(next(iter(partitions), None), "topic", "") + ), *topics, + polling_interval=polling_interval, + partitions=partitions, group_id=group_id, fetch_max_wait_ms=fetch_max_wait_ms, fetch_max_bytes=fetch_max_bytes, @@ -2019,7 +1989,6 @@ def subscriber( isolation_level=isolation_level, batch=batch, max_records=max_records, - batch_timeout_ms=batch_timeout_ms, # broker args dependencies=dependencies, parser=parser, diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py index 3480aee092..24f3de19ce 100644 --- a/faststream/confluent/parser.py +++ b/faststream/confluent/parser.py @@ -18,7 +18,7 @@ class AsyncConfluentParser: @staticmethod async def parse_message( message: "Message", - ) -> "StreamMessage[Message]": + ) -> KafkaMessage: """Parses a Kafka message.""" headers = _parse_msg_headers(message.headers() or ()) @@ -43,7 +43,7 @@ async def parse_message( @staticmethod async def parse_message_batch( message: Tuple["Message", ...], - ) -> "StreamMessage[Tuple[Message, ...]]": + ) -> KafkaMessage: """Parses a batch of messages from a Kafka consumer.""" body: List[Any] = [] batch_headers: List[Dict[str, str]] = [] diff --git a/faststream/confluent/router.py b/faststream/confluent/router.py index 6cff87009c..ac306a581d 100644 --- a/faststream/confluent/router.py +++ b/faststream/confluent/router.py @@ -30,6 +30,7 @@ SubscriberMiddleware, ) from faststream.confluent.message import KafkaMessage + from faststream.confluent.schemas import TopicPartition from faststream.types import SendableMessage @@ -148,6 +149,8 @@ def __init__( Iterable[KafkaPublisher], Doc("Kafka publishers to broadcast the handler result."), ] = (), + partitions: Sequence["TopicPartition"] = (), + polling_interval: float = 0.1, group_id: Annotated[ Optional[str], Doc( @@ -344,17 +347,6 @@ def __init__( bool, Doc("Whether to consume messages in batches or not."), ] = False, - batch_timeout_ms: Annotated[ - int, - Doc( - """ - Milliseconds spent waiting if - data is not available in the buffer. If 0, returns immediately - with any records that are available currently in the buffer, - else returns empty. 
- """ - ), - ] = 200, max_records: Annotated[ Optional[int], Doc("Number of messages to consume as one batch."), @@ -422,6 +414,8 @@ def __init__( call, *topics, publishers=publishers, + partitions=partitions, + polling_interval=polling_interval, group_id=group_id, fetch_max_wait_ms=fetch_max_wait_ms, fetch_max_bytes=fetch_max_bytes, @@ -437,7 +431,6 @@ def __init__( heartbeat_interval_ms=heartbeat_interval_ms, isolation_level=isolation_level, max_records=max_records, - batch_timeout_ms=batch_timeout_ms, batch=batch, # basic args dependencies=dependencies, diff --git a/faststream/confluent/schemas/__init__.py b/faststream/confluent/schemas/__init__.py index e69de29bb2..bc15a9aae7 100644 --- a/faststream/confluent/schemas/__init__.py +++ b/faststream/confluent/schemas/__init__.py @@ -0,0 +1,3 @@ +from faststream.confluent.schemas.partition import TopicPartition + +__all__ = ("TopicPartition",) diff --git a/faststream/confluent/schemas/params.py b/faststream/confluent/schemas/params.py index 78382ca3a4..91cb83591a 100644 --- a/faststream/confluent/schemas/params.py +++ b/faststream/confluent/schemas/params.py @@ -1,5 +1,4 @@ -from asyncio import AbstractEventLoop -from typing import List, Literal, Optional, Union +from typing import List, Literal, Union from typing_extensions import TypedDict @@ -8,7 +7,6 @@ class ConsumerConnectionParams(TypedDict, total=False): """A class to represent the connection parameters for a consumer.""" bootstrap_servers: Union[str, List[str]] - loop: Optional[AbstractEventLoop] client_id: str retry_backoff_ms: int metadata_max_age_ms: int diff --git a/faststream/confluent/schemas/partition.py b/faststream/confluent/schemas/partition.py new file mode 100644 index 0000000000..e24a5608a3 --- /dev/null +++ b/faststream/confluent/schemas/partition.py @@ -0,0 +1,39 @@ +from typing import Optional + +from confluent_kafka import TopicPartition as ConfluentPartition + + +class TopicPartition: + __slots__ = ( + "topic", + "partition", + "offset", + "metadata", + "leader_epoch", + ) + + def __init__( + self, + topic: str, + partition: int = -1, + offset: int = -1001, + metadata: Optional[str] = None, + leader_epoch: Optional[int] = None, + ) -> None: + self.topic = topic + self.partition = partition + self.offset = offset + self.metadata = metadata + self.leader_epoch = leader_epoch + + def to_confluent(self) -> ConfluentPartition: + kwargs = { + "topic": self.topic, + "partition": self.partition, + "offset": self.offset, + } + if self.metadata is not None: + kwargs["metadata"] = self.metadata + if self.leader_epoch is not None: + kwargs["leader_epoch"] = self.leader_epoch + return ConfluentPartition(**kwargs) # type: ignore[arg-type] diff --git a/faststream/confluent/subscriber/factory.py b/faststream/confluent/subscriber/factory.py index f1d001b888..dcb7e414b3 100644 --- a/faststream/confluent/subscriber/factory.py +++ b/faststream/confluent/subscriber/factory.py @@ -3,6 +3,7 @@ Iterable, Literal, Optional, + Sequence, Tuple, Union, overload, @@ -18,14 +19,16 @@ from fast_depends.dependencies import Depends from faststream.broker.types import BrokerMiddleware + from faststream.confluent.schemas import TopicPartition from faststream.types import AnyDict @overload def create_subscriber( *topics: str, + partitions: Sequence["TopicPartition"], + polling_interval: float, batch: Literal[True], - batch_timeout_ms: int, max_records: Optional[int], # Kafka information group_id: Optional[str], @@ -47,8 +50,9 @@ def create_subscriber( @overload def create_subscriber( *topics: str, + 
partitions: Sequence["TopicPartition"], + polling_interval: float, batch: Literal[False], - batch_timeout_ms: int, max_records: Optional[int], # Kafka information group_id: Optional[str], @@ -70,8 +74,9 @@ def create_subscriber( @overload def create_subscriber( *topics: str, + partitions: Sequence["TopicPartition"], + polling_interval: float, batch: bool, - batch_timeout_ms: int, max_records: Optional[int], # Kafka information group_id: Optional[str], @@ -97,8 +102,9 @@ def create_subscriber( def create_subscriber( *topics: str, + partitions: Sequence["TopicPartition"], + polling_interval: float, batch: bool, - batch_timeout_ms: int, max_records: Optional[int], # Kafka information group_id: Optional[str], @@ -123,7 +129,8 @@ def create_subscriber( if batch: return AsyncAPIBatchSubscriber( *topics, - batch_timeout_ms=batch_timeout_ms, + partitions=partitions, + polling_interval=polling_interval, max_records=max_records, group_id=group_id, connection_data=connection_data, @@ -140,6 +147,8 @@ def create_subscriber( else: return AsyncAPIDefaultSubscriber( *topics, + partitions=partitions, + polling_interval=polling_interval, group_id=group_id, connection_data=connection_data, is_manual=is_manual, diff --git a/faststream/confluent/subscriber/usecase.py b/faststream/confluent/subscriber/usecase.py index dde949848f..f0f7c18a3a 100644 --- a/faststream/confluent/subscriber/usecase.py +++ b/faststream/confluent/subscriber/usecase.py @@ -6,6 +6,7 @@ Callable, Dict, Iterable, + List, Optional, Sequence, Tuple, @@ -19,6 +20,7 @@ from faststream.broker.subscriber.usecase import SubscriberUsecase from faststream.broker.types import MsgType from faststream.confluent.parser import AsyncConfluentParser +from faststream.confluent.schemas import TopicPartition if TYPE_CHECKING: from fast_depends.dependencies import Depends @@ -49,6 +51,8 @@ class LogicSubscriber(ABC, SubscriberUsecase[MsgType]): def __init__( self, *topics: str, + partitions: Sequence["TopicPartition"], + polling_interval: float, # Kafka information group_id: Optional[str], connection_data: "AnyDict", @@ -81,16 +85,20 @@ def __init__( include_in_schema=include_in_schema, ) + self.__connection_data = connection_data + self.group_id = group_id self.topics = topics + self.partitions = partitions self.is_manual = is_manual - self.builder = None + self.consumer = None self.task = None + self.polling_interval = polling_interval # Setup it later self.client_id = "" - self.__connection_data = connection_data + self.builder = None @override def setup( # type: ignore[override] @@ -135,6 +143,7 @@ async def start(self) -> None: self.consumer = consumer = self.builder( *self.topics, + partitions=self.partitions, group_id=self.group_id, client_id=self.client_id, **self.__connection_data, @@ -196,12 +205,22 @@ async def _consume(self) -> None: if msg is not None: await self.consume(msg) # type: ignore[arg-type] + @property + def topic_names(self) -> List[str]: + if self.topics: + return list(self.topics) + else: + return [f"{p.topic}-{p.partition}" for p in self.partitions] + @staticmethod def get_routing_hash(topics: Iterable[str], group_id: Optional[str] = None) -> int: return hash("".join((*topics, group_id or ""))) def __hash__(self) -> int: - return self.get_routing_hash(self.topics, self.group_id) + return self.get_routing_hash( + topics=self.topic_names, + group_id=self.group_id, + ) @staticmethod def build_log_context( @@ -218,12 +237,25 @@ def build_log_context( def add_prefix(self, prefix: str) -> None: self.topics = tuple("".join((prefix, t)) 
for t in self.topics)
+        self.partitions = [
+            TopicPartition(
+                topic="".join((prefix, p.topic)),
+                partition=p.partition,
+                offset=p.offset,
+                metadata=p.metadata,
+                leader_epoch=p.leader_epoch,
+            )
+            for p in self.partitions
+        ]
+
 
 class DefaultSubscriber(LogicSubscriber[Message]):
     def __init__(
         self,
         *topics: str,
         # Kafka information
+        partitions: Sequence["TopicPartition"],
+        polling_interval: float,
         group_id: Optional[str],
         connection_data: "AnyDict",
         is_manual: bool,
@@ -240,6 +272,8 @@ def __init__(
     ) -> None:
         super().__init__(
             *topics,
+            partitions=partitions,
+            polling_interval=polling_interval,
             group_id=group_id,
             connection_data=connection_data,
             is_manual=is_manual,
@@ -260,14 +294,14 @@ def __init__(
 
     async def get_msg(self) -> Optional["Message"]:
         assert self.consumer, "You should setup subscriber at first."  # nosec B101
-        return await self.consumer.getone()
+        return await self.consumer.getone(timeout=self.polling_interval)
 
     def get_log_context(
         self,
         message: Optional["StreamMessage[Message]"],
     ) -> Dict[str, str]:
         if message is None:
-            topic = ",".join(self.topics)
+            topic = ",".join(self.topic_names)
         else:
             topic = message.raw_message.topic() or ",".join(self.topics)
 
@@ -282,7 +316,8 @@ class BatchSubscriber(LogicSubscriber[Tuple[Message, ...]]):
     def __init__(
         self,
         *topics: str,
-        batch_timeout_ms: int,
+        partitions: Sequence["TopicPartition"],
+        polling_interval: float,
         max_records: Optional[int],
         # Kafka information
         group_id: Optional[str],
@@ -299,11 +334,12 @@ def __init__(
         description_: Optional[str],
         include_in_schema: bool,
     ) -> None:
-        self.batch_timeout_ms = batch_timeout_ms
         self.max_records = max_records
 
         super().__init__(
             *topics,
+            partitions=partitions,
+            polling_interval=polling_interval,
             group_id=group_id,
             connection_data=connection_data,
             is_manual=is_manual,
@@ -326,12 +362,12 @@ async def get_msg(self) -> Optional[Tuple["Message", ...]]:
         assert self.consumer, "You should setup subscriber at first."  # nosec B101
 
         messages = await self.consumer.getmany(
-            timeout_ms=self.batch_timeout_ms,
+            timeout=self.polling_interval,
             max_records=self.max_records,
         )
 
-        if not messages:  # pragma: no cover
-            await anyio.sleep(self.batch_timeout_ms / 1000)
+        if not messages:  # TODO: why are we sleeping here?
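+            # an empty poll has already blocked for up to `polling_interval` inside
+            # `getmany`; this sleep only adds an extra pause before the next attempt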
+ await anyio.sleep(self.polling_interval) return None return messages @@ -341,9 +377,9 @@ def get_log_context( message: Optional["StreamMessage[Tuple[Message, ...]]"], ) -> Dict[str, str]: if message is None: - topic = ",".join(self.topics) + topic = ",".join(self.topic_names) else: - topic = message.raw_message[0].topic() or ",".join(self.topics) + topic = message.raw_message[0].topic() or ",".join(self.topic_names) return self.build_log_context( message=message, diff --git a/faststream/confluent/testing.py b/faststream/confluent/testing.py index 1a00e829c2..8af6903dec 100644 --- a/faststream/confluent/testing.py +++ b/faststream/confluent/testing.py @@ -8,12 +8,14 @@ from faststream.confluent.broker import KafkaBroker from faststream.confluent.publisher.asyncapi import AsyncAPIBatchPublisher from faststream.confluent.publisher.producer import AsyncConfluentFastProducer +from faststream.confluent.schemas import TopicPartition from faststream.confluent.subscriber.asyncapi import AsyncAPIBatchSubscriber from faststream.testing.broker import TestBroker, call_handler if TYPE_CHECKING: from faststream.broker.wrapper.call import HandlerCallWrapper from faststream.confluent.publisher.asyncapi import AsyncAPIPublisher + from faststream.confluent.subscriber.usecase import LogicSubscriber from faststream.types import SendableMessage __all__ = ("TestKafkaBroker",) @@ -36,18 +38,38 @@ def create_publisher_fake_subscriber( broker: KafkaBroker, publisher: "AsyncAPIPublisher[Any]", ) -> "HandlerCallWrapper[Any, Any, Any]": - sub = broker.subscriber( # type: ignore[call-overload,misc] - publisher.topic, - batch=isinstance(publisher, AsyncAPIBatchPublisher), - ) + sub: Optional[Any] = None + for handler in broker._subscribers.values(): + if _is_handler_matches( + handler, topic=publisher.topic, partition=publisher.partition + ): + sub = handler + break + + if sub is None: + if publisher.partition: + tp = TopicPartition( + topic=publisher.topic, partition=publisher.partition + ) + sub = broker.subscriber( + partitions=[tp], + batch=isinstance(publisher, AsyncAPIBatchPublisher), + auto_offset_reset="earliest", + ) + else: + sub = broker.subscriber( + publisher.topic, + batch=isinstance(publisher, AsyncAPIBatchPublisher), + auto_offset_reset="earliest", + ) if not sub.calls: @sub # type: ignore[misc] - def f(msg: Any) -> None: + def publisher_response_subscriber(msg: Any) -> None: pass - broker.setup_subscriber(sub) + broker.setup_subscriber(sub) # type: ignore[arg-type] return sub.calls[0].handler @@ -101,7 +123,7 @@ async def publish( # type: ignore[override] return_value = None for handler in self.broker._subscribers.values(): # pragma: no branch - if topic in handler.topics: + if _is_handler_matches(handler, topic, partition): handle_value = await call_handler( handler=handler, message=[incoming] @@ -130,7 +152,7 @@ async def publish_batch( correlation_id = correlation_id or gen_cor_id() for handler in self.broker._subscribers.values(): # pragma: no branch - if topic in handler.topics: + if _is_handler_matches(handler, topic, partition): messages = ( build_message( message=message, @@ -248,3 +270,17 @@ def _fake_connection(*args: Any, **kwargs: Any) -> AsyncMock: mock.getone.return_value = MagicMock() mock.getmany.return_value = [MagicMock()] return mock + + +def _is_handler_matches( + handler: "LogicSubscriber[Any]", + topic: str, + partition: Optional[int], +) -> bool: + return bool( + any( + p.topic == topic and (partition is None or p.partition == partition) + for p in handler.partitions + ) + or 
topic in handler.topics + ) diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py index 9ce9e08dfe..e8d502e5e7 100644 --- a/faststream/kafka/broker/broker.py +++ b/faststream/kafka/broker/broker.py @@ -1,6 +1,5 @@ import logging from functools import partial -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -30,6 +29,7 @@ from faststream.kafka.publisher.producer import AioKafkaFastProducer from faststream.kafka.schemas.params import ConsumerConnectionParams from faststream.kafka.security import parse_security +from faststream.types import EMPTY from faststream.utils.data import filter_by_dict Partition = TypeVar("Partition") @@ -477,9 +477,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -594,9 +594,9 @@ async def _close( async def connect( # type: ignore[override] self, bootstrap_servers: Annotated[ - Union[str, Iterable[str], object], + Union[str, Iterable[str]], Doc("Kafka addresses to connect."), - ] = Parameter.empty, + ] = EMPTY, **kwargs: "Unpack[KafkaInitKwargs]", ) -> Callable[..., aiokafka.AIOKafkaConsumer]: """Connect to Kafka servers manually. @@ -604,7 +604,7 @@ async def connect( # type: ignore[override] Consumes the same with `KafkaBroker.__init__` arguments and overrides them. To startup subscribers too you should use `broker.start()` after/instead this method. """ - if bootstrap_servers is not Parameter.empty: + if bootstrap_servers is not EMPTY: connect_kwargs: AnyDict = { **kwargs, "bootstrap_servers": bootstrap_servers, diff --git a/faststream/kafka/broker/logging.py b/faststream/kafka/broker/logging.py index 16b1103b83..e7e534e98b 100644 --- a/faststream/kafka/broker/logging.py +++ b/faststream/kafka/broker/logging.py @@ -1,9 +1,9 @@ import logging -from inspect import Parameter from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Tuple, Union from faststream.broker.core.usecase import BrokerUsecase from faststream.log.logging import get_broker_logger +from faststream.types import EMPTY if TYPE_CHECKING: import aiokafka @@ -26,7 +26,7 @@ class KafkaLoggingBroker( def __init__( self, *args: Any, - logger: Union["LoggerProto", object, None] = Parameter.empty, + logger: Optional["LoggerProto"] = EMPTY, log_level: int = logging.INFO, log_fmt: Optional[str] = None, **kwargs: Any, diff --git a/faststream/kafka/fastapi/fastapi.py b/faststream/kafka/fastapi/fastapi.py index 9b9aa0cb11..d1ee019b1d 100644 --- a/faststream/kafka/fastapi/fastapi.py +++ b/faststream/kafka/fastapi/fastapi.py @@ -1,5 +1,4 @@ import logging -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -33,6 +32,7 @@ from faststream.broker.fastapi.router import StreamRouter from faststream.broker.utils import default_filter from faststream.kafka.broker.broker import KafkaBroker as KB +from faststream.types import EMPTY if TYPE_CHECKING: from asyncio import AbstractEventLoop @@ -312,9 +312,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py index 
8e35ed0a02..92ad8c163f 100644
--- a/faststream/kafka/parser.py
+++ b/faststream/kafka/parser.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, cast
 
 from faststream.broker.message import decode_message, gen_cor_id
 from faststream.kafka.message import FAKE_CONSUMER, KafkaMessage
@@ -97,7 +97,9 @@ async def decode_message(
         msg: "StreamMessage[Tuple[ConsumerRecord, ...]]",
     ) -> "DecodedMessage":
         """Decode a batch of messages."""
+        # bind super() outside the comprehension: zero-argument super() can't be resolved inside it
+        super_obj = cast(AioKafkaParser, super())
+
         return [
-            decode_message(await super(AioKafkaBatchParser, self).parse_message(m))
-            for m in msg.raw_message
+            decode_message(await super_obj.parse_message(m)) for m in msg.raw_message
         ]
diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py
index e9297d0bb1..c1bcfa6511 100644
--- a/faststream/kafka/subscriber/usecase.py
+++ b/faststream/kafka/subscriber/usecase.py
@@ -256,23 +256,6 @@ def build_log_context(
             "message_id": getattr(message, "message_id", ""),
         }
 
-    def get_log_context(
-        self,
-        message: Optional["StreamMessage[ConsumerRecord]"],
-    ) -> Dict[str, str]:
-        if message is None:
-            topic = ",".join(self.topic_names)
-        elif isinstance(message.raw_message, Sequence):
-            topic = message.raw_message[0].topic
-        else:
-            topic = message.raw_message.topic
-
-        return self.build_log_context(
-            message=message,
-            topic=topic,
-            group_id=self.group_id,
-        )
-
     def add_prefix(self, prefix: str) -> None:
         self.topics = tuple("".join((prefix, t)) for t in self.topics)
 
@@ -348,6 +331,21 @@ async def get_msg(self) -> "ConsumerRecord":
         assert self.consumer, "You should setup subscriber at first."
# nosec B101 return await self.consumer.getone() + def get_log_context( + self, + message: Optional["StreamMessage[ConsumerRecord]"], + ) -> Dict[str, str]: + if message is None: + topic = ",".join(self.topic_names) + else: + topic = message.raw_message.topic + + return self.build_log_context( + message=message, + topic=topic, + group_id=self.group_id, + ) + class BatchSubscriber(LogicSubscriber[Tuple["ConsumerRecord", ...]]): def __init__( @@ -428,3 +426,18 @@ async def get_msg(self) -> Tuple["ConsumerRecord", ...]: return () return tuple(chain(*messages.values())) + + def get_log_context( + self, + message: Optional["StreamMessage[Tuple[ConsumerRecord, ...]]"], + ) -> Dict[str, str]: + if message is None: + topic = ",".join(self.topic_names) + else: + topic = message.raw_message[0].topic + + return self.build_log_context( + message=message, + topic=topic, + group_id=self.group_id, + ) diff --git a/faststream/kafka/testing.py b/faststream/kafka/testing.py index 4a52be016f..d8b585252f 100755 --- a/faststream/kafka/testing.py +++ b/faststream/kafka/testing.py @@ -55,7 +55,7 @@ def create_publisher_fake_subscriber( if not sub.calls: @sub # type: ignore[misc] - def f(msg: Any) -> None: + def publisher_response_subscriber(msg: Any) -> None: pass broker.setup_subscriber(sub) diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py index 74fa1f0bdf..f32b9ddd13 100644 --- a/faststream/nats/broker/broker.py +++ b/faststream/nats/broker/broker.py @@ -1,6 +1,5 @@ import logging import warnings -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -38,6 +37,7 @@ from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer from faststream.nats.security import parse_security from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber +from faststream.types import EMPTY if TYPE_CHECKING: import ssl @@ -417,9 +417,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -555,16 +555,16 @@ def __init__( async def connect( # type: ignore[override] self, servers: Annotated[ - Union[str, Iterable[str], object], + Union[str, Iterable[str]], Doc("NATS cluster addresses to connect."), - ] = Parameter.empty, + ] = EMPTY, **kwargs: "Unpack[NatsInitKwargs]", ) -> "Client": """Connect broker object to NATS cluster. To startup subscribers too you should use `broker.start()` after/instead this method. 
""" - if servers is not Parameter.empty: + if servers is not EMPTY: connect_kwargs: AnyDict = { **kwargs, "servers": servers, diff --git a/faststream/nats/broker/logging.py b/faststream/nats/broker/logging.py index 23bf41779b..5e2572ddcb 100644 --- a/faststream/nats/broker/logging.py +++ b/faststream/nats/broker/logging.py @@ -1,12 +1,12 @@ import logging -from inspect import Parameter -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union +from typing import TYPE_CHECKING, Any, ClassVar, Optional from nats.aio.client import Client from nats.aio.msg import Msg from faststream.broker.core.usecase import BrokerUsecase from faststream.log.logging import get_broker_logger +from faststream.types import EMPTY if TYPE_CHECKING: from faststream.types import LoggerProto @@ -22,7 +22,7 @@ class NatsLoggingBroker(BrokerUsecase[Msg, Client]): def __init__( self, *args: Any, - logger: Union["LoggerProto", object, None] = Parameter.empty, + logger: Optional["LoggerProto"] = EMPTY, log_level: int = logging.INFO, log_fmt: Optional[str] = None, **kwargs: Any, diff --git a/faststream/nats/fastapi/fastapi.py b/faststream/nats/fastapi/fastapi.py index 4010c5f02a..5f5dfe3281 100644 --- a/faststream/nats/fastapi/fastapi.py +++ b/faststream/nats/fastapi/fastapi.py @@ -1,5 +1,4 @@ import logging -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -39,6 +38,7 @@ from faststream.nats.broker import NatsBroker from faststream.nats.publisher.asyncapi import AsyncAPIPublisher from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber +from faststream.types import EMPTY if TYPE_CHECKING: import ssl @@ -265,9 +265,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index 4370f70e1a..85c133770c 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -1,7 +1,6 @@ import asyncio from abc import abstractmethod from contextlib import suppress -from functools import cached_property from typing import ( TYPE_CHECKING, Any, @@ -146,7 +145,7 @@ def setup( # type: ignore[override] _call_decorators=_call_decorators, ) - @cached_property + @property def clear_subject(self) -> str: """Compile `test.{name}` to `test.*` subject.""" _, path = compile_nats_wildcard(self.subject) @@ -215,7 +214,7 @@ def add_prefix(self, prefix: str) -> None: for subject in (self.config.filter_subjects or ()) ] - @cached_property + @property def _resolved_subject_string(self) -> str: return self.subject or ", ".join(self.config.filter_subjects or ()) diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index 4d13333c5f..b998d26b43 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -33,7 +33,7 @@ def create_publisher_fake_subscriber( if not sub.calls: @sub - def f(msg: Any) -> None: + def publisher_response_subscriber(msg: Any) -> None: pass broker.setup_subscriber(sub) diff --git a/faststream/rabbit/broker/broker.py b/faststream/rabbit/broker/broker.py index c12ec28bae..25b2e52085 100644 --- a/faststream/rabbit/broker/broker.py +++ b/faststream/rabbit/broker/broker.py @@ -1,5 +1,4 @@ import logging -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -31,6 +30,7 @@ from 
faststream.rabbit.security import parse_security from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber from faststream.rabbit.utils import build_url +from faststream.types import EMPTY if TYPE_CHECKING: from ssl import SSLContext @@ -102,6 +102,16 @@ def __init__( "TimeoutType", Doc("Connection establishement timeout."), ] = None, + fail_fast: Annotated[ + bool, + Doc( + "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable." + ), + ] = True, + reconnect_interval: Annotated[ + "TimeoutType", + Doc("Time to sleep between reconnection attempts."), + ] = 5.0, # channel args channel_number: Annotated[ Optional[int], @@ -186,9 +196,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -242,6 +252,8 @@ def __init__( url=str(amqp_url), ssl_context=security_args.get("ssl_context"), timeout=timeout, + fail_fast=fail_fast, + reconnect_interval=reconnect_interval, # channel args channel_number=channel_number, publisher_confirms=publisher_confirms, @@ -298,8 +310,9 @@ def _publisher_setup_extra(self) -> "AnyDict": async def connect( # type: ignore[override] self, url: Annotated[ - Union[str, "URL", object], Doc("RabbitMQ destination location to connect.") - ] = Parameter.empty, + Union[str, "URL", None], + Doc("RabbitMQ destination location to connect."), + ] = EMPTY, *, host: Annotated[ Optional[str], @@ -331,26 +344,36 @@ async def connect( # type: ignore[override] "TimeoutType", Doc("Connection establishement timeout."), ] = None, + fail_fast: Annotated[ + bool, + Doc( + "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable." + ), + ] = EMPTY, + reconnect_interval: Annotated[ + "TimeoutType", + Doc("Time to sleep between reconnection attempts."), + ] = EMPTY, # channel args channel_number: Annotated[ - Union[int, None, object], + Optional[int], Doc("Specify the channel number explicit."), - ] = Parameter.empty, + ] = EMPTY, publisher_confirms: Annotated[ - Union[bool, object], + bool, Doc( "if `True` the `publish` method will " "return `bool` type after publish is complete." "Otherwise it will returns `None`." ), - ] = Parameter.empty, + ] = EMPTY, on_return_raises: Annotated[ - Union[bool, object], + bool, Doc( "raise an :class:`aio_pika.exceptions.DeliveryError`" "when mandatory message will be returned" ), - ] = Parameter.empty, + ] = EMPTY, ) -> "RobustConnection": """Connect broker object to RabbitMQ. 
@@ -358,19 +381,25 @@ async def connect( # type: ignore[override] """ kwargs: AnyDict = {} - if channel_number is not Parameter.empty: + if channel_number is not EMPTY: kwargs["channel_number"] = channel_number - if publisher_confirms is not Parameter.empty: + if publisher_confirms is not EMPTY: kwargs["publisher_confirms"] = publisher_confirms - if on_return_raises is not Parameter.empty: + if on_return_raises is not EMPTY: kwargs["on_return_raises"] = on_return_raises if timeout: kwargs["timeout"] = timeout - url = None if url is Parameter.empty else cast(Union[str, "URL"], url) + if fail_fast is not EMPTY: + kwargs["fail_fast"] = fail_fast + + if reconnect_interval is not EMPTY: + kwargs["reconnect_interval"] = reconnect_interval + + url = None if url is EMPTY else url if url or any( (host, port, virtualhost, ssl_options, client_properties, security) @@ -401,8 +430,11 @@ async def _connect( # type: ignore[override] self, url: str, *, + fail_fast: bool, + reconnect_interval: "TimeoutType", timeout: "TimeoutType", ssl_context: Optional["SSLContext"], + # channel args channel_number: Optional[int], publisher_confirms: bool, on_return_raises: bool, @@ -413,6 +445,8 @@ async def _connect( # type: ignore[override] url, timeout=timeout, ssl_context=ssl_context, + reconnect_interval=reconnect_interval, + fail_fast=fail_fast, ), ) diff --git a/faststream/rabbit/broker/logging.py b/faststream/rabbit/broker/logging.py index 21af975df4..738254b36e 100644 --- a/faststream/rabbit/broker/logging.py +++ b/faststream/rabbit/broker/logging.py @@ -1,11 +1,11 @@ import logging -from inspect import Parameter -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union +from typing import TYPE_CHECKING, Any, ClassVar, Optional from aio_pika import IncomingMessage, RobustConnection from faststream.broker.core.usecase import BrokerUsecase from faststream.log.logging import get_broker_logger +from faststream.types import EMPTY if TYPE_CHECKING: from faststream.types import LoggerProto @@ -21,7 +21,7 @@ class RabbitLoggingBroker(BrokerUsecase[IncomingMessage, RobustConnection]): def __init__( self, *args: Any, - logger: Union["LoggerProto", object, None] = Parameter.empty, + logger: Optional["LoggerProto"] = EMPTY, log_level: int = logging.INFO, log_fmt: Optional[str] = None, **kwargs: Any, diff --git a/faststream/rabbit/fastapi/router.py b/faststream/rabbit/fastapi/router.py index 71342b8787..2d155ee973 100644 --- a/faststream/rabbit/fastapi/router.py +++ b/faststream/rabbit/fastapi/router.py @@ -1,5 +1,4 @@ import logging -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -31,6 +30,7 @@ RabbitQueue, ) from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber +from faststream.types import EMPTY if TYPE_CHECKING: from enum import Enum @@ -96,6 +96,16 @@ def __init__( "TimeoutType", Doc("Connection establishement timeout."), ] = None, + fail_fast: Annotated[ + bool, + Doc( + "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable." 
+ ), + ] = True, + reconnect_interval: Annotated[ + "TimeoutType", + Doc("Time to sleep between reconnection attempts."), + ] = 5.0, # channel args channel_number: Annotated[ Optional[int], @@ -176,9 +186,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -423,6 +433,8 @@ def __init__( ssl_options=ssl_options, client_properties=client_properties, timeout=timeout, + fail_fast=fail_fast, + reconnect_interval=reconnect_interval, max_consumers=max_consumers, app_id=app_id, graceful_timeout=graceful_timeout, diff --git a/faststream/rabbit/testing.py b/faststream/rabbit/testing.py index ec57d7a17d..81df26f45f 100644 --- a/faststream/rabbit/testing.py +++ b/faststream/rabbit/testing.py @@ -66,7 +66,7 @@ def create_publisher_fake_subscriber( if not sub.calls: @sub - def f(msg: Any) -> None: + def publisher_response_subscriber(msg: Any) -> None: pass broker.setup_subscriber(sub) diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py index 0eba0f8f5f..e46d9cd067 100644 --- a/faststream/redis/broker/broker.py +++ b/faststream/redis/broker/broker.py @@ -1,6 +1,5 @@ import logging from functools import partial -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -33,6 +32,7 @@ from faststream.redis.broker.registrator import RedisRegistrator from faststream.redis.publisher.producer import RedisFastProducer from faststream.redis.security import parse_security +from faststream.types import EMPTY if TYPE_CHECKING: from types import TracebackType @@ -94,12 +94,11 @@ class RedisBroker( def __init__( self, url: str = "redis://localhost:6379", - polling_interval: Optional[float] = None, *, - host: Union[str, object] = Parameter.empty, - port: Union[str, int, object] = Parameter.empty, - db: Union[str, int, object] = Parameter.empty, - connection_class: Union[Type["Connection"], object] = Parameter.empty, + host: str = EMPTY, + port: Union[str, int] = EMPTY, + db: Union[str, int] = EMPTY, + connection_class: Type["Connection"] = EMPTY, client_name: Optional[str] = None, health_check_interval: float = 0, max_connections: Optional[int] = None, @@ -167,9 +166,9 @@ def __init__( ] = None, # logging args logger: Annotated[ - Union["LoggerProto", None, object], + Optional["LoggerProto"], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -196,7 +195,6 @@ def __init__( Doc("Any custom decorator to apply to wrapped functions."), ] = (), ) -> None: - self.global_polling_interval = polling_interval self._producer = None if asyncapi_url is None: @@ -255,11 +253,11 @@ def __init__( @override async def connect( # type: ignore[override] self, - url: Union[str, None, object] = Parameter.empty, + url: Optional[str] = EMPTY, **kwargs: "Unpack[RedisInitKwargs]", ) -> "Redis[bytes]": """Connect to the Redis server.""" - if url is not Parameter.empty: + if url is not EMPTY: connect_kwargs: AnyDict = { "url": url, **kwargs, @@ -274,10 +272,10 @@ async def _connect( # type: ignore[override] self, url: str, *, - host: Union[str, object], - port: Union[str, int, object], - db: Union[str, int, object], - connection_class: Union[Type["Connection"], object], + host: str, + port: Union[str, int], + db: Union[str, 
int], + connection_class: Type["Connection"], client_name: Optional[str], health_check_interval: float, max_connections: Optional[int], @@ -314,13 +312,13 @@ async def _connect( # type: ignore[override] "encoder_class": encoder_class, } - if port is not Parameter.empty: + if port is not EMPTY: url_options["port"] = port - if host is not Parameter.empty: + if host is not EMPTY: url_options["host"] = host - if db is not Parameter.empty: + if db is not EMPTY: url_options["db"] = db - if connection_class is not Parameter.empty: + if connection_class is not EMPTY: url_options["connection_class"] = connection_class pool = ConnectionPool( diff --git a/faststream/redis/broker/logging.py b/faststream/redis/broker/logging.py index e488d5968d..b4d94af615 100644 --- a/faststream/redis/broker/logging.py +++ b/faststream/redis/broker/logging.py @@ -1,10 +1,10 @@ import logging -from inspect import Parameter -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union +from typing import TYPE_CHECKING, Any, ClassVar, Optional from faststream.broker.core.usecase import BrokerUsecase from faststream.log.logging import get_broker_logger from faststream.redis.message import UnifyRedisDict +from faststream.types import EMPTY if TYPE_CHECKING: from redis.asyncio.client import Redis # noqa: F401 @@ -21,7 +21,7 @@ class RedisLoggingBroker(BrokerUsecase[UnifyRedisDict, "Redis[bytes]"]): def __init__( self, *args: Any, - logger: Union["LoggerProto", object, None] = Parameter.empty, + logger: Optional["LoggerProto"] = EMPTY, log_level: int = logging.INFO, log_fmt: Optional[str] = None, **kwargs: Any, diff --git a/faststream/redis/fastapi/fastapi.py b/faststream/redis/fastapi/fastapi.py index 7a8de1b18d..8521f0a433 100644 --- a/faststream/redis/fastapi/fastapi.py +++ b/faststream/redis/fastapi/fastapi.py @@ -1,5 +1,4 @@ import logging -from inspect import Parameter from typing import ( TYPE_CHECKING, Any, @@ -35,6 +34,7 @@ from faststream.redis.publisher.asyncapi import AsyncAPIPublisher from faststream.redis.schemas import ListSub, PubSub, StreamSub from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber +from faststream.types import EMPTY if TYPE_CHECKING: from enum import Enum @@ -67,12 +67,11 @@ class RedisRouter(StreamRouter[UnifyRedisDict]): def __init__( self, url: str = "redis://localhost:6379", - polling_interval: Optional[float] = None, *, - host: Union[str, object] = Parameter.empty, - port: Union[str, int, object] = Parameter.empty, - db: Union[str, int, object] = Parameter.empty, - connection_class: Union[Type["Connection"], object] = Parameter.empty, + host: str = EMPTY, + port: Union[str, int] = EMPTY, + db: Union[str, int] = EMPTY, + connection_class: Type["Connection"] = EMPTY, client_name: Optional[str] = SERVICE_NAME, health_check_interval: float = 0, max_connections: Optional[int] = None, @@ -138,7 +137,7 @@ def __init__( logger: Annotated[ Union["LoggerProto", None, object], Doc("User specified logger to pass into Context and log service messages."), - ] = Parameter.empty, + ] = EMPTY, log_level: Annotated[ int, Doc("Service messages log level."), @@ -377,7 +376,6 @@ def __init__( ) -> None: super().__init__( url=url, - polling_interval=polling_interval, host=host, port=port, db=db, diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py index 67325098be..ca9719747a 100644 --- a/faststream/redis/subscriber/usecase.py +++ b/faststream/redis/subscriber/usecase.py @@ -157,9 +157,7 @@ async def start( # type: ignore[override] await 
super().start() start_signal = anyio.Event() - self.task = asyncio.create_task( - self._consume(*args, start_signal=start_signal) - ) + self.task = asyncio.create_task(self._consume(*args, start_signal=start_signal)) with anyio.fail_after(3.0): await start_signal.wait() diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py index c43033e30f..2a625b80d6 100644 --- a/faststream/redis/testing.py +++ b/faststream/redis/testing.py @@ -43,7 +43,7 @@ def create_publisher_fake_subscriber( if not sub.calls: @sub - def f(msg: Any) -> None: + def publisher_response_subscriber(msg: Any) -> None: pass broker.setup_subscriber(sub) diff --git a/faststream/security.py b/faststream/security.py index 6a959da45e..4bcd4eba99 100644 --- a/faststream/security.py +++ b/faststream/security.py @@ -159,10 +159,7 @@ class SASLOAuthBearer(BaseSecurity): This class defines basic security configuration for SASL/OAUTHBEARER authentication. """ - __slots__ = ( - "use_ssl", - "ssl_context" - ) + __slots__ = ("use_ssl", "ssl_context") def get_requirement(self) -> List["AnyDict"]: """Get the security requirements for SASL/OAUTHBEARER authentication.""" @@ -179,10 +176,7 @@ class SASLGSSAPI(BaseSecurity): This class defines security configuration for SASL/GSSAPI authentication. """ - __slots__ = ( - "use_ssl", - "ssl_context" - ) + __slots__ = ("use_ssl", "ssl_context") def get_requirement(self) -> List["AnyDict"]: """Get the security requirements for SASL/GSSAPI authentication.""" diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py index 07ee03dee7..813cd818aa 100644 --- a/faststream/testing/broker.py +++ b/faststream/testing/broker.py @@ -70,7 +70,6 @@ def __init__( async def __aenter__(self) -> Broker: # TODO: remove useless middlewares filter - middlewares = tuple( filter( lambda x: not isinstance(x, CriticalLogMiddleware), @@ -148,6 +147,10 @@ def _patch_broker(cls, broker: Broker) -> Generator[None, None, None]: broker, "_producer", new=None, + ), mock.patch.object( + broker, + "ping", + return_value=True, ): yield diff --git a/faststream/types.py b/faststream/types.py index 681a7a3b18..2dfca177d0 100644 --- a/faststream/types.py +++ b/faststream/types.py @@ -107,3 +107,6 @@ def log( exc_info: Any = None, extra: Optional[Mapping[str, Any]] = None, ) -> None: ... 
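+
+# Module-level sentinel: identity checks (`value is EMPTY`) distinguish
+# "argument not passed" from an explicit `None` default.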
+ + +EMPTY: Any = object() diff --git a/faststream/utils/context/builders.py b/faststream/utils/context/builders.py index 2ee5bb01e5..76e7499ba3 100644 --- a/faststream/utils/context/builders.py +++ b/faststream/utils/context/builders.py @@ -1,6 +1,6 @@ -from inspect import Parameter from typing import Any, Callable, Optional +from faststream.types import EMPTY from faststream.utils.context.types import Context as Context_ @@ -8,7 +8,7 @@ def Context( # noqa: N802 real_name: str = "", *, cast: bool = False, - default: Any = Parameter.empty, + default: Any = EMPTY, initial: Optional[Callable[..., Any]] = None, ) -> Any: return Context_( @@ -23,7 +23,7 @@ def Header( # noqa: N802 real_name: str = "", *, cast: bool = True, - default: Any = Parameter.empty, + default: Any = EMPTY, ) -> Any: return Context_( real_name=real_name, @@ -37,7 +37,7 @@ def Path( # noqa: N802 real_name: str = "", *, cast: bool = True, - default: Any = Parameter.empty, + default: Any = EMPTY, ) -> Any: return Context_( real_name=real_name, diff --git a/faststream/utils/context/repository.py b/faststream/utils/context/repository.py index eb6404b6bb..af90b9a197 100644 --- a/faststream/utils/context/repository.py +++ b/faststream/utils/context/repository.py @@ -1,9 +1,8 @@ from contextlib import contextmanager from contextvars import ContextVar, Token -from inspect import Parameter from typing import Any, Dict, Iterator, Mapping -from faststream.types import AnyDict +from faststream.types import EMPTY, AnyDict from faststream.utils.classes import Singleton __all__ = ("ContextRepo", "context") @@ -128,7 +127,7 @@ def get(self, key: str, default: Any = None) -> Any: Returns: The value associated with the key. """ - if (glob := self._global_context.get(key, Parameter.empty)) is Parameter.empty: + if (glob := self._global_context.get(key, EMPTY)) is EMPTY: return self.get_local(key, default) else: return glob @@ -158,7 +157,7 @@ def resolve(self, argument: str) -> Any: """ first, *keys = argument.split(".") - if (v := self.get(first, Parameter.empty)) is Parameter.empty: + if (v := self.get(first, EMPTY)) is EMPTY: raise KeyError(f"`{self.context}` does not contains `{first}` key") for i in keys: diff --git a/faststream/utils/context/types.py b/faststream/utils/context/types.py index f27d6fe77c..38f7b00678 100644 --- a/faststream/utils/context/types.py +++ b/faststream/utils/context/types.py @@ -1,9 +1,8 @@ -from inspect import Parameter from typing import Any, Callable, Optional from fast_depends.library import CustomField -from faststream.types import AnyDict +from faststream.types import EMPTY, AnyDict from faststream.utils.context.repository import context @@ -24,7 +23,7 @@ def __init__( self, real_name: str = "", *, - default: Any = Parameter.empty, + default: Any = EMPTY, initial: Optional[Callable[..., Any]] = None, cast: bool = False, prefix: str = "", @@ -47,7 +46,7 @@ def __init__( self.initial = initial super().__init__( cast=cast, - required=(default is Parameter.empty), + required=(default is EMPTY), ) def use(self, /, **kwargs: Any) -> AnyDict: @@ -67,7 +66,7 @@ def use(self, /, **kwargs: Any) -> AnyDict: default=self.default, initial=self.initial, ) - ) is not Parameter.empty: + ) is not EMPTY: kwargs[self.param_name] = v return kwargs @@ -78,13 +77,13 @@ def resolve_context_by_name( default: Any, initial: Optional[Callable[..., Any]], ) -> Any: - value: Any = Parameter.empty + value: Any = EMPTY try: value = context.resolve(name) except (KeyError, AttributeError): - if default is not Parameter.empty: + if 
default is not EMPTY: value = default elif initial is not None: diff --git a/pyproject.toml b/pyproject.toml index 47e1e72c7f..a5658e7f63 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ devdocs = [ types = [ "faststream[optionals]", - "mypy==1.11.0", + "mypy==1.11.1", # mypy extensions "types-Deprecated", "types-PyYAML", @@ -111,14 +111,14 @@ types = [ lint = [ "faststream[types]", - "ruff==0.5.5", + "ruff==0.5.6", "bandit==1.7.9", - "semgrep==1.81.0", + "semgrep==1.83.0", "codespell==2.3.0", ] test-core = [ - "coverage[toml]==7.6.0", + "coverage[toml]==7.6.1", "pytest==8.3.2", "pytest-asyncio==0.23.8", "dirty-equals==0.7.1.post0", @@ -127,7 +127,7 @@ test-core = [ testing = [ "faststream[test-core]", - "fastapi==0.111.1", + "fastapi==0.112.0", "pydantic-settings>=2.0.0,<3.0.0", "httpx==0.27.0", "PyYAML==6.0.1", diff --git a/tests/docs/__init__.py b/tests/a_docs/__init__.py similarity index 100% rename from tests/docs/__init__.py rename to tests/a_docs/__init__.py diff --git a/tests/docs/confluent/__init__.py b/tests/a_docs/confluent/__init__.py similarity index 100% rename from tests/docs/confluent/__init__.py rename to tests/a_docs/confluent/__init__.py diff --git a/tests/docs/confluent/ack/__init__.py b/tests/a_docs/confluent/ack/__init__.py similarity index 100% rename from tests/docs/confluent/ack/__init__.py rename to tests/a_docs/confluent/ack/__init__.py diff --git a/tests/docs/confluent/ack/test_errors.py b/tests/a_docs/confluent/ack/test_errors.py similarity index 93% rename from tests/docs/confluent/ack/test_errors.py rename to tests/a_docs/confluent/ack/test_errors.py index 8ca7bed5a9..2e17ffd176 100644 --- a/tests/docs/confluent/ack/test_errors.py +++ b/tests/a_docs/confluent/ack/test_errors.py @@ -17,6 +17,6 @@ async def test_ack_exc(): AsyncConfluentConsumer, "commit", spy_decorator(AsyncConfluentConsumer.commit) ) as m: async with TestKafkaBroker(broker, with_real=True), TestApp(app): - await handle.wait_call(10) + await handle.wait_call(20) assert m.mock.call_count diff --git a/tests/docs/confluent/additional_config/__init__.py b/tests/a_docs/confluent/additional_config/__init__.py similarity index 100% rename from tests/docs/confluent/additional_config/__init__.py rename to tests/a_docs/confluent/additional_config/__init__.py diff --git a/tests/docs/confluent/additional_config/test_app.py b/tests/a_docs/confluent/additional_config/test_app.py similarity index 100% rename from tests/docs/confluent/additional_config/test_app.py rename to tests/a_docs/confluent/additional_config/test_app.py diff --git a/tests/docs/confluent/basic/__init__.py b/tests/a_docs/confluent/basic/__init__.py similarity index 100% rename from tests/docs/confluent/basic/__init__.py rename to tests/a_docs/confluent/basic/__init__.py diff --git a/tests/docs/confluent/basic/test_basic.py b/tests/a_docs/confluent/basic/test_basic.py similarity index 100% rename from tests/docs/confluent/basic/test_basic.py rename to tests/a_docs/confluent/basic/test_basic.py diff --git a/tests/docs/confluent/basic/test_cmd_run.py b/tests/a_docs/confluent/basic/test_cmd_run.py similarity index 80% rename from tests/docs/confluent/basic/test_cmd_run.py rename to tests/a_docs/confluent/basic/test_cmd_run.py index bd06a60168..502e048a4f 100644 --- a/tests/docs/confluent/basic/test_cmd_run.py +++ b/tests/a_docs/confluent/basic/test_cmd_run.py @@ -8,13 +8,18 @@ from faststream.cli.main import cli +@pytest.fixture() +def confluent_basic_project(): + return "docs.docs_src.confluent.basic.basic:app" + + 
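The `EMPTY` object introduced in `faststream/types.py` above replaces `inspect.Parameter.empty` throughout the context helpers. The idiom is the classic module-level sentinel: a unique `object()` compared by identity, which keeps `None` (or any other value) usable as a real default. A minimal self-contained sketch of the pattern; only `EMPTY` itself mirrors `faststream.types`, the `resolve` helper is illustrative:

# Sentinel-pattern sketch; `resolve` is a made-up helper for illustration.
from typing import Any, Dict

EMPTY: Any = object()  # unique marker; always compare with `is`, never `==`

def resolve(store: Dict[str, Any], key: str, default: Any = EMPTY) -> Any:
    value = store.get(key, EMPTY)
    if value is EMPTY:        # key genuinely absent
        if default is EMPTY:  # and no default supplied: treat as required
            raise KeyError(key)
        return default
    return value

assert resolve({"a": None}, "a") is None  # None round-trips as a stored value
assert resolve({}, "a", default=0) == 0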
@pytest.mark.confluent() def test_run_cmd( runner: CliRunner, mock: Mock, event: asyncio.Event, monkeypatch: pytest.MonkeyPatch, - kafka_basic_project, + confluent_basic_project, ): async def patched_run(self: FastStream, *args, **kwargs): await self.start() @@ -27,7 +32,7 @@ async def patched_run(self: FastStream, *args, **kwargs): cli, [ "run", - kafka_basic_project, + confluent_basic_project, ], ) diff --git a/tests/docs/confluent/batch_consuming_pydantic/__init__.py b/tests/a_docs/confluent/batch_consuming_pydantic/__init__.py similarity index 100% rename from tests/docs/confluent/batch_consuming_pydantic/__init__.py rename to tests/a_docs/confluent/batch_consuming_pydantic/__init__.py diff --git a/tests/docs/confluent/batch_consuming_pydantic/test_app.py b/tests/a_docs/confluent/batch_consuming_pydantic/test_app.py similarity index 100% rename from tests/docs/confluent/batch_consuming_pydantic/test_app.py rename to tests/a_docs/confluent/batch_consuming_pydantic/test_app.py diff --git a/tests/docs/confluent/consumes_basics/__init__.py b/tests/a_docs/confluent/consumes_basics/__init__.py similarity index 100% rename from tests/docs/confluent/consumes_basics/__init__.py rename to tests/a_docs/confluent/consumes_basics/__init__.py diff --git a/tests/docs/confluent/consumes_basics/test_app.py b/tests/a_docs/confluent/consumes_basics/test_app.py similarity index 100% rename from tests/docs/confluent/consumes_basics/test_app.py rename to tests/a_docs/confluent/consumes_basics/test_app.py diff --git a/tests/docs/confluent/publish_batch/__init__.py b/tests/a_docs/confluent/publish_batch/__init__.py similarity index 100% rename from tests/docs/confluent/publish_batch/__init__.py rename to tests/a_docs/confluent/publish_batch/__init__.py diff --git a/tests/docs/confluent/publish_batch/test_app.py b/tests/a_docs/confluent/publish_batch/test_app.py similarity index 100% rename from tests/docs/confluent/publish_batch/test_app.py rename to tests/a_docs/confluent/publish_batch/test_app.py diff --git a/tests/docs/confluent/publish_batch/test_issues.py b/tests/a_docs/confluent/publish_batch/test_issues.py similarity index 100% rename from tests/docs/confluent/publish_batch/test_issues.py rename to tests/a_docs/confluent/publish_batch/test_issues.py diff --git a/tests/docs/confluent/publish_example/__init__.py b/tests/a_docs/confluent/publish_example/__init__.py similarity index 100% rename from tests/docs/confluent/publish_example/__init__.py rename to tests/a_docs/confluent/publish_example/__init__.py diff --git a/tests/docs/confluent/publish_example/test_app.py b/tests/a_docs/confluent/publish_example/test_app.py similarity index 100% rename from tests/docs/confluent/publish_example/test_app.py rename to tests/a_docs/confluent/publish_example/test_app.py diff --git a/tests/docs/confluent/publish_with_partition_key/__init__.py b/tests/a_docs/confluent/publish_with_partition_key/__init__.py similarity index 100% rename from tests/docs/confluent/publish_with_partition_key/__init__.py rename to tests/a_docs/confluent/publish_with_partition_key/__init__.py diff --git a/tests/docs/confluent/publish_with_partition_key/test_app.py b/tests/a_docs/confluent/publish_with_partition_key/test_app.py similarity index 100% rename from tests/docs/confluent/publish_with_partition_key/test_app.py rename to tests/a_docs/confluent/publish_with_partition_key/test_app.py diff --git a/tests/docs/confluent/publisher_object/__init__.py b/tests/a_docs/confluent/publisher_object/__init__.py similarity index 100% rename from 
tests/docs/confluent/publisher_object/__init__.py rename to tests/a_docs/confluent/publisher_object/__init__.py diff --git a/tests/docs/confluent/publisher_object/test_publisher_object.py b/tests/a_docs/confluent/publisher_object/test_publisher_object.py similarity index 100% rename from tests/docs/confluent/publisher_object/test_publisher_object.py rename to tests/a_docs/confluent/publisher_object/test_publisher_object.py diff --git a/tests/docs/confluent/raw_publish/__init__.py b/tests/a_docs/confluent/raw_publish/__init__.py similarity index 100% rename from tests/docs/confluent/raw_publish/__init__.py rename to tests/a_docs/confluent/raw_publish/__init__.py diff --git a/tests/docs/confluent/raw_publish/test_raw_publish.py b/tests/a_docs/confluent/raw_publish/test_raw_publish.py similarity index 100% rename from tests/docs/confluent/raw_publish/test_raw_publish.py rename to tests/a_docs/confluent/raw_publish/test_raw_publish.py diff --git a/tests/docs/confluent/test_security.py b/tests/a_docs/confluent/test_security.py similarity index 100% rename from tests/docs/confluent/test_security.py rename to tests/a_docs/confluent/test_security.py diff --git a/tests/docs/getting_started/__init__.py b/tests/a_docs/getting_started/__init__.py similarity index 100% rename from tests/docs/getting_started/__init__.py rename to tests/a_docs/getting_started/__init__.py diff --git a/tests/docs/getting_started/asyncapi/__init__.py b/tests/a_docs/getting_started/asyncapi/__init__.py similarity index 100% rename from tests/docs/getting_started/asyncapi/__init__.py rename to tests/a_docs/getting_started/asyncapi/__init__.py diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/__init__.py similarity index 100% rename from tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py rename to tests/a_docs/getting_started/asyncapi/asyncapi_customization/__init__.py diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_basic.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_basic.py similarity index 100% rename from tests/docs/getting_started/asyncapi/asyncapi_customization/test_basic.py rename to tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_basic.py diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_broker.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_broker.py similarity index 100% rename from tests/docs/getting_started/asyncapi/asyncapi_customization/test_broker.py rename to tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_broker.py diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_handler.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py similarity index 100% rename from tests/docs/getting_started/asyncapi/asyncapi_customization/test_handler.py rename to tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_info.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_info.py similarity index 100% rename from tests/docs/getting_started/asyncapi/asyncapi_customization/test_info.py rename to tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_info.py diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_payload.py 
b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_payload.py similarity index 100% rename from tests/docs/getting_started/asyncapi/asyncapi_customization/test_payload.py rename to tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_payload.py diff --git a/tests/docs/getting_started/cli/__init__.py b/tests/a_docs/getting_started/cli/__init__.py similarity index 100% rename from tests/docs/getting_started/cli/__init__.py rename to tests/a_docs/getting_started/cli/__init__.py diff --git a/tests/docs/getting_started/cli/confluent/__init__.py b/tests/a_docs/getting_started/cli/confluent/__init__.py similarity index 100% rename from tests/docs/getting_started/cli/confluent/__init__.py rename to tests/a_docs/getting_started/cli/confluent/__init__.py diff --git a/tests/docs/getting_started/cli/confluent/test_confluent_context.py b/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py similarity index 100% rename from tests/docs/getting_started/cli/confluent/test_confluent_context.py rename to tests/a_docs/getting_started/cli/confluent/test_confluent_context.py diff --git a/tests/docs/getting_started/cli/kafka/__init__.py b/tests/a_docs/getting_started/cli/kafka/__init__.py similarity index 100% rename from tests/docs/getting_started/cli/kafka/__init__.py rename to tests/a_docs/getting_started/cli/kafka/__init__.py diff --git a/tests/docs/getting_started/cli/kafka/test_kafka_context.py b/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py similarity index 100% rename from tests/docs/getting_started/cli/kafka/test_kafka_context.py rename to tests/a_docs/getting_started/cli/kafka/test_kafka_context.py diff --git a/tests/docs/getting_started/cli/nats/__init__.py b/tests/a_docs/getting_started/cli/nats/__init__.py similarity index 100% rename from tests/docs/getting_started/cli/nats/__init__.py rename to tests/a_docs/getting_started/cli/nats/__init__.py diff --git a/tests/docs/getting_started/cli/nats/test_nats_context.py b/tests/a_docs/getting_started/cli/nats/test_nats_context.py similarity index 100% rename from tests/docs/getting_started/cli/nats/test_nats_context.py rename to tests/a_docs/getting_started/cli/nats/test_nats_context.py diff --git a/tests/docs/getting_started/cli/rabbit/__init__.py b/tests/a_docs/getting_started/cli/rabbit/__init__.py similarity index 100% rename from tests/docs/getting_started/cli/rabbit/__init__.py rename to tests/a_docs/getting_started/cli/rabbit/__init__.py diff --git a/tests/docs/getting_started/cli/rabbit/test_rabbit_context.py b/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py similarity index 100% rename from tests/docs/getting_started/cli/rabbit/test_rabbit_context.py rename to tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py diff --git a/tests/docs/getting_started/cli/redis/__init__.py b/tests/a_docs/getting_started/cli/redis/__init__.py similarity index 100% rename from tests/docs/getting_started/cli/redis/__init__.py rename to tests/a_docs/getting_started/cli/redis/__init__.py diff --git a/tests/docs/getting_started/cli/redis/test_redis_context.py b/tests/a_docs/getting_started/cli/redis/test_redis_context.py similarity index 100% rename from tests/docs/getting_started/cli/redis/test_redis_context.py rename to tests/a_docs/getting_started/cli/redis/test_redis_context.py diff --git a/tests/docs/getting_started/config/test_settings_base_1.py b/tests/a_docs/getting_started/config/test_settings_base_1.py similarity index 100% rename from 
tests/docs/getting_started/config/test_settings_base_1.py rename to tests/a_docs/getting_started/config/test_settings_base_1.py diff --git a/tests/docs/getting_started/config/test_settings_base_2.py b/tests/a_docs/getting_started/config/test_settings_base_2.py similarity index 100% rename from tests/docs/getting_started/config/test_settings_base_2.py rename to tests/a_docs/getting_started/config/test_settings_base_2.py diff --git a/tests/docs/getting_started/config/test_settings_env.py b/tests/a_docs/getting_started/config/test_settings_env.py similarity index 100% rename from tests/docs/getting_started/config/test_settings_env.py rename to tests/a_docs/getting_started/config/test_settings_env.py diff --git a/tests/docs/getting_started/config/test_usage.py b/tests/a_docs/getting_started/config/test_usage.py similarity index 100% rename from tests/docs/getting_started/config/test_usage.py rename to tests/a_docs/getting_started/config/test_usage.py diff --git a/tests/docs/getting_started/context/__init__.py b/tests/a_docs/getting_started/context/__init__.py similarity index 100% rename from tests/docs/getting_started/context/__init__.py rename to tests/a_docs/getting_started/context/__init__.py diff --git a/tests/docs/getting_started/context/test_annotated.py b/tests/a_docs/getting_started/context/test_annotated.py similarity index 100% rename from tests/docs/getting_started/context/test_annotated.py rename to tests/a_docs/getting_started/context/test_annotated.py diff --git a/tests/docs/getting_started/context/test_base.py b/tests/a_docs/getting_started/context/test_base.py similarity index 100% rename from tests/docs/getting_started/context/test_base.py rename to tests/a_docs/getting_started/context/test_base.py diff --git a/tests/docs/getting_started/context/test_cast.py b/tests/a_docs/getting_started/context/test_cast.py similarity index 100% rename from tests/docs/getting_started/context/test_cast.py rename to tests/a_docs/getting_started/context/test_cast.py diff --git a/tests/docs/getting_started/context/test_custom_global.py b/tests/a_docs/getting_started/context/test_custom_global.py similarity index 100% rename from tests/docs/getting_started/context/test_custom_global.py rename to tests/a_docs/getting_started/context/test_custom_global.py diff --git a/tests/docs/getting_started/context/test_custom_local.py b/tests/a_docs/getting_started/context/test_custom_local.py similarity index 100% rename from tests/docs/getting_started/context/test_custom_local.py rename to tests/a_docs/getting_started/context/test_custom_local.py diff --git a/tests/docs/getting_started/context/test_default_arguments.py b/tests/a_docs/getting_started/context/test_default_arguments.py similarity index 100% rename from tests/docs/getting_started/context/test_default_arguments.py rename to tests/a_docs/getting_started/context/test_default_arguments.py diff --git a/tests/docs/getting_started/context/test_existed_context.py b/tests/a_docs/getting_started/context/test_existed_context.py similarity index 100% rename from tests/docs/getting_started/context/test_existed_context.py rename to tests/a_docs/getting_started/context/test_existed_context.py diff --git a/tests/docs/getting_started/context/test_fields_access.py b/tests/a_docs/getting_started/context/test_fields_access.py similarity index 100% rename from tests/docs/getting_started/context/test_fields_access.py rename to tests/a_docs/getting_started/context/test_fields_access.py diff --git a/tests/docs/getting_started/context/test_initial.py 
b/tests/a_docs/getting_started/context/test_initial.py similarity index 100% rename from tests/docs/getting_started/context/test_initial.py rename to tests/a_docs/getting_started/context/test_initial.py diff --git a/tests/docs/getting_started/context/test_manual_local_context.py b/tests/a_docs/getting_started/context/test_manual_local_context.py similarity index 100% rename from tests/docs/getting_started/context/test_manual_local_context.py rename to tests/a_docs/getting_started/context/test_manual_local_context.py diff --git a/tests/docs/getting_started/context/test_nested.py b/tests/a_docs/getting_started/context/test_nested.py similarity index 100% rename from tests/docs/getting_started/context/test_nested.py rename to tests/a_docs/getting_started/context/test_nested.py diff --git a/tests/docs/getting_started/dependencies/__init__.py b/tests/a_docs/getting_started/dependencies/__init__.py similarity index 100% rename from tests/docs/getting_started/dependencies/__init__.py rename to tests/a_docs/getting_started/dependencies/__init__.py diff --git a/tests/docs/getting_started/dependencies/basic/__init__.py b/tests/a_docs/getting_started/dependencies/basic/__init__.py similarity index 100% rename from tests/docs/getting_started/dependencies/basic/__init__.py rename to tests/a_docs/getting_started/dependencies/basic/__init__.py diff --git a/tests/docs/getting_started/dependencies/basic/test_base.py b/tests/a_docs/getting_started/dependencies/basic/test_base.py similarity index 100% rename from tests/docs/getting_started/dependencies/basic/test_base.py rename to tests/a_docs/getting_started/dependencies/basic/test_base.py diff --git a/tests/docs/getting_started/dependencies/basic/test_depends.py b/tests/a_docs/getting_started/dependencies/basic/test_depends.py similarity index 100% rename from tests/docs/getting_started/dependencies/basic/test_depends.py rename to tests/a_docs/getting_started/dependencies/basic/test_depends.py diff --git a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py b/tests/a_docs/getting_started/dependencies/basic/test_nested_depends.py similarity index 100% rename from tests/docs/getting_started/dependencies/basic/test_nested_depends.py rename to tests/a_docs/getting_started/dependencies/basic/test_nested_depends.py diff --git a/tests/docs/getting_started/dependencies/test_basic.py b/tests/a_docs/getting_started/dependencies/test_basic.py similarity index 100% rename from tests/docs/getting_started/dependencies/test_basic.py rename to tests/a_docs/getting_started/dependencies/test_basic.py diff --git a/tests/docs/getting_started/dependencies/test_class.py b/tests/a_docs/getting_started/dependencies/test_class.py similarity index 100% rename from tests/docs/getting_started/dependencies/test_class.py rename to tests/a_docs/getting_started/dependencies/test_class.py diff --git a/tests/docs/getting_started/dependencies/test_global.py b/tests/a_docs/getting_started/dependencies/test_global.py similarity index 100% rename from tests/docs/getting_started/dependencies/test_global.py rename to tests/a_docs/getting_started/dependencies/test_global.py diff --git a/tests/docs/getting_started/dependencies/test_global_broker.py b/tests/a_docs/getting_started/dependencies/test_global_broker.py similarity index 100% rename from tests/docs/getting_started/dependencies/test_global_broker.py rename to tests/a_docs/getting_started/dependencies/test_global_broker.py diff --git a/tests/docs/getting_started/dependencies/test_sub_dep.py 
b/tests/a_docs/getting_started/dependencies/test_sub_dep.py similarity index 100% rename from tests/docs/getting_started/dependencies/test_sub_dep.py rename to tests/a_docs/getting_started/dependencies/test_sub_dep.py diff --git a/tests/docs/getting_started/dependencies/test_yield.py b/tests/a_docs/getting_started/dependencies/test_yield.py similarity index 100% rename from tests/docs/getting_started/dependencies/test_yield.py rename to tests/a_docs/getting_started/dependencies/test_yield.py diff --git a/tests/docs/getting_started/index/__init__.py b/tests/a_docs/getting_started/index/__init__.py similarity index 100% rename from tests/docs/getting_started/index/__init__.py rename to tests/a_docs/getting_started/index/__init__.py diff --git a/tests/docs/getting_started/index/test_basic.py b/tests/a_docs/getting_started/index/test_basic.py similarity index 100% rename from tests/docs/getting_started/index/test_basic.py rename to tests/a_docs/getting_started/index/test_basic.py diff --git a/tests/docs/getting_started/lifespan/__init__.py b/tests/a_docs/getting_started/lifespan/__init__.py similarity index 100% rename from tests/docs/getting_started/lifespan/__init__.py rename to tests/a_docs/getting_started/lifespan/__init__.py diff --git a/tests/docs/getting_started/lifespan/test_basic.py b/tests/a_docs/getting_started/lifespan/test_basic.py similarity index 100% rename from tests/docs/getting_started/lifespan/test_basic.py rename to tests/a_docs/getting_started/lifespan/test_basic.py diff --git a/tests/docs/getting_started/lifespan/test_ml.py b/tests/a_docs/getting_started/lifespan/test_ml.py similarity index 100% rename from tests/docs/getting_started/lifespan/test_ml.py rename to tests/a_docs/getting_started/lifespan/test_ml.py diff --git a/tests/docs/getting_started/lifespan/test_ml_context.py b/tests/a_docs/getting_started/lifespan/test_ml_context.py similarity index 100% rename from tests/docs/getting_started/lifespan/test_ml_context.py rename to tests/a_docs/getting_started/lifespan/test_ml_context.py diff --git a/tests/docs/getting_started/lifespan/test_multi.py b/tests/a_docs/getting_started/lifespan/test_multi.py similarity index 100% rename from tests/docs/getting_started/lifespan/test_multi.py rename to tests/a_docs/getting_started/lifespan/test_multi.py diff --git a/tests/docs/getting_started/lifespan/test_testing.py b/tests/a_docs/getting_started/lifespan/test_testing.py similarity index 100% rename from tests/docs/getting_started/lifespan/test_testing.py rename to tests/a_docs/getting_started/lifespan/test_testing.py diff --git a/tests/docs/getting_started/publishing/__init__.py b/tests/a_docs/getting_started/publishing/__init__.py similarity index 100% rename from tests/docs/getting_started/publishing/__init__.py rename to tests/a_docs/getting_started/publishing/__init__.py diff --git a/tests/docs/getting_started/publishing/test_broker.py b/tests/a_docs/getting_started/publishing/test_broker.py similarity index 100% rename from tests/docs/getting_started/publishing/test_broker.py rename to tests/a_docs/getting_started/publishing/test_broker.py diff --git a/tests/docs/getting_started/publishing/test_broker_context.py b/tests/a_docs/getting_started/publishing/test_broker_context.py similarity index 98% rename from tests/docs/getting_started/publishing/test_broker_context.py rename to tests/a_docs/getting_started/publishing/test_broker_context.py index aa8d0f194b..b44b9a2144 100644 --- a/tests/docs/getting_started/publishing/test_broker_context.py +++ 
b/tests/a_docs/getting_started/publishing/test_broker_context.py @@ -38,7 +38,7 @@ async def test_broker_context_confluent(): from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker, with_real=True), TestApp(app): - await handle.wait_call(5) + await handle.wait_call(30) handle.mock.assert_called_once_with("Hi!") diff --git a/tests/docs/getting_started/publishing/test_decorator.py b/tests/a_docs/getting_started/publishing/test_decorator.py similarity index 100% rename from tests/docs/getting_started/publishing/test_decorator.py rename to tests/a_docs/getting_started/publishing/test_decorator.py diff --git a/tests/docs/getting_started/publishing/test_direct.py b/tests/a_docs/getting_started/publishing/test_direct.py similarity index 100% rename from tests/docs/getting_started/publishing/test_direct.py rename to tests/a_docs/getting_started/publishing/test_direct.py diff --git a/tests/docs/getting_started/publishing/test_object.py b/tests/a_docs/getting_started/publishing/test_object.py similarity index 100% rename from tests/docs/getting_started/publishing/test_object.py rename to tests/a_docs/getting_started/publishing/test_object.py diff --git a/tests/docs/getting_started/routers/__init__.py b/tests/a_docs/getting_started/routers/__init__.py similarity index 100% rename from tests/docs/getting_started/routers/__init__.py rename to tests/a_docs/getting_started/routers/__init__.py diff --git a/tests/docs/getting_started/routers/test_base.py b/tests/a_docs/getting_started/routers/test_base.py similarity index 100% rename from tests/docs/getting_started/routers/test_base.py rename to tests/a_docs/getting_started/routers/test_base.py diff --git a/tests/docs/getting_started/routers/test_delay.py b/tests/a_docs/getting_started/routers/test_delay.py similarity index 100% rename from tests/docs/getting_started/routers/test_delay.py rename to tests/a_docs/getting_started/routers/test_delay.py diff --git a/tests/docs/getting_started/routers/test_delay_equal.py b/tests/a_docs/getting_started/routers/test_delay_equal.py similarity index 100% rename from tests/docs/getting_started/routers/test_delay_equal.py rename to tests/a_docs/getting_started/routers/test_delay_equal.py diff --git a/tests/docs/getting_started/serialization/__init__.py b/tests/a_docs/getting_started/serialization/__init__.py similarity index 100% rename from tests/docs/getting_started/serialization/__init__.py rename to tests/a_docs/getting_started/serialization/__init__.py diff --git a/tests/docs/getting_started/serialization/test_parser.py b/tests/a_docs/getting_started/serialization/test_parser.py similarity index 100% rename from tests/docs/getting_started/serialization/test_parser.py rename to tests/a_docs/getting_started/serialization/test_parser.py diff --git a/tests/docs/getting_started/subscription/__init__.py b/tests/a_docs/getting_started/subscription/__init__.py similarity index 100% rename from tests/docs/getting_started/subscription/__init__.py rename to tests/a_docs/getting_started/subscription/__init__.py diff --git a/tests/docs/getting_started/subscription/test_annotated.py b/tests/a_docs/getting_started/subscription/test_annotated.py similarity index 100% rename from tests/docs/getting_started/subscription/test_annotated.py rename to tests/a_docs/getting_started/subscription/test_annotated.py diff --git a/tests/docs/getting_started/subscription/test_filter.py b/tests/a_docs/getting_started/subscription/test_filter.py similarity index 100% rename from 
tests/docs/getting_started/subscription/test_filter.py rename to tests/a_docs/getting_started/subscription/test_filter.py diff --git a/tests/docs/getting_started/subscription/test_pydantic.py b/tests/a_docs/getting_started/subscription/test_pydantic.py similarity index 100% rename from tests/docs/getting_started/subscription/test_pydantic.py rename to tests/a_docs/getting_started/subscription/test_pydantic.py diff --git a/tests/docs/getting_started/subscription/test_real.py b/tests/a_docs/getting_started/subscription/test_real.py similarity index 100% rename from tests/docs/getting_started/subscription/test_real.py rename to tests/a_docs/getting_started/subscription/test_real.py diff --git a/tests/docs/getting_started/subscription/test_testing.py b/tests/a_docs/getting_started/subscription/test_testing.py similarity index 100% rename from tests/docs/getting_started/subscription/test_testing.py rename to tests/a_docs/getting_started/subscription/test_testing.py diff --git a/tests/docs/index/__init__.py b/tests/a_docs/index/__init__.py similarity index 100% rename from tests/docs/index/__init__.py rename to tests/a_docs/index/__init__.py diff --git a/tests/docs/index/test_basic.py b/tests/a_docs/index/test_basic.py similarity index 100% rename from tests/docs/index/test_basic.py rename to tests/a_docs/index/test_basic.py diff --git a/tests/docs/index/test_dependencies.py b/tests/a_docs/index/test_dependencies.py similarity index 100% rename from tests/docs/index/test_dependencies.py rename to tests/a_docs/index/test_dependencies.py diff --git a/tests/docs/index/test_pydantic.py b/tests/a_docs/index/test_pydantic.py similarity index 100% rename from tests/docs/index/test_pydantic.py rename to tests/a_docs/index/test_pydantic.py diff --git a/tests/docs/integration/__init__.py b/tests/a_docs/integration/__init__.py similarity index 100% rename from tests/docs/integration/__init__.py rename to tests/a_docs/integration/__init__.py diff --git a/tests/docs/integration/fastapi/__init__.py b/tests/a_docs/integration/fastapi/__init__.py similarity index 100% rename from tests/docs/integration/fastapi/__init__.py rename to tests/a_docs/integration/fastapi/__init__.py diff --git a/tests/docs/integration/fastapi/test_base.py b/tests/a_docs/integration/fastapi/test_base.py similarity index 100% rename from tests/docs/integration/fastapi/test_base.py rename to tests/a_docs/integration/fastapi/test_base.py diff --git a/tests/docs/integration/fastapi/test_depends.py b/tests/a_docs/integration/fastapi/test_depends.py similarity index 100% rename from tests/docs/integration/fastapi/test_depends.py rename to tests/a_docs/integration/fastapi/test_depends.py diff --git a/tests/docs/integration/fastapi/test_multiple.py b/tests/a_docs/integration/fastapi/test_multiple.py similarity index 100% rename from tests/docs/integration/fastapi/test_multiple.py rename to tests/a_docs/integration/fastapi/test_multiple.py diff --git a/tests/docs/integration/fastapi/test_multiple_lifespan.py b/tests/a_docs/integration/fastapi/test_multiple_lifespan.py similarity index 100% rename from tests/docs/integration/fastapi/test_multiple_lifespan.py rename to tests/a_docs/integration/fastapi/test_multiple_lifespan.py diff --git a/tests/docs/integration/fastapi/test_send.py b/tests/a_docs/integration/fastapi/test_send.py similarity index 100% rename from tests/docs/integration/fastapi/test_send.py rename to tests/a_docs/integration/fastapi/test_send.py diff --git a/tests/docs/integration/fastapi/test_startup.py 
b/tests/a_docs/integration/fastapi/test_startup.py similarity index 100% rename from tests/docs/integration/fastapi/test_startup.py rename to tests/a_docs/integration/fastapi/test_startup.py diff --git a/tests/docs/integration/fastapi/test_test.py b/tests/a_docs/integration/fastapi/test_test.py similarity index 100% rename from tests/docs/integration/fastapi/test_test.py rename to tests/a_docs/integration/fastapi/test_test.py diff --git a/tests/docs/integration/http/__init__.py b/tests/a_docs/integration/http/__init__.py similarity index 100% rename from tests/docs/integration/http/__init__.py rename to tests/a_docs/integration/http/__init__.py diff --git a/tests/docs/integration/http/test_fastapi.py b/tests/a_docs/integration/http/test_fastapi.py similarity index 100% rename from tests/docs/integration/http/test_fastapi.py rename to tests/a_docs/integration/http/test_fastapi.py diff --git a/tests/docs/kafka/__init__.py b/tests/a_docs/kafka/__init__.py similarity index 100% rename from tests/docs/kafka/__init__.py rename to tests/a_docs/kafka/__init__.py diff --git a/tests/docs/kafka/ack/__init__.py b/tests/a_docs/kafka/ack/__init__.py similarity index 100% rename from tests/docs/kafka/ack/__init__.py rename to tests/a_docs/kafka/ack/__init__.py diff --git a/tests/docs/kafka/ack/test_errors.py b/tests/a_docs/kafka/ack/test_errors.py similarity index 100% rename from tests/docs/kafka/ack/test_errors.py rename to tests/a_docs/kafka/ack/test_errors.py diff --git a/tests/docs/kafka/basic/__init__.py b/tests/a_docs/kafka/basic/__init__.py similarity index 100% rename from tests/docs/kafka/basic/__init__.py rename to tests/a_docs/kafka/basic/__init__.py diff --git a/tests/docs/kafka/basic/test_basic.py b/tests/a_docs/kafka/basic/test_basic.py similarity index 100% rename from tests/docs/kafka/basic/test_basic.py rename to tests/a_docs/kafka/basic/test_basic.py diff --git a/tests/docs/kafka/basic/test_cmd_run.py b/tests/a_docs/kafka/basic/test_cmd_run.py similarity index 100% rename from tests/docs/kafka/basic/test_cmd_run.py rename to tests/a_docs/kafka/basic/test_cmd_run.py diff --git a/tests/docs/kafka/batch_consuming_pydantic/__init__.py b/tests/a_docs/kafka/batch_consuming_pydantic/__init__.py similarity index 100% rename from tests/docs/kafka/batch_consuming_pydantic/__init__.py rename to tests/a_docs/kafka/batch_consuming_pydantic/__init__.py diff --git a/tests/docs/kafka/batch_consuming_pydantic/test_app.py b/tests/a_docs/kafka/batch_consuming_pydantic/test_app.py similarity index 100% rename from tests/docs/kafka/batch_consuming_pydantic/test_app.py rename to tests/a_docs/kafka/batch_consuming_pydantic/test_app.py diff --git a/tests/docs/kafka/consumes_basics/__init__.py b/tests/a_docs/kafka/consumes_basics/__init__.py similarity index 100% rename from tests/docs/kafka/consumes_basics/__init__.py rename to tests/a_docs/kafka/consumes_basics/__init__.py diff --git a/tests/docs/kafka/consumes_basics/test_app.py b/tests/a_docs/kafka/consumes_basics/test_app.py similarity index 100% rename from tests/docs/kafka/consumes_basics/test_app.py rename to tests/a_docs/kafka/consumes_basics/test_app.py diff --git a/tests/docs/kafka/publish_batch/__init__.py b/tests/a_docs/kafka/publish_batch/__init__.py similarity index 100% rename from tests/docs/kafka/publish_batch/__init__.py rename to tests/a_docs/kafka/publish_batch/__init__.py diff --git a/tests/docs/kafka/publish_batch/test_app.py b/tests/a_docs/kafka/publish_batch/test_app.py similarity index 100% rename from 
tests/docs/kafka/publish_batch/test_app.py rename to tests/a_docs/kafka/publish_batch/test_app.py diff --git a/tests/docs/kafka/publish_batch/test_issues.py b/tests/a_docs/kafka/publish_batch/test_issues.py similarity index 100% rename from tests/docs/kafka/publish_batch/test_issues.py rename to tests/a_docs/kafka/publish_batch/test_issues.py diff --git a/tests/docs/kafka/publish_example/__init__.py b/tests/a_docs/kafka/publish_example/__init__.py similarity index 100% rename from tests/docs/kafka/publish_example/__init__.py rename to tests/a_docs/kafka/publish_example/__init__.py diff --git a/tests/docs/kafka/publish_example/test_app.py b/tests/a_docs/kafka/publish_example/test_app.py similarity index 100% rename from tests/docs/kafka/publish_example/test_app.py rename to tests/a_docs/kafka/publish_example/test_app.py diff --git a/tests/docs/kafka/publish_with_partition_key/__init__.py b/tests/a_docs/kafka/publish_with_partition_key/__init__.py similarity index 100% rename from tests/docs/kafka/publish_with_partition_key/__init__.py rename to tests/a_docs/kafka/publish_with_partition_key/__init__.py diff --git a/tests/docs/kafka/publish_with_partition_key/test_app.py b/tests/a_docs/kafka/publish_with_partition_key/test_app.py similarity index 100% rename from tests/docs/kafka/publish_with_partition_key/test_app.py rename to tests/a_docs/kafka/publish_with_partition_key/test_app.py diff --git a/tests/docs/kafka/publisher_object/__init__.py b/tests/a_docs/kafka/publisher_object/__init__.py similarity index 100% rename from tests/docs/kafka/publisher_object/__init__.py rename to tests/a_docs/kafka/publisher_object/__init__.py diff --git a/tests/docs/kafka/publisher_object/test_publisher_object.py b/tests/a_docs/kafka/publisher_object/test_publisher_object.py similarity index 100% rename from tests/docs/kafka/publisher_object/test_publisher_object.py rename to tests/a_docs/kafka/publisher_object/test_publisher_object.py diff --git a/tests/docs/kafka/raw_publish/__init__.py b/tests/a_docs/kafka/raw_publish/__init__.py similarity index 100% rename from tests/docs/kafka/raw_publish/__init__.py rename to tests/a_docs/kafka/raw_publish/__init__.py diff --git a/tests/docs/kafka/raw_publish/test_raw_publish.py b/tests/a_docs/kafka/raw_publish/test_raw_publish.py similarity index 100% rename from tests/docs/kafka/raw_publish/test_raw_publish.py rename to tests/a_docs/kafka/raw_publish/test_raw_publish.py diff --git a/tests/docs/kafka/test_security.py b/tests/a_docs/kafka/test_security.py similarity index 100% rename from tests/docs/kafka/test_security.py rename to tests/a_docs/kafka/test_security.py diff --git a/tests/docs/nats/__init__.py b/tests/a_docs/nats/__init__.py similarity index 100% rename from tests/docs/nats/__init__.py rename to tests/a_docs/nats/__init__.py diff --git a/tests/docs/nats/ack/__init__.py b/tests/a_docs/nats/ack/__init__.py similarity index 100% rename from tests/docs/nats/ack/__init__.py rename to tests/a_docs/nats/ack/__init__.py diff --git a/tests/docs/nats/ack/test_errors.py b/tests/a_docs/nats/ack/test_errors.py similarity index 100% rename from tests/docs/nats/ack/test_errors.py rename to tests/a_docs/nats/ack/test_errors.py diff --git a/tests/docs/nats/js/__init__.py b/tests/a_docs/nats/js/__init__.py similarity index 100% rename from tests/docs/nats/js/__init__.py rename to tests/a_docs/nats/js/__init__.py diff --git a/tests/docs/nats/js/test_kv.py b/tests/a_docs/nats/js/test_kv.py similarity index 100% rename from tests/docs/nats/js/test_kv.py rename to 
tests/a_docs/nats/js/test_kv.py diff --git a/tests/docs/nats/js/test_main.py b/tests/a_docs/nats/js/test_main.py similarity index 100% rename from tests/docs/nats/js/test_main.py rename to tests/a_docs/nats/js/test_main.py diff --git a/tests/docs/nats/js/test_object.py b/tests/a_docs/nats/js/test_object.py similarity index 100% rename from tests/docs/nats/js/test_object.py rename to tests/a_docs/nats/js/test_object.py diff --git a/tests/docs/nats/js/test_pull_sub.py b/tests/a_docs/nats/js/test_pull_sub.py similarity index 100% rename from tests/docs/nats/js/test_pull_sub.py rename to tests/a_docs/nats/js/test_pull_sub.py diff --git a/tests/docs/nats/test_direct.py b/tests/a_docs/nats/test_direct.py similarity index 100% rename from tests/docs/nats/test_direct.py rename to tests/a_docs/nats/test_direct.py diff --git a/tests/docs/nats/test_pattern.py b/tests/a_docs/nats/test_pattern.py similarity index 100% rename from tests/docs/nats/test_pattern.py rename to tests/a_docs/nats/test_pattern.py diff --git a/tests/docs/rabbit/__init__.py b/tests/a_docs/rabbit/__init__.py similarity index 100% rename from tests/docs/rabbit/__init__.py rename to tests/a_docs/rabbit/__init__.py diff --git a/tests/docs/rabbit/ack/__init__.py b/tests/a_docs/rabbit/ack/__init__.py similarity index 100% rename from tests/docs/rabbit/ack/__init__.py rename to tests/a_docs/rabbit/ack/__init__.py diff --git a/tests/docs/rabbit/ack/test_errors.py b/tests/a_docs/rabbit/ack/test_errors.py similarity index 100% rename from tests/docs/rabbit/ack/test_errors.py rename to tests/a_docs/rabbit/ack/test_errors.py diff --git a/tests/docs/rabbit/subscription/__init__.py b/tests/a_docs/rabbit/subscription/__init__.py similarity index 100% rename from tests/docs/rabbit/subscription/__init__.py rename to tests/a_docs/rabbit/subscription/__init__.py diff --git a/tests/docs/rabbit/subscription/test_direct.py b/tests/a_docs/rabbit/subscription/test_direct.py similarity index 100% rename from tests/docs/rabbit/subscription/test_direct.py rename to tests/a_docs/rabbit/subscription/test_direct.py diff --git a/tests/docs/rabbit/subscription/test_fanout.py b/tests/a_docs/rabbit/subscription/test_fanout.py similarity index 100% rename from tests/docs/rabbit/subscription/test_fanout.py rename to tests/a_docs/rabbit/subscription/test_fanout.py diff --git a/tests/docs/rabbit/subscription/test_header.py b/tests/a_docs/rabbit/subscription/test_header.py similarity index 100% rename from tests/docs/rabbit/subscription/test_header.py rename to tests/a_docs/rabbit/subscription/test_header.py diff --git a/tests/docs/rabbit/subscription/test_index.py b/tests/a_docs/rabbit/subscription/test_index.py similarity index 100% rename from tests/docs/rabbit/subscription/test_index.py rename to tests/a_docs/rabbit/subscription/test_index.py diff --git a/tests/docs/rabbit/subscription/test_stream.py b/tests/a_docs/rabbit/subscription/test_stream.py similarity index 100% rename from tests/docs/rabbit/subscription/test_stream.py rename to tests/a_docs/rabbit/subscription/test_stream.py diff --git a/tests/docs/rabbit/subscription/test_topic.py b/tests/a_docs/rabbit/subscription/test_topic.py similarity index 100% rename from tests/docs/rabbit/subscription/test_topic.py rename to tests/a_docs/rabbit/subscription/test_topic.py diff --git a/tests/docs/rabbit/test_declare.py b/tests/a_docs/rabbit/test_declare.py similarity index 100% rename from tests/docs/rabbit/test_declare.py rename to tests/a_docs/rabbit/test_declare.py diff --git 
a/tests/docs/rabbit/test_security.py b/tests/a_docs/rabbit/test_security.py similarity index 100% rename from tests/docs/rabbit/test_security.py rename to tests/a_docs/rabbit/test_security.py diff --git a/tests/docs/redis/__init__.py b/tests/a_docs/redis/__init__.py similarity index 100% rename from tests/docs/redis/__init__.py rename to tests/a_docs/redis/__init__.py diff --git a/tests/docs/redis/list/__init__.py b/tests/a_docs/redis/list/__init__.py similarity index 100% rename from tests/docs/redis/list/__init__.py rename to tests/a_docs/redis/list/__init__.py diff --git a/tests/docs/redis/list/test_list_pub.py b/tests/a_docs/redis/list/test_list_pub.py similarity index 100% rename from tests/docs/redis/list/test_list_pub.py rename to tests/a_docs/redis/list/test_list_pub.py diff --git a/tests/docs/redis/list/test_list_sub.py b/tests/a_docs/redis/list/test_list_sub.py similarity index 100% rename from tests/docs/redis/list/test_list_sub.py rename to tests/a_docs/redis/list/test_list_sub.py diff --git a/tests/docs/redis/list/test_sub_batch.py b/tests/a_docs/redis/list/test_sub_batch.py similarity index 100% rename from tests/docs/redis/list/test_sub_batch.py rename to tests/a_docs/redis/list/test_sub_batch.py diff --git a/tests/docs/redis/pub_sub/__init__.py b/tests/a_docs/redis/pub_sub/__init__.py similarity index 100% rename from tests/docs/redis/pub_sub/__init__.py rename to tests/a_docs/redis/pub_sub/__init__.py diff --git a/tests/docs/redis/pub_sub/test_channel_sub.py b/tests/a_docs/redis/pub_sub/test_channel_sub.py similarity index 100% rename from tests/docs/redis/pub_sub/test_channel_sub.py rename to tests/a_docs/redis/pub_sub/test_channel_sub.py diff --git a/tests/docs/redis/pub_sub/test_channel_sub_pattern.py b/tests/a_docs/redis/pub_sub/test_channel_sub_pattern.py similarity index 100% rename from tests/docs/redis/pub_sub/test_channel_sub_pattern.py rename to tests/a_docs/redis/pub_sub/test_channel_sub_pattern.py diff --git a/tests/docs/redis/pub_sub/test_pattern_data.py b/tests/a_docs/redis/pub_sub/test_pattern_data.py similarity index 100% rename from tests/docs/redis/pub_sub/test_pattern_data.py rename to tests/a_docs/redis/pub_sub/test_pattern_data.py diff --git a/tests/docs/redis/pub_sub/test_publihser_object.py b/tests/a_docs/redis/pub_sub/test_publihser_object.py similarity index 100% rename from tests/docs/redis/pub_sub/test_publihser_object.py rename to tests/a_docs/redis/pub_sub/test_publihser_object.py diff --git a/tests/docs/redis/pub_sub/test_publisher_decorator.py b/tests/a_docs/redis/pub_sub/test_publisher_decorator.py similarity index 100% rename from tests/docs/redis/pub_sub/test_publisher_decorator.py rename to tests/a_docs/redis/pub_sub/test_publisher_decorator.py diff --git a/tests/docs/redis/pub_sub/test_raw_publish.py b/tests/a_docs/redis/pub_sub/test_raw_publish.py similarity index 100% rename from tests/docs/redis/pub_sub/test_raw_publish.py rename to tests/a_docs/redis/pub_sub/test_raw_publish.py diff --git a/tests/docs/redis/stream/__init__.py b/tests/a_docs/redis/stream/__init__.py similarity index 100% rename from tests/docs/redis/stream/__init__.py rename to tests/a_docs/redis/stream/__init__.py diff --git a/tests/docs/redis/stream/test_ack_errors.py b/tests/a_docs/redis/stream/test_ack_errors.py similarity index 100% rename from tests/docs/redis/stream/test_ack_errors.py rename to tests/a_docs/redis/stream/test_ack_errors.py diff --git a/tests/docs/redis/stream/test_batch_sub.py b/tests/a_docs/redis/stream/test_batch_sub.py similarity index 100% 
rename from tests/docs/redis/stream/test_batch_sub.py rename to tests/a_docs/redis/stream/test_batch_sub.py diff --git a/tests/docs/redis/stream/test_group.py b/tests/a_docs/redis/stream/test_group.py similarity index 100% rename from tests/docs/redis/stream/test_group.py rename to tests/a_docs/redis/stream/test_group.py diff --git a/tests/docs/redis/stream/test_pub.py b/tests/a_docs/redis/stream/test_pub.py similarity index 100% rename from tests/docs/redis/stream/test_pub.py rename to tests/a_docs/redis/stream/test_pub.py diff --git a/tests/docs/redis/stream/test_sub.py b/tests/a_docs/redis/stream/test_sub.py similarity index 100% rename from tests/docs/redis/stream/test_sub.py rename to tests/a_docs/redis/stream/test_sub.py diff --git a/tests/docs/redis/test_rpc.py b/tests/a_docs/redis/test_rpc.py similarity index 100% rename from tests/docs/redis/test_rpc.py rename to tests/a_docs/redis/test_rpc.py diff --git a/tests/docs/redis/test_security.py b/tests/a_docs/redis/test_security.py similarity index 100% rename from tests/docs/redis/test_security.py rename to tests/a_docs/redis/test_security.py diff --git a/tests/asgi/__init__.py b/tests/asgi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/asgi/confluent/__init__.py b/tests/asgi/confluent/__init__.py new file mode 100644 index 0000000000..c4a1803708 --- /dev/null +++ b/tests/asgi/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/asgi/confluent/test_asgi.py b/tests/asgi/confluent/test_asgi.py new file mode 100644 index 0000000000..75e4b37254 --- /dev/null +++ b/tests/asgi/confluent/test_asgi.py @@ -0,0 +1,10 @@ +from faststream.confluent import KafkaBroker, TestKafkaBroker +from tests.asgi.testcase import AsgiTestcase + + +class TestConfluentAsgi(AsgiTestcase): + def get_broker(self): + return KafkaBroker() + + def get_test_broker(self, broker): + return TestKafkaBroker(broker) diff --git a/tests/asgi/kafka/__init__.py b/tests/asgi/kafka/__init__.py new file mode 100644 index 0000000000..bd6bc708fc --- /dev/null +++ b/tests/asgi/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/asgi/kafka/test_asgi.py b/tests/asgi/kafka/test_asgi.py new file mode 100644 index 0000000000..cb26b402dc --- /dev/null +++ b/tests/asgi/kafka/test_asgi.py @@ -0,0 +1,10 @@ +from faststream.kafka import KafkaBroker, TestKafkaBroker +from tests.asgi.testcase import AsgiTestcase + + +class TestKafkaAsgi(AsgiTestcase): + def get_broker(self): + return KafkaBroker() + + def get_test_broker(self, broker): + return TestKafkaBroker(broker) diff --git a/tests/asgi/nats/__init__.py b/tests/asgi/nats/__init__.py new file mode 100644 index 0000000000..87ead90ee6 --- /dev/null +++ b/tests/asgi/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/asgi/nats/test_asgi.py b/tests/asgi/nats/test_asgi.py new file mode 100644 index 0000000000..f54f52b25a --- /dev/null +++ b/tests/asgi/nats/test_asgi.py @@ -0,0 +1,10 @@ +from faststream.nats import NatsBroker, TestNatsBroker +from tests.asgi.testcase import AsgiTestcase + + +class TestNatsAsgi(AsgiTestcase): + def get_broker(self): + return NatsBroker() + + def get_test_broker(self, broker): + return TestNatsBroker(broker) diff --git a/tests/asgi/rabbit/__init__.py b/tests/asgi/rabbit/__init__.py new file mode 100644 index 0000000000..ebec43fcd5 --- /dev/null +++ b/tests/asgi/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + 
+pytest.importorskip("aio_pika") diff --git a/tests/asgi/rabbit/test_asgi.py b/tests/asgi/rabbit/test_asgi.py new file mode 100644 index 0000000000..9df4794225 --- /dev/null +++ b/tests/asgi/rabbit/test_asgi.py @@ -0,0 +1,10 @@ +from faststream.rabbit import RabbitBroker, TestRabbitBroker +from tests.asgi.testcase import AsgiTestcase + + +class TestRabbitAsgi(AsgiTestcase): + def get_broker(self): + return RabbitBroker() + + def get_test_broker(self, broker): + return TestRabbitBroker(broker) diff --git a/tests/asgi/redis/__init__.py b/tests/asgi/redis/__init__.py new file mode 100644 index 0000000000..4752ef19b1 --- /dev/null +++ b/tests/asgi/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/asgi/redis/test_asgi.py b/tests/asgi/redis/test_asgi.py new file mode 100644 index 0000000000..3b3e5a38be --- /dev/null +++ b/tests/asgi/redis/test_asgi.py @@ -0,0 +1,10 @@ +from faststream.redis import RedisBroker, TestRedisBroker +from tests.asgi.testcase import AsgiTestcase + + +class TestRedisAsgi(AsgiTestcase): + def get_broker(self): + return RedisBroker() + + def get_test_broker(self, broker): + return TestRedisBroker(broker) diff --git a/tests/asgi/testcase.py b/tests/asgi/testcase.py new file mode 100644 index 0000000000..5b1b2f62b7 --- /dev/null +++ b/tests/asgi/testcase.py @@ -0,0 +1,81 @@ +from typing import Any + +import pytest +from starlette.testclient import TestClient +from starlette.websockets import WebSocketDisconnect + +from faststream.asgi import AsgiFastStream, AsgiResponse, get, make_ping_asgi + + +class AsgiTestcase: + def get_broker(self) -> Any: + raise NotImplementedError() + + def get_test_broker(self, broker) -> Any: + raise NotImplementedError() + + def test_not_found(self): + app = AsgiFastStream() + + with TestClient(app) as client: + response = client.get("/") + assert response.status_code == 404 + + def test_ws_not_found(self): + app = AsgiFastStream() + + with TestClient(app) as client: # noqa: SIM117 + with pytest.raises(WebSocketDisconnect): + with client.websocket_connect("/ws"): # raises error + pass + + def test_asgi_ping_unhealthy(self): + broker = self.get_broker() + + app = AsgiFastStream( + asgi_routes=[ + ("/health", make_ping_asgi(broker, timeout=5.0)), + ] + ) + + with TestClient(app) as client: + response = client.get("/health") + assert response.status_code == 500 + + @pytest.mark.asyncio() + async def test_asgi_ping_healthy(self): + broker = self.get_broker() + + app = AsgiFastStream( + broker, + asgi_routes=[("/health", make_ping_asgi(broker, timeout=5.0))], + ) + + async with self.get_test_broker(broker): + with TestClient(app) as client: + response = client.get("/health") + assert response.status_code == 204 + + @pytest.mark.asyncio() + async def test_asyncapi_asgi(self): + broker = self.get_broker() + + app = AsgiFastStream(broker, asyncapi_path="/docs") + + async with self.get_test_broker(broker): + with TestClient(app) as client: + response = client.get("/docs") + assert response.status_code == 200 + assert response.text + + def test_get_decorator(self): + @get + async def some_handler(scope): + return AsgiResponse(body=b"test", status_code=200) + + app = AsgiFastStream(asgi_routes=[("/test", some_handler)]) + + with TestClient(app) as client: + response = client.get("/test") + assert response.status_code == 200 + assert response.text == "test" diff --git a/tests/asyncapi/base/arguments.py b/tests/asyncapi/base/arguments.py index 9b7bf75e39..4d5597f232 100644 --- 
a/tests/asyncapi/base/arguments.py +++ b/tests/asyncapi/base/arguments.py @@ -396,38 +396,6 @@ async def handle(user: User): ... "type": "object", } - def test_with_filter(self): - class User(pydantic.BaseModel): - name: str = "" - id: int - - broker = self.broker_class() - - @broker.subscriber( # pragma: no branch - "test", - filter=lambda m: m.content_type == "application/json", - ) - async def handle(id: int): ... - - @broker.subscriber("test") - async def handle_default(msg): ... - - schema = get_app_schema(self.build_app(broker)).to_jsonable() - - assert ( - len( - next(iter(schema["components"]["messages"].values()))["payload"][ - "oneOf" - ] - ) - == 2 - ) - - payload = schema["components"]["schemas"] - - assert "Handle:Message:Payload" in list(payload.keys()) - assert "HandleDefault:Message:Payload" in list(payload.keys()) - def test_ignores_depends(self): broker = self.broker_class() @@ -648,3 +616,37 @@ async def handle(id: int, user: Optional[str] = None, message=Context()): ... "type": "object", } ) + + + def test_with_filter(self): + # TODO: move it to FastAPICompatible with FastAPI refactor + class User(pydantic.BaseModel): + name: str = "" + id: int + + broker = self.broker_class() + + sub = broker.subscriber("test") + + @sub( + filter=lambda m: m.content_type == "application/json", + ) + async def handle(id: int): ... + + @sub + async def handle_default(msg): ... + + schema = get_app_schema(self.build_app(broker)).to_jsonable() + + assert ( + len( + next(iter(schema["components"]["messages"].values()))["payload"][ + "oneOf" + ] + ) + == 2 + ) + + payload = schema["components"]["schemas"] + + assert "Handle:Message:Payload" in list(payload.keys()) + assert "HandleDefault:Message:Payload" in list(payload.keys()) diff --git a/tests/asyncapi/confluent/test_security.py b/tests/asyncapi/confluent/test_security.py index 02b5e5c714..f865ba9163 100644 --- a/tests/asyncapi/confluent/test_security.py +++ b/tests/asyncapi/confluent/test_security.py @@ -6,10 +6,10 @@ from faststream.confluent import KafkaBroker from faststream.security import ( BaseSecurity, + SASLOAuthBearer, SASLPlaintext, SASLScram256, SASLScram512, - SASLOAuthBearer, ) basic_schema = { @@ -187,7 +187,9 @@ async def test_topic(msg: str) -> str: schema = get_app_schema(app).to_jsonable() sasl_oauthbearer_security_schema = deepcopy(basic_schema) - sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [{"oauthbearer": []}] + sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [ + {"oauthbearer": []} + ] sasl_oauthbearer_security_schema["components"]["securitySchemes"] = { "oauthbearer": {"type": "oauthBearer"} } diff --git a/tests/brokers/base/basic.py b/tests/brokers/base/basic.py new file mode 100644 index 0000000000..e550393052 --- /dev/null +++ b/tests/brokers/base/basic.py @@ -0,0 +1,13 @@ +from typing import Any, Dict, Tuple + + +class BaseTestcaseConfig: + timeout: float = 3.0 + + def get_subscriber_params( + self, *args: Any, **kwargs: Any + ) -> Tuple[ + Tuple[Any, ...], + Dict[str, Any], + ]: + return args, kwargs diff --git a/tests/brokers/base/consume.py b/tests/brokers/base/consume.py index 60405f576f..0ba02b5a2a 100644 --- a/tests/brokers/base/consume.py +++ b/tests/brokers/base/consume.py @@ -1,6 +1,6 @@ import asyncio from abc import abstractmethod -from typing import Any, ClassVar, Dict +from typing import Any from unittest.mock import MagicMock import anyio @@ -11,12 +11,11 @@ from faststream.broker.core.usecase import BrokerUsecase from faststream.exceptions
import StopConsume +from .basic import BaseTestcaseConfig -@pytest.mark.asyncio() -class BrokerConsumeTestcase: - timeout: int = 3 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {} +@pytest.mark.asyncio() +class BrokerConsumeTestcase(BaseTestcaseConfig): @abstractmethod def get_broker(self, broker: BrokerUsecase) -> BrokerUsecase[Any, Any]: raise NotImplementedError @@ -31,7 +30,9 @@ async def test_consume( ): consume_broker = self.get_broker() - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @consume_broker.subscriber(*args, **kwargs) def subscriber(m): event.set() @@ -57,8 +58,11 @@ async def test_consume_from_multi( consume = asyncio.Event() consume2 = asyncio.Event() - @consume_broker.subscriber(queue, **self.subscriber_kwargs) - @consume_broker.subscriber(queue + "1", **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + args2, kwargs2 = self.get_subscriber_params(queue + "1") + + @consume_broker.subscriber(*args, **kwargs) + @consume_broker.subscriber(*args2, **kwargs2) def subscriber(m): mock() if not consume.is_set(): @@ -92,7 +96,9 @@ async def test_consume_double( consume = asyncio.Event() consume2 = asyncio.Event() - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @consume_broker.subscriber(*args, **kwargs) async def handler(m): mock() if not consume.is_set(): @@ -126,14 +132,17 @@ async def test_different_consume( consume = asyncio.Event() consume2 = asyncio.Event() - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @consume_broker.subscriber(*args, **kwargs) def handler(m): mock.handler() consume.set() another_topic = queue + "1" + args, kwargs = self.get_subscriber_params(another_topic) - @consume_broker.subscriber(another_topic, **self.subscriber_kwargs) + @consume_broker.subscriber(*args, **kwargs) def handler2(m): mock.handler2() consume2.set() @@ -165,16 +174,18 @@ async def test_consume_with_filter( consume = asyncio.Event() consume2 = asyncio.Event() - @consume_broker.subscriber( + args, kwargs = self.get_subscriber_params( queue, - filter=lambda m: m.content_type == "application/json", - **self.subscriber_kwargs, ) + + sub = consume_broker.subscriber(*args, **kwargs) + + @sub(filter=lambda m: m.content_type == "application/json") async def handler(m): mock.handler(m) consume.set() - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + @sub async def handler2(m): mock.handler2(m) consume2.set() @@ -213,7 +224,9 @@ class Foo(BaseModel): def dependency() -> str: return "100" - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @consume_broker.subscriber(*args, **kwargs) async def handler(m: Foo, dep: int = Depends(dependency), broker=Context()): mock(m, dep, broker) event.set() @@ -245,7 +258,8 @@ async def subscriber(m): async with self.patch_broker(consume_broker) as br: await br.start() - sub = br.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + sub = br.subscriber(*args, **kwargs) sub(subscriber) br.setup_subscriber(sub) await sub.start() @@ -271,7 +285,9 @@ async def test_stop_consume_exc( ): consume_broker = self.get_broker() - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @consume_broker.subscriber(*args, **kwargs) def subscriber(m): mock() event.set() 
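Two related changes run through the consume testcases above: subscriber construction is funneled through `BaseTestcaseConfig.get_subscriber_params`, so broker-specific suites can inject their own subscriber arguments without duplicating tests, and `test_consume_with_filter` now builds one subscriber object and registers several handlers on it instead of passing `filter=` to `subscriber()`. A sketch of that registration style as the tests use it; the NATS broker and subject name are arbitrary choices for illustration:

# Filtered-handler registration sketch, mirroring test_consume_with_filter.
from faststream.nats import NatsBroker

broker = NatsBroker()

sub = broker.subscriber("test-subject")  # one underlying subscription

@sub(filter=lambda m: m.content_type == "application/json")
async def handle_json(body: dict) -> None:
    ...  # called only for JSON-typed messages

@sub  # no filter: default handler for everything else
async def handle_default(body) -> None:
    ...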
diff --git a/tests/brokers/base/fastapi.py b/tests/brokers/base/fastapi.py index 5f37956b75..5c1f09b80a 100644 --- a/tests/brokers/base/fastapi.py +++ b/tests/brokers/base/fastapi.py @@ -14,17 +14,21 @@ from faststream.broker.fastapi.router import StreamRouter from faststream.types import AnyCallable +from .basic import BaseTestcaseConfig + Broker = TypeVar("Broker", bound=BrokerUsecase) @pytest.mark.asyncio() -class FastAPITestcase: +class FastAPITestcase(BaseTestcaseConfig): router_class: Type[StreamRouter[BrokerUsecase]] async def test_base_real(self, mock: Mock, queue: str, event: asyncio.Event): router = self.router_class() - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(msg): event.set() return mock(msg) @@ -36,7 +40,7 @@ async def hello(msg): asyncio.create_task(router.broker.publish("hi", queue)), asyncio.create_task(event.wait()), ), - timeout=3, + timeout=self.timeout, ) assert event.is_set() @@ -49,7 +53,9 @@ def task(msg): event.set() return mock(msg) - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(msg, tasks: BackgroundTasks): tasks.add_task(task, msg) @@ -60,7 +66,7 @@ async def hello(msg, tasks: BackgroundTasks): asyncio.create_task(router.broker.publish("hi", queue)), asyncio.create_task(event.wait()), ), - timeout=3, + timeout=self.timeout, ) assert event.is_set() @@ -71,7 +77,9 @@ async def test_context(self, mock: Mock, queue: str, event: asyncio.Event): context_key = "message.headers" - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(msg=Context(context_key)): event.set() return mock(msg == context.resolve(context_key)) @@ -83,7 +91,7 @@ async def hello(msg=Context(context_key)): asyncio.create_task(router.broker.publish("", queue)), asyncio.create_task(event.wait()), ), - timeout=3, + timeout=self.timeout, ) assert event.is_set() @@ -92,7 +100,9 @@ async def hello(msg=Context(context_key)): async def test_initial_context(self, queue: str, event: asyncio.Event): router = self.router_class() - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(msg: int, data=Context(queue, initial=set)): data.add(msg) if len(data) == 2: @@ -106,7 +116,7 @@ async def hello(msg: int, data=Context(queue, initial=set)): asyncio.create_task(router.broker.publish(2, queue)), asyncio.create_task(event.wait()), ), - timeout=3, + timeout=self.timeout, ) assert event.is_set() @@ -117,8 +127,11 @@ async def test_double_real(self, mock: Mock, queue: str, event: asyncio.Event): event2 = asyncio.Event() router = self.router_class() - @router.subscriber(queue) - @router.subscriber(queue + "2") + args, kwargs = self.get_subscriber_params(queue) + args2, kwargs2 = self.get_subscriber_params(queue + "2") + + @router.subscriber(*args, **kwargs) + @router.subscriber(*args2, **kwargs2) async def hello(msg: str): if event.is_set(): event2.set() @@ -135,7 +148,7 @@ async def hello(msg: str): asyncio.create_task(event.wait()), asyncio.create_task(event2.wait()), ), - timeout=3, + timeout=self.timeout, ) assert event.is_set() @@ -150,12 +163,16 @@ async def test_base_publisher_real( ): router = self.router_class() - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) @router.publisher(queue + "resp") async def 
m(): return "hi" - @router.subscriber(queue + "resp") + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @router.subscriber(*args2, **kwargs2) async def resp(msg): event.set() mock(msg) @@ -168,7 +185,7 @@ async def resp(msg): asyncio.create_task(router.broker.publish("", queue)), asyncio.create_task(event.wait()), ), - timeout=3, + timeout=self.timeout, ) assert event.is_set() @@ -176,7 +193,7 @@ async def resp(msg): @pytest.mark.asyncio() -class FastAPILocalTestcase: +class FastAPILocalTestcase(BaseTestcaseConfig): router_class: Type[StreamRouter[BrokerUsecase]] broker_test: Callable[[Broker], Broker] build_message: AnyCallable @@ -186,7 +203,9 @@ async def test_base(self, queue: str): app = FastAPI(lifespan=router.lifespan_context) - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(): return "hi" @@ -207,7 +226,9 @@ async def test_base_without_state(self, queue: str): app = FastAPI(lifespan=router.lifespan_context) - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(): return "hi" @@ -228,7 +249,9 @@ async def test_invalid(self, queue: str): app = FastAPI(lifespan=router.lifespan_context) - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(msg: int): ... app.include_router(router) @@ -241,7 +264,9 @@ async def hello(msg: int): ... async def test_headers(self, queue: str): router = self.router_class() - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(w=Header()): return w @@ -262,7 +287,9 @@ def dep(a): mock(a) return a - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(a, w=Depends(dep)): return w @@ -285,7 +312,9 @@ def dep(a): yield a mock.close() - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(a, w=Depends(dep)): mock.start.assert_called_once() assert not mock.close.call_count @@ -309,7 +338,9 @@ def mock_dep(): router = self.router_class(dependencies=(Depends(mock_dep, use_cache=False),)) - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(a): return a @@ -325,7 +356,12 @@ def mock_dep(): router = self.router_class() - @router.subscriber(queue, dependencies=(Depends(mock_dep, use_cache=False),)) + args, kwargs = self.get_subscriber_params( + queue, + dependencies=(Depends(mock_dep, use_cache=False),), + ) + + @router.subscriber(*args, **kwargs) async def hello(a): return a @@ -349,12 +385,10 @@ async def test_hooks(self, mock: Mock): @router.after_startup def test_sync(app): mock.sync_called() - return {"sync_called": mock.async_called.called is False} @router.after_startup async def test_async(app): mock.async_called() - return {"async_called": mock.sync_called.called} @router.on_broker_shutdown def test_shutdown_sync(app): @@ -364,11 +398,8 @@ def test_shutdown_sync(app): async def test_shutdown_async(app): mock.async_shutdown_called() - async with self.broker_test(router.broker), router.lifespan_context( - app - ) as context: - assert context["sync_called"] - assert context["async_called"] + async with self.broker_test(router.broker), router.lifespan_context(app): + pass 
mock.sync_called.assert_called_once() mock.async_called.assert_called_once() @@ -398,7 +429,9 @@ async def lifespan(app): async def test_subscriber_mock(self, queue: str): router = self.router_class() - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def m(): return "hi" @@ -411,8 +444,10 @@ async def test_publisher_mock(self, queue: str): publisher = router.publisher(queue + "resp") + args, kwargs = self.get_subscriber_params(queue) + @publisher - @router.subscriber(queue) + @router.subscriber(*args, **kwargs) async def m(): return "response" @@ -426,11 +461,15 @@ async def test_include(self, queue: str): app = FastAPI(lifespan=router.lifespan_context) - @router.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def hello(): return "hi" - @router2.subscriber(queue + "1") + args2, kwargs2 = self.get_subscriber_params(queue + "1") + + @router2.subscriber(*args2, **kwargs2) async def hello_router2(): return "hi" @@ -467,7 +506,9 @@ def dep1(): app = FastAPI(lifespan=router.lifespan_context) app.dependency_overrides[dep1] = lambda: mock() - @router2.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @router2.subscriber(*args, **kwargs) async def hello_router2(dep=Depends(dep1)): return "hi" diff --git a/tests/brokers/base/middlewares.py b/tests/brokers/base/middlewares.py index 7ed74522d8..e9446a2b78 100644 --- a/tests/brokers/base/middlewares.py +++ b/tests/brokers/base/middlewares.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any, ClassVar, Dict, Type +from typing import Type from unittest.mock import Mock import pytest @@ -7,12 +7,12 @@ from faststream.broker.core.usecase import BrokerUsecase from faststream.broker.middlewares import BaseMiddleware +from .basic import BaseTestcaseConfig + @pytest.mark.asyncio() -class LocalMiddlewareTestcase: +class LocalMiddlewareTestcase(BaseTestcaseConfig): broker_class: Type[BrokerUsecase] - timeout: int = 3 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {} @pytest.fixture() def raw_broker(self): @@ -39,7 +39,9 @@ async def mid(call_next, msg): broker = self.broker_class() - @broker.subscriber(queue, middlewares=(mid,), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, middlewares=(mid,)) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock.inner(m) return "end" @@ -79,7 +81,9 @@ async def mid(call_next, msg, **kwargs): broker = self.broker_class() - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) @broker.publisher(queue + "1", middlewares=(mid,)) @broker.publisher(queue + "2", middlewares=(mid,)) async def handler(m): @@ -117,8 +121,14 @@ async def mid(call_next, msg): broker = self.broker_class() - @broker.subscriber(queue, **self.subscriber_kwargs) - @broker.subscriber(queue + "1", middlewares=(mid,), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + args2, kwargs2 = self.get_subscriber_params( + queue + "1", + middlewares=(mid,), + ) + + @broker.subscriber(*args, **kwargs) + @broker.subscriber(*args2, **kwargs2) async def handler(m): if event1.is_set(): event2.set() @@ -161,17 +171,19 @@ async def mid(call_next, msg): broker = self.broker_class() - @broker.subscriber( + args, kwargs = self.get_subscriber_params( queue, - filter=lambda m: m.content_type == "application/json", - **self.subscriber_kwargs, ) + + sub = 
broker.subscriber(*args, **kwargs) + + @sub(filter=lambda m: m.content_type == "application/json") async def handler(m): event2.set() mock() return "" - @broker.subscriber(queue, middlewares=(mid,), **self.subscriber_kwargs) + @sub(middlewares=(mid,)) async def handler2(m): event1.set() mock() @@ -209,7 +221,9 @@ async def mid(call_next, msg): broker = self.broker_class() - @broker.subscriber(queue, middlewares=(mid,), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, middlewares=(mid,)) + + @broker.subscriber(*args, **kwargs) async def handler2(m): event.set() raise ValueError() @@ -249,7 +263,9 @@ async def after_processed(self, exc_type, exc_val, exc_tb): middlewares=(mid,), ) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): event.set() return "" @@ -289,7 +305,9 @@ async def after_processed(self, exc_type, exc_val, exc_tb): broker = self.broker_class() # already registered subscriber - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): event.set() return "" @@ -300,7 +318,9 @@ async def handler(m): event2 = asyncio.Event() # new subscriber - @broker.subscriber(f"{queue}1", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "1") + + @broker.subscriber(*args2, **kwargs2) async def handler2(m): event2.set() return "" @@ -330,11 +350,15 @@ async def on_publish(self, msg: str, *args, **kwargs) -> str: broker = self.broker_class(middlewares=(Mid,)) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): return m - @broker.subscriber(queue + "r", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "r") + + @broker.subscriber(*args2, **kwargs2) async def handler_resp(m): mock(m) event.set() @@ -378,7 +402,9 @@ async def after_publish(self, *args, **kwargs): broker = self.broker_class(middlewares=(Mid,)) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) @broker.publisher(queue + "1") @broker.publisher(queue + "2") async def handler(m): diff --git a/tests/brokers/base/parser.py b/tests/brokers/base/parser.py index 60b5bb93ad..0d8fff408a 100644 --- a/tests/brokers/base/parser.py +++ b/tests/brokers/base/parser.py @@ -1,17 +1,17 @@ import asyncio -from typing import Any, ClassVar, Dict, Type +from typing import Type from unittest.mock import Mock import pytest from faststream.broker.core.usecase import BrokerUsecase +from .basic import BaseTestcaseConfig + @pytest.mark.asyncio() -class LocalCustomParserTestcase: +class LocalCustomParserTestcase(BaseTestcaseConfig): broker_class: Type[BrokerUsecase] - timeout: int = 3 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {} @pytest.fixture() def raw_broker(self): @@ -36,7 +36,9 @@ async def custom_parser(msg, original): mock(msg.body) return msg - @broker.subscriber(queue, parser=custom_parser, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, parser=custom_parser) + + @broker.subscriber(*args, **kwargs) async def handle(m): event.set() @@ -68,7 +70,9 @@ def custom_decoder(msg): mock(msg.body) return msg - @broker.subscriber(queue, decoder=custom_decoder, **self.subscriber_kwargs) + args, kwargs = 
self.get_subscriber_params(queue, decoder=custom_decoder) + + @broker.subscriber(*args, **kwargs) async def handle(m): event.set() @@ -100,7 +104,9 @@ def custom_decoder(msg): broker = self.broker_class(decoder=custom_decoder) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handle(m): event.set() @@ -134,8 +140,11 @@ async def custom_parser(msg, original): mock(msg.body) return msg - @broker.subscriber(queue, parser=custom_parser, **self.subscriber_kwargs) - @broker.subscriber(queue + "1", **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, parser=custom_parser) + args2, kwargs2 = self.get_subscriber_params(queue + "1") + + @broker.subscriber(*args, **kwargs) + @broker.subscriber(*args2, **kwargs2) async def handle(m): if event.is_set(): event2.set() @@ -168,6 +177,15 @@ async def test_local_parser_no_share_between_handlers( event: asyncio.Event, ): broker = self.broker_class() + + args, kwargs = self.get_subscriber_params( + queue, filter=lambda m: m.content_type == "application/json" + ) + + @broker.subscriber(*args, **kwargs) + async def handle(m): + event.set() + event2 = asyncio.Event() async def custom_parser(msg, original): @@ -175,15 +193,9 @@ async def custom_parser(msg, original): mock(msg.body) return msg - @broker.subscriber( - queue, - filter=lambda m: m.content_type == "application/json", - **self.subscriber_kwargs, - ) - async def handle(m): - event.set() + args2, kwargs2 = self.get_subscriber_params(queue, parser=custom_parser) - @broker.subscriber(queue, parser=custom_parser, **self.subscriber_kwargs) + @broker.subscriber(*args2, **kwargs2) async def handle2(m): event2.set() @@ -221,7 +233,9 @@ async def custom_parser(msg, original): broker = self.broker_class(parser=custom_parser) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handle(m): event.set() diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py index abf5f0022c..e1ffbb2e2a 100644 --- a/tests/brokers/base/publish.py +++ b/tests/brokers/base/publish.py @@ -2,7 +2,7 @@ from abc import abstractmethod from dataclasses import asdict, dataclass from datetime import datetime -from typing import Any, ClassVar, Dict, List, Tuple +from typing import Any, Dict, List, Tuple from unittest.mock import Mock import anyio @@ -13,6 +13,8 @@ from faststream._compat import dump_json, model_to_json from faststream.broker.core.usecase import BrokerUsecase +from .basic import BaseTestcaseConfig + class SimpleModel(BaseModel): r: str @@ -26,10 +28,7 @@ class SimpleDataclass: now = datetime.now() -class BrokerPublishTestcase: - timeout: int = 3 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {} - +class BrokerPublishTestcase(BaseTestcaseConfig): @abstractmethod def get_broker(self, apply_types: bool = False) -> BrokerUsecase[Any, Any]: raise NotImplementedError @@ -156,7 +155,9 @@ async def test_serialize( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def handler(m: message_type): event.set() mock(m) @@ -184,12 +185,16 @@ async def test_response( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + 
@pub_broker.subscriber(*args, **kwargs) @pub_broker.publisher(queue + "1") async def m(): return Response(1, headers={"custom": "1"}, correlation_id="1") - @pub_broker.subscriber(queue + "1", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "1") + + @pub_broker.subscriber(*args2, **kwargs2) async def m_next(msg=Context("message")): event.set() mock( @@ -224,7 +229,9 @@ async def test_unwrap_dict( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def m(a: int, b: int): event.set() mock({"a": a, "b": b}) @@ -256,7 +263,9 @@ async def test_unwrap_list( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def m(a: int, b: int, *args: Tuple[int, ...]): event.set() mock({"a": a, "b": b, "args": args}) @@ -283,12 +292,16 @@ async def test_base_publisher( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) @pub_broker.publisher(queue + "resp") async def m(): return "" - @pub_broker.subscriber(queue + "resp", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @pub_broker.subscriber(*args2, **kwargs2) async def resp(msg): event.set() mock(msg) @@ -317,12 +330,16 @@ async def test_publisher_object( publisher = pub_broker.publisher(queue + "resp") + args, kwargs = self.get_subscriber_params(queue) + @publisher - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.subscriber(*args, **kwargs) async def m(): return "" - @pub_broker.subscriber(queue + "resp", **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue + "resp") + + @pub_broker.subscriber(*args, **kwargs) async def resp(msg): event.set() mock(msg) @@ -351,11 +368,15 @@ async def test_publish_manual( publisher = pub_broker.publisher(queue + "resp") - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def m(): await publisher.publish("") - @pub_broker.subscriber(queue + "resp", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @pub_broker.subscriber(*args2, **kwargs2) async def resp(msg): event.set() mock(msg) @@ -384,18 +405,24 @@ async def test_multiple_publishers( event = anyio.Event() event2 = anyio.Event() + args, kwargs = self.get_subscriber_params(queue) + @pub_broker.publisher(queue + "resp2") - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.subscriber(*args, **kwargs) @pub_broker.publisher(queue + "resp") async def m(): return "" - @pub_broker.subscriber(queue + "resp", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @pub_broker.subscriber(*args2, **kwargs2) async def resp(msg): event.set() mock.resp1(msg) - @pub_broker.subscriber(queue + "resp2", **self.subscriber_kwargs) + args3, kwargs3 = self.get_subscriber_params(queue + "resp2") + + @pub_broker.subscriber(*args3, **kwargs3) async def resp2(msg): event2.set() mock.resp2(msg) @@ -429,17 +456,23 @@ async def test_reusable_publishers( pub = pub_broker.publisher(queue + "resp") + args, kwargs = 
self.get_subscriber_params(queue) + @pub - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.subscriber(*args, **kwargs) async def m(): return "" + args2, kwargs2 = self.get_subscriber_params(queue + "2") + @pub - @pub_broker.subscriber(queue + "2", **self.subscriber_kwargs) + @pub_broker.subscriber(*args2, **kwargs2) async def m2(): return "" - @pub_broker.subscriber(queue + "resp", **self.subscriber_kwargs) + args3, kwargs3 = self.get_subscriber_params(queue + "resp") + + @pub_broker.subscriber(*args3, **kwargs3) async def resp(): if not consume.is_set(): consume.set() @@ -472,12 +505,16 @@ async def test_reply_to( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue + "reply", **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue + "reply") + + @pub_broker.subscriber(*args, **kwargs) async def reply_handler(m): event.set() mock(m) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args2, **kwargs2) async def handler(m): return m @@ -513,11 +550,15 @@ async def after_processed(self, *args: Any, **kwargs: Any): pub_broker = self.get_broker(apply_types=True) pub_broker.add_middleware(Mid) - @pub_broker.subscriber(queue + "reply", **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue + "reply") + + @pub_broker.subscriber(*args, **kwargs) async def reply_handler(m): mock(m) - @pub_broker.subscriber(queue, no_reply=True, **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue, no_reply=True) + + @pub_broker.subscriber(*args2, **kwargs2) async def handler(m): return m @@ -546,7 +587,9 @@ async def test_publisher_after_start( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def handler(m): event.set() mock(m) diff --git a/tests/brokers/base/router.py b/tests/brokers/base/router.py index 1361f4c9b5..89282c9d74 100644 --- a/tests/brokers/base/router.py +++ b/tests/brokers/base/router.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any, ClassVar, Dict, Type +from typing import Type from unittest.mock import Mock import pytest @@ -19,8 +19,6 @@ class RouterTestcase( ): build_message: AnyCallable route_class: Type[SubscriberRoute] - timeout: int = 3 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {} publisher_class: Type[ArgsContainer] def patch_broker(self, br: BrokerUsecase, router: BrokerRouter) -> BrokerUsecase: @@ -42,7 +40,9 @@ async def test_empty_prefix( queue: str, event: asyncio.Event, ): - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) def subscriber(m): event.set() @@ -70,7 +70,9 @@ async def test_not_empty_prefix( ): router.prefix = "test_" - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) def subscriber(m): event.set() @@ -96,7 +98,9 @@ async def test_include_with_prefix( queue: str, event: asyncio.Event, ): - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) def subscriber(m): event.set() @@ -122,12 +126,16 @@ async def test_empty_prefix_publisher( queue: str, event: asyncio.Event, ): - @router.subscriber(queue, **self.subscriber_kwargs) + args, 
kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) @router.publisher(queue + "resp") def subscriber(m): return "hi" - @router.subscriber(queue + "resp", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @router.subscriber(*args2, **kwargs2) def response(m): event.set() @@ -155,12 +163,16 @@ async def test_not_empty_prefix_publisher( ): router.prefix = "test_" - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) @router.publisher(queue + "resp") def subscriber(m): return "hi" - @router.subscriber(queue + "resp", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @router.subscriber(*args2, **kwargs2) def response(m): event.set() @@ -190,11 +202,15 @@ async def test_manual_publisher( p = router.publisher(queue + "resp") - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) async def subscriber(m): await p.publish("resp") - @router.subscriber(queue + "resp", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "resp") + + @router.subscriber(*args2, **kwargs2) def response(m): event.set() @@ -223,9 +239,11 @@ async def test_delayed_handlers( def response(m): event.set() + args, kwargs = self.get_subscriber_params(queue) + r = type(router)( prefix="test_", - handlers=(self.route_class(response, queue, **self.subscriber_kwargs),), + handlers=(self.route_class(response, *args, **kwargs),), ) pub_broker.include_router(r) @@ -254,21 +272,25 @@ async def test_delayed_publishers( def response(m): return m + args, kwargs = self.get_subscriber_params(queue) + r = type(router)( prefix="test_", handlers=( self.route_class( response, - queue, + *args, + **kwargs, publishers=(self.publisher_class(queue + "1"),), - **self.subscriber_kwargs, ), ), ) pub_broker.include_router(r) - @pub_broker.subscriber(f"test_{queue}1", **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(f"test_{queue}1") + + @pub_broker.subscriber(*args, **kwargs) async def handler(msg): mock(msg) event.set() @@ -299,7 +321,9 @@ async def test_nested_routers_sub( core_router = type(router)(prefix="test1_") router.prefix = "test2_" - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) def subscriber(m): event.set() mock(m) @@ -334,14 +358,18 @@ async def test_nested_routers_pub( core_router = type(router)(prefix="test1_") router.prefix = "test2_" - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) @router.publisher(queue + "resp") def subscriber(m): return "hi" - @pub_broker.subscriber( - "test1_" + "test2_" + queue + "resp", **self.subscriber_kwargs + args2, kwargs2 = self.get_subscriber_params( + "test1_" + "test2_" + queue + "resp" ) + + @pub_broker.subscriber(*args2, **kwargs2) def response(m): event.set() @@ -372,9 +400,11 @@ async def test_router_dependencies( router = type(router)(dependencies=(Depends(lambda: 1),)) router2 = type(router)(dependencies=(Depends(lambda: 2),)) - @router2.subscriber( - queue, dependencies=(Depends(lambda: 3),), **self.subscriber_kwargs + args, kwargs = self.get_subscriber_params( + queue, dependencies=(Depends(lambda: 3),) ) + + @router2.subscriber(*args, **kwargs) def subscriber(): ... 
router.include_router(router2) @@ -391,11 +421,12 @@ async def test_router_include_with_dependencies( ): router2 = type(router)() - @router2.subscriber( + args, kwargs = self.get_subscriber_params( queue, dependencies=(Depends(lambda: 3),), - **self.subscriber_kwargs, ) + + @router2.subscriber(*args, **kwargs) def subscriber(): ... router.include_router(router2, dependencies=(Depends(lambda: 2),)) @@ -414,7 +445,9 @@ async def test_router_middlewares( router = type(router)(middlewares=(BaseMiddleware,)) router2 = type(router)(middlewares=(BaseMiddleware,)) - @router2.subscriber(queue, middlewares=(3,), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, middlewares=(3,)) + + @router2.subscriber(*args, **kwargs) @router2.publisher(queue, middlewares=(3,)) def subscriber(): ... @@ -435,7 +468,9 @@ async def test_router_include_with_middlewares( ): router2 = type(router)() - @router2.subscriber(queue, middlewares=(3,), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, middlewares=(3,)) + + @router2.subscriber(*args, **kwargs) @router2.publisher(queue, middlewares=(3,)) def subscriber(): ... @@ -470,7 +505,9 @@ async def decoder(msg, original): decoder=decoder, ) - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) def subscriber(s): event.set() @@ -520,9 +557,9 @@ async def decoder(msg, original): decoder=global_decoder, ) - @router.subscriber( - queue, parser=parser, decoder=decoder, **self.subscriber_kwargs - ) + args, kwargs = self.get_subscriber_params(queue, parser=parser, decoder=decoder) + + @router.subscriber(*args, **kwargs) def subscriber(s): event.set() @@ -560,7 +597,9 @@ async def test_publisher_mock( ): pub = router.publisher(queue + "resp") - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) @pub def subscriber(m): event.set() @@ -589,7 +628,9 @@ async def test_subscriber_mock( queue: str, event: asyncio.Event, ): - @router.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @router.subscriber(*args, **kwargs) def subscriber(m): event.set() return "hi" @@ -615,7 +656,9 @@ async def test_manual_publisher_mock( ): publisher = router.publisher(queue + "resp") - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def m(m): await publisher.publish("response") diff --git a/tests/brokers/base/rpc.py b/tests/brokers/base/rpc.py index e544360bc5..e0f0a0cffa 100644 --- a/tests/brokers/base/rpc.py +++ b/tests/brokers/base/rpc.py @@ -9,8 +9,10 @@ from faststream.broker.core.usecase import BrokerUsecase from faststream.utils.functions import timeout_scope +from .basic import BaseTestcaseConfig -class BrokerRPCTestcase: + +class BrokerRPCTestcase(BaseTestcaseConfig): @abstractstaticmethod def get_broker(self, apply_types: bool = False) -> BrokerUsecase[Any, Any]: raise NotImplementedError @@ -22,8 +24,10 @@ def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, An async def test_rpc(self, queue: str): rpc_broker = self.get_broker() - @rpc_broker.subscriber(queue) - async def m(m): # pragma: no cover + args, kwargs = self.get_subscriber_params(queue) + + @rpc_broker.subscriber(*args, **kwargs) + async def m(m): return "1" async with self.patch_broker(rpc_broker) as br: @@ -36,7 +40,9 @@ 
async def m(m): # pragma: no cover async def test_rpc_timeout_raises(self, queue: str): rpc_broker = self.get_broker() - @rpc_broker.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @rpc_broker.subscriber(*args, **kwargs) async def m(m): # pragma: no cover await anyio.sleep(1) @@ -56,7 +62,9 @@ async def m(m): # pragma: no cover async def test_rpc_timeout_none(self, queue: str): rpc_broker = self.get_broker() - @rpc_broker.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @rpc_broker.subscriber(*args, **kwargs) async def m(m): # pragma: no cover await anyio.sleep(1) @@ -83,13 +91,17 @@ async def test_rpc_with_reply( reply_queue = queue + "1" - @rpc_broker.subscriber(reply_queue) + args, kwargs = self.get_subscriber_params(reply_queue) + + @rpc_broker.subscriber(*args, **kwargs) async def response_hanler(m: str): mock(m) event.set() - @rpc_broker.subscriber(queue) - async def m(m): # pragma: no cover + args2, kwargs2 = self.get_subscriber_params(queue) + + @rpc_broker.subscriber(*args2, **kwargs2) + async def m(m): return "1" async with self.patch_broker(rpc_broker) as br: diff --git a/tests/brokers/base/testclient.py b/tests/brokers/base/testclient.py index b0335d0741..3d96efbdd9 100644 --- a/tests/brokers/base/testclient.py +++ b/tests/brokers/base/testclient.py @@ -28,7 +28,9 @@ def get_fake_producer_class(self) -> type: async def test_subscriber_mock(self, queue: str): test_broker = self.get_broker() - @test_broker.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @test_broker.subscriber(*args, **kwargs) async def m(msg): pass @@ -43,8 +45,10 @@ async def test_publisher_mock(self, queue: str): publisher = test_broker.publisher(queue + "resp") + args, kwargs = self.get_subscriber_params(queue) + @publisher - @test_broker.subscriber(queue) + @test_broker.subscriber(*args, **kwargs) async def m(msg): return "response" @@ -59,7 +63,9 @@ async def test_manual_publisher_mock(self, queue: str): publisher = test_broker.publisher(queue + "resp") - @test_broker.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @test_broker.subscriber(*args, **kwargs) async def m(msg): await publisher.publish("response") @@ -72,7 +78,9 @@ async def m(msg): async def test_exception_raises(self, queue: str): test_broker = self.get_broker() - @test_broker.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @test_broker.subscriber(*args, **kwargs) async def m(msg): # pragma: no cover raise ValueError() @@ -110,14 +118,16 @@ async def test_broker_with_real_doesnt_get_patched(self): assert br._connection is not None assert br._producer is not None - async def test_broker_with_real_patches_subscribers_and_subscribers( + async def test_broker_with_real_patches_publishers_and_subscribers( self, queue: str ): test_broker = self.get_broker() publisher = test_broker.publisher(f"{queue}1") - @test_broker.subscriber(queue) + args, kwargs = self.get_subscriber_params(queue) + + @test_broker.subscriber(*args, **kwargs) async def m(msg): await publisher.publish(f"response: {msg}") @@ -129,6 +139,7 @@ async def m(msg): await m.wait_call(self.timeout) m.mock.assert_called_once_with("hello") + with anyio.fail_after(self.timeout): while not publisher.mock.called: await asyncio.sleep(0.1) diff --git a/tests/brokers/confluent/basic.py b/tests/brokers/confluent/basic.py new file mode 100644 index 0000000000..6fffc1c976 --- /dev/null +++ b/tests/brokers/confluent/basic.py @@ -0,0 +1,27 @@ +from typing import Any, Dict, Tuple + 
+from faststream.confluent import TopicPartition +from tests.brokers.base.basic import BaseTestcaseConfig as _Base + + +class ConfluentTestcaseConfig(_Base): + timeout: float = 10.0 + + def get_subscriber_params( + self, *topics: Any, **kwargs: Any + ) -> Tuple[ + Tuple[Any, ...], + Dict[str, Any], + ]: + if len(topics) == 1: + partitions = [TopicPartition(topics[0], partition=0, offset=0)] + topics = () + + else: + partitions = [] + + return topics, { + "auto_offset_reset": "earliest", + "partitions": partitions, + **kwargs, + } diff --git a/tests/brokers/confluent/test_consume.py b/tests/brokers/confluent/test_consume.py index 805b3a97f2..44debfff6a 100644 --- a/tests/brokers/confluent/test_consume.py +++ b/tests/brokers/confluent/test_consume.py @@ -1,5 +1,4 @@ import asyncio -from typing import Any, ClassVar, Dict from unittest.mock import patch import pytest @@ -11,14 +10,13 @@ from tests.brokers.base.consume import BrokerRealConsumeTestcase from tests.tools import spy_decorator +from .basic import ConfluentTestcaseConfig + @pytest.mark.confluent() -class TestConsume(BrokerRealConsumeTestcase): +class TestConsume(ConfluentTestcaseConfig, BrokerRealConsumeTestcase): """A class to represent a test Kafka broker.""" - timeout: int = 10 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} - def get_broker(self, apply_types: bool = False): return KafkaBroker(apply_types=apply_types) @@ -28,7 +26,9 @@ async def test_consume_batch(self, queue: str): msgs_queue = asyncio.Queue(maxsize=1) - @consume_broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @consume_broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) @@ -53,7 +53,9 @@ async def test_consume_batch_headers( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @consume_broker.subscriber(*args, **kwargs) def subscriber(m, msg: KafkaMessage): check = all( ( @@ -88,9 +90,11 @@ async def test_consume_ack( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber( - queue, group_id="test", auto_commit=False, **self.subscriber_kwargs + args, kwargs = self.get_subscriber_params( + queue, group_id="test", auto_commit=False ) + + @consume_broker.subscriber(*args, **kwargs) async def handler(msg: KafkaMessage): event.set() @@ -127,9 +131,11 @@ async def test_consume_ack_manual( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber( - queue, group_id="test", auto_commit=False, **self.subscriber_kwargs + args, kwargs = self.get_subscriber_params( + queue, group_id="test", auto_commit=False ) + + @consume_broker.subscriber(*args, **kwargs) async def handler(msg: KafkaMessage): await msg.ack() event.set() @@ -162,9 +168,11 @@ async def test_consume_ack_raise( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber( - queue, group_id="test", auto_commit=False, **self.subscriber_kwargs + args, kwargs = self.get_subscriber_params( + queue, group_id="test", auto_commit=False ) + + @consume_broker.subscriber(*args, **kwargs) async def handler(msg: KafkaMessage): event.set() raise AckMessage() @@ -197,9 +205,11 @@ async def test_nack( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber( - queue, group_id="test", auto_commit=False, **self.subscriber_kwargs + args, kwargs 
= self.get_subscriber_params( + queue, group_id="test", auto_commit=False ) + + @consume_broker.subscriber(*args, **kwargs) async def handler(msg: KafkaMessage): await msg.nack() event.set() @@ -232,9 +242,9 @@ async def test_consume_no_ack( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber( - queue, group_id="test", no_ack=True, **self.subscriber_kwargs - ) + args, kwargs = self.get_subscriber_params(queue, group_id="test", no_ack=True) + + @consume_broker.subscriber(*args, **kwargs) async def handler(msg: KafkaMessage): event.set() @@ -271,9 +281,11 @@ async def test_consume_with_no_auto_commit( ): consume_broker = self.get_broker(apply_types=True) - @consume_broker.subscriber( - queue, auto_commit=False, group_id="test", **self.subscriber_kwargs + args, kwargs = self.get_subscriber_params( + queue, auto_commit=False, group_id="test" ) + + @consume_broker.subscriber(*args, **kwargs) async def subscriber_no_auto_commit(msg: KafkaMessage): await msg.nack() event.set() @@ -281,9 +293,11 @@ async def subscriber_no_auto_commit(msg: KafkaMessage): broker2 = self.get_broker() event2 = asyncio.Event() - @broker2.subscriber( - queue, auto_commit=True, group_id="test", **self.subscriber_kwargs + args, kwargs = self.get_subscriber_params( + queue, auto_commit=True, group_id="test" ) + + @broker2.subscriber(*args, **kwargs) async def subscriber_with_auto_commit(m): event2.set() diff --git a/tests/brokers/confluent/test_fastapi.py b/tests/brokers/confluent/test_fastapi.py index 5d2c297edc..dbfe5b4ba2 100644 --- a/tests/brokers/confluent/test_fastapi.py +++ b/tests/brokers/confluent/test_fastapi.py @@ -4,13 +4,15 @@ import pytest -from faststream.kafka.fastapi import KafkaRouter -from faststream.kafka.testing import TestKafkaBroker, build_message +from faststream.confluent.fastapi import KafkaRouter +from faststream.confluent.testing import TestKafkaBroker, build_message from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase +from .basic import ConfluentTestcaseConfig + @pytest.mark.confluent() -class TestRabbitRouter(FastAPITestcase): +class TestConfluentRouter(ConfluentTestcaseConfig, FastAPITestcase): router_class = KafkaRouter async def test_batch_real( @@ -21,7 +23,9 @@ async def test_batch_real( ): router = KafkaRouter() - @router.subscriber(queue, batch=True, auto_offset_reset="earliest") + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @router.subscriber(*args, **kwargs) async def hello(msg: List[str]): event.set() return mock(msg) @@ -33,14 +37,14 @@ async def hello(msg: List[str]): asyncio.create_task(router.broker.publish("hi", queue)), asyncio.create_task(event.wait()), ), - timeout=10, + timeout=self.timeout, ) assert event.is_set() mock.assert_called_with(["hi"]) -class TestRouterLocal(FastAPILocalTestcase): +class TestRouterLocal(ConfluentTestcaseConfig, FastAPILocalTestcase): router_class = KafkaRouter broker_test = staticmethod(TestKafkaBroker) build_message = staticmethod(build_message) @@ -53,7 +57,9 @@ async def test_batch_testclient( ): router = KafkaRouter() - @router.subscriber(queue, batch=True, auto_offset_reset="earliest") + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @router.subscriber(*args, **kwargs) async def hello(msg: List[str]): event.set() return mock(msg) @@ -64,7 +70,7 @@ async def hello(msg: List[str]): asyncio.create_task(router.broker.publish("hi", queue)), asyncio.create_task(event.wait()), ), - timeout=10, + timeout=self.timeout, ) assert event.is_set() diff --git 
a/tests/brokers/confluent/test_logger.py b/tests/brokers/confluent/test_logger.py index ab72676fc5..0904330de3 100644 --- a/tests/brokers/confluent/test_logger.py +++ b/tests/brokers/confluent/test_logger.py @@ -1,20 +1,19 @@ import asyncio import logging -from typing import Any, ClassVar, Dict +from typing import Any import pytest from faststream.broker.core.usecase import BrokerUsecase from faststream.confluent import KafkaBroker +from .basic import ConfluentTestcaseConfig + @pytest.mark.confluent() -class TestLogger: +class TestLogger(ConfluentTestcaseConfig): """A class to represent a test Kafka broker.""" - timeout: int = 10 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} - def get_broker(self, apply_types: bool = False): return KafkaBroker(apply_types=apply_types) @@ -30,7 +29,9 @@ async def test_custom_logger( test_logger = logging.getLogger("test_logger") consume_broker = KafkaBroker(logger=test_logger) - @consume_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @consume_broker.subscriber(*args, **kwargs) def subscriber(m): event.set() @@ -49,7 +50,7 @@ def subscriber(m): asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), - timeout=self.timeout, + timeout=10, ) assert event.is_set() diff --git a/tests/brokers/confluent/test_middlewares.py b/tests/brokers/confluent/test_middlewares.py index 25f9eddcc5..4e454d034c 100644 --- a/tests/brokers/confluent/test_middlewares.py +++ b/tests/brokers/confluent/test_middlewares.py @@ -1,13 +1,11 @@ -from typing import Any, ClassVar, Dict - import pytest from faststream.confluent import KafkaBroker from tests.brokers.base.middlewares import MiddlewareTestcase +from .basic import ConfluentTestcaseConfig + @pytest.mark.confluent() -class TestMiddlewares(MiddlewareTestcase): +class TestMiddlewares(ConfluentTestcaseConfig, MiddlewareTestcase): broker_class = KafkaBroker - timeout: int = 10 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} diff --git a/tests/brokers/confluent/test_parser.py b/tests/brokers/confluent/test_parser.py index 374f5f9f44..6ffff38e73 100644 --- a/tests/brokers/confluent/test_parser.py +++ b/tests/brokers/confluent/test_parser.py @@ -1,13 +1,11 @@ -from typing import Any, ClassVar, Dict - import pytest from faststream.confluent import KafkaBroker from tests.brokers.base.parser import CustomParserTestcase +from .basic import ConfluentTestcaseConfig + @pytest.mark.confluent() -class TestCustomParser(CustomParserTestcase): +class TestCustomParser(ConfluentTestcaseConfig, CustomParserTestcase): broker_class = KafkaBroker - timeout: int = 10 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} diff --git a/tests/brokers/confluent/test_publish.py b/tests/brokers/confluent/test_publish.py index 71a48b94d2..c05bcaf7dc 100644 --- a/tests/brokers/confluent/test_publish.py +++ b/tests/brokers/confluent/test_publish.py @@ -1,5 +1,4 @@ import asyncio -from typing import Any, ClassVar, Dict from unittest.mock import Mock import pytest @@ -8,12 +7,11 @@ from faststream.confluent import KafkaBroker, KafkaResponse from tests.brokers.base.publish import BrokerPublishTestcase +from .basic import ConfluentTestcaseConfig -@pytest.mark.confluent() -class TestPublish(BrokerPublishTestcase): - timeout: int = 10 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} +@pytest.mark.confluent() +class TestPublish(ConfluentTestcaseConfig, 
BrokerPublishTestcase): def get_broker(self, apply_types: bool = False): return KafkaBroker(apply_types=apply_types) @@ -23,7 +21,9 @@ async def test_publish_batch(self, queue: str): msgs_queue = asyncio.Queue(maxsize=2) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) @@ -48,7 +48,9 @@ async def test_batch_publisher_manual(self, queue: str): msgs_queue = asyncio.Queue(maxsize=2) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) @@ -75,12 +77,16 @@ async def test_batch_publisher_decorator(self, queue: str): msgs_queue = asyncio.Queue(maxsize=2) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) + args2, kwargs2 = self.get_subscriber_params(queue + "1") + @pub_broker.publisher(queue, batch=True) - @pub_broker.subscriber(queue + "1", **self.subscriber_kwargs) + @pub_broker.subscriber(*args2, **kwargs2) async def pub(m): return 1, "hi" @@ -108,12 +114,16 @@ async def test_response( ): pub_broker = self.get_broker(apply_types=True) - @pub_broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @pub_broker.subscriber(*args, **kwargs) @pub_broker.publisher(topic=queue + "1") async def handle(): return KafkaResponse(1) - @pub_broker.subscriber(queue + "1", **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(queue + "1") + + @pub_broker.subscriber(*args2, **kwargs2) async def handle_next(msg=Context("message")): mock(body=msg.body) event.set() diff --git a/tests/brokers/confluent/test_router.py b/tests/brokers/confluent/test_router.py index 827ab71179..2f6e2c93c2 100644 --- a/tests/brokers/confluent/test_router.py +++ b/tests/brokers/confluent/test_router.py @@ -1,23 +1,19 @@ -from typing import Any, ClassVar, Dict - import pytest from faststream.confluent import KafkaPublisher, KafkaRoute, KafkaRouter from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase +from .basic import ConfluentTestcaseConfig + @pytest.mark.confluent() -class TestRouter(RouterTestcase): +class TestRouter(ConfluentTestcaseConfig, RouterTestcase): broker_class = KafkaRouter route_class = KafkaRoute - timeout: int = 10 publisher_class = KafkaPublisher - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} -class TestRouterLocal(RouterLocalTestcase): +class TestRouterLocal(ConfluentTestcaseConfig, RouterLocalTestcase): broker_class = KafkaRouter route_class = KafkaRoute - timeout: int = 10 publisher_class = KafkaPublisher - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} diff --git a/tests/brokers/confluent/test_test_client.py b/tests/brokers/confluent/test_test_client.py index 43c558a7de..9f091d53ee 100644 --- a/tests/brokers/confluent/test_test_client.py +++ b/tests/brokers/confluent/test_test_client.py @@ -7,9 +7,11 @@ from faststream.confluent.testing import FakeProducer from tests.brokers.base.testclient import BrokerTestclientTestcase +from .basic import ConfluentTestcaseConfig + @pytest.mark.asyncio() -class TestTestclient(BrokerTestclientTestcase): +class TestTestclient(ConfluentTestcaseConfig, BrokerTestclientTestcase): 
"""A class to represent a test Kafka broker.""" test_class = TestKafkaBroker @@ -216,7 +218,7 @@ async def test_broker_with_real_doesnt_get_patched(self): await super().test_broker_with_real_doesnt_get_patched() @pytest.mark.confluent() - async def test_broker_with_real_patches_subscribers_and_subscribers( + async def test_broker_with_real_patches_publishers_and_subscribers( self, queue: str ): - await super().test_broker_with_real_patches_subscribers_and_subscribers(queue) + await super().test_broker_with_real_patches_publishers_and_subscribers(queue) diff --git a/tests/brokers/confluent/test_test_reentrancy.py b/tests/brokers/confluent/test_test_reentrancy.py index fde984ec57..6974971bf9 100644 --- a/tests/brokers/confluent/test_test_reentrancy.py +++ b/tests/brokers/confluent/test_test_reentrancy.py @@ -1,29 +1,38 @@ +from uuid import uuid4 + import pytest -from faststream.confluent import KafkaBroker, TestKafkaBroker +from faststream.confluent import KafkaBroker, TestKafkaBroker, TopicPartition broker = KafkaBroker() -to_output_data = broker.publisher("output_data") +first_topic_name = str(uuid4()) +out_topic_name = str(uuid4()) + +to_output_data = broker.publisher(out_topic_name, partition=0) @to_output_data -@broker.subscriber("input_data", auto_offset_reset="earliest") +@broker.subscriber( + partitions=[TopicPartition(first_topic_name, 0)], auto_offset_reset="earliest" +) async def on_input_data(msg: int): return msg + 1 -@broker.subscriber("output_data", auto_offset_reset="earliest") +@broker.subscriber( + partitions=[TopicPartition(out_topic_name, 0)], auto_offset_reset="earliest" +) async def on_output_data(msg: int): pass async def _test_with_broker(with_real: bool): async with TestKafkaBroker(broker, with_real=with_real) as tester: - await tester.publish(1, "input_data") + await tester.publish(1, first_topic_name) - await on_output_data.wait_call(10) + await on_output_data.wait_call(20) on_input_data.mock.assert_called_with(1) to_output_data.mock.assert_called_with(2) @@ -51,7 +60,7 @@ async def on_output_data(msg: int): async with TestKafkaBroker(broker) as tester: await tester.publish(1, "input_data") - await on_output_data.wait_call(3) + await on_output_data.wait_call(20) on_input_data.mock.assert_called_with(1) to_output_data.mock.assert_called_with(2) @@ -61,7 +70,7 @@ async def on_output_data(msg: int): @pytest.mark.asyncio() @pytest.mark.skip( reason=( - "Failed cuz `on_output_data` subscriber creates inside test and doesn't removed after" + "Failed due `on_output_data` subscriber creates inside test and doesn't removed after " "https://github.com/airtai/faststream/issues/556" ) ) diff --git a/tests/brokers/kafka/test_test_client.py b/tests/brokers/kafka/test_test_client.py index 1d96940310..7940645f98 100644 --- a/tests/brokers/kafka/test_test_client.py +++ b/tests/brokers/kafka/test_test_client.py @@ -264,7 +264,7 @@ async def test_broker_with_real_doesnt_get_patched(self): await super().test_broker_with_real_doesnt_get_patched() @pytest.mark.kafka() - async def test_broker_with_real_patches_subscribers_and_subscribers( + async def test_broker_with_real_patches_publishers_and_subscribers( self, queue: str ): - await super().test_broker_with_real_patches_subscribers_and_subscribers(queue) + await super().test_broker_with_real_patches_publishers_and_subscribers(queue) diff --git a/tests/brokers/kafka/test_test_reentrancy.py b/tests/brokers/kafka/test_test_reentrancy.py index d82a0ada85..7b369ddb8e 100644 --- a/tests/brokers/kafka/test_test_reentrancy.py +++ 
b/tests/brokers/kafka/test_test_reentrancy.py @@ -61,7 +61,7 @@ async def on_output_data(msg: int): @pytest.mark.asyncio() @pytest.mark.skip( reason=( - "Failed cuz `on_output_data` subscriber creates inside test and doesn't removed after" + "Failed because the `on_output_data` subscriber is created inside the test and is not removed afterwards " "https://github.com/airtai/faststream/issues/556" ) ) diff --git a/tests/brokers/nats/test_test_client.py b/tests/brokers/nats/test_test_client.py index cf801a9362..f2d8fc8b83 100644 --- a/tests/brokers/nats/test_test_client.py +++ b/tests/brokers/nats/test_test_client.py @@ -265,7 +265,7 @@ async def test_broker_with_real_doesnt_get_patched(self): await super().test_broker_with_real_doesnt_get_patched() @pytest.mark.nats() - async def test_broker_with_real_patches_subscribers_and_subscribers( + async def test_broker_with_real_patches_publishers_and_subscribers( self, queue: str ): - await super().test_broker_with_real_patches_subscribers_and_subscribers(queue) + await super().test_broker_with_real_patches_publishers_and_subscribers(queue) diff --git a/tests/brokers/rabbit/test_test_client.py b/tests/brokers/rabbit/test_test_client.py index 9f46002176..9329d88b6a 100644 --- a/tests/brokers/rabbit/test_test_client.py +++ b/tests/brokers/rabbit/test_test_client.py @@ -305,10 +305,10 @@ async def test_broker_with_real_doesnt_get_patched(self): await super().test_broker_with_real_doesnt_get_patched() @pytest.mark.rabbit() - async def test_broker_with_real_patches_subscribers_and_subscribers( + async def test_broker_with_real_patches_publishers_and_subscribers( self, queue: str ): - await super().test_broker_with_real_patches_subscribers_and_subscribers(queue) + await super().test_broker_with_real_patches_publishers_and_subscribers(queue) @pytest.mark.parametrize( diff --git a/tests/brokers/rabbit/test_test_reentrancy.py b/tests/brokers/rabbit/test_test_reentrancy.py index 6f532346c0..5c488d08c0 100644 --- a/tests/brokers/rabbit/test_test_reentrancy.py +++ b/tests/brokers/rabbit/test_test_reentrancy.py @@ -61,7 +61,7 @@ async def on_output_data(msg: int): @pytest.mark.asyncio() @pytest.mark.skip( reason=( - "Failed cuz `on_output_data` subscriber creates inside test and doesn't removed after" + "Failed because the `on_output_data` subscriber is created inside the test and is not removed afterwards " "https://github.com/airtai/faststream/issues/556" ) ) diff --git a/tests/brokers/redis/test_test_client.py b/tests/brokers/redis/test_test_client.py index 80f00ca441..7965b3f327 100644 --- a/tests/brokers/redis/test_test_client.py +++ b/tests/brokers/redis/test_test_client.py @@ -240,7 +240,7 @@ async def test_broker_with_real_doesnt_get_patched(self): await super().test_broker_with_real_doesnt_get_patched() @pytest.mark.redis() - async def test_broker_with_real_patches_subscribers_and_subscribers( + async def test_broker_with_real_patches_publishers_and_subscribers( self, queue: str ): - await super().test_broker_with_real_patches_subscribers_and_subscribers(queue) + await super().test_broker_with_real_patches_publishers_and_subscribers(queue) diff --git a/tests/mypy/kafka.py b/tests/mypy/kafka.py index 47fcbddf2d..eeeef066ed 100644 --- a/tests/mypy/kafka.py +++ b/tests/mypy/kafka.py @@ -56,15 +56,16 @@ async def async_filter(msg: KafkaMessage) -> bool: broker = KafkaBroker() -@broker.subscriber( - "test", +sub = broker.subscriber("test") + + +@sub( filter=sync_filter, ) async def handle() -> None: ... 
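All four mypy fixtures in this diff migrate to the same registration style: `subscriber()` now returns a reusable object, and each handler attaches its own `filter` when it registers. A minimal sketch of the pattern, assuming the NATS broker from `tests/mypy/nats.py` (handler names and bodies are placeholders):

from faststream.nats import NatsBroker

broker = NatsBroker()

# One subscriber object per subject...
sub = broker.subscriber("test")


# ...with a per-handler filter attached at registration time.
@sub(filter=lambda m: m.content_type == "application/json")
async def handle_json(msg) -> None: ...


# A bare `@sub` registers the default handler for everything
# the filtered handler above rejects.
@sub
async def handle_default(msg) -> None: ...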
-@broker.subscriber( - "test", +@sub( filter=async_filter, ) async def handle2() -> None: ... @@ -119,16 +120,16 @@ async def handle7() -> None: ... router = KafkaRouter() +router_sub = router.subscriber("test") -@router.subscriber( - "test", + +@router_sub( filter=sync_filter, ) async def handle8() -> None: ... -@router.subscriber( - "test", +@router_sub( filter=async_filter, ) async def handle9() -> None: ... @@ -215,16 +216,16 @@ def async_handler() -> None: ... fastapi_router = FastAPIRouter() +fastapi_sub = fastapi_router.subscriber("test") -@fastapi_router.subscriber( - "test", + +@fastapi_sub( filter=sync_filter, ) async def handle15() -> None: ... -@fastapi_router.subscriber( - "test", +@fastapi_sub( filter=async_filter, ) async def handle16() -> None: ... diff --git a/tests/mypy/nats.py b/tests/mypy/nats.py index 2df03803d7..955458eada 100644 --- a/tests/mypy/nats.py +++ b/tests/mypy/nats.py @@ -56,15 +56,16 @@ async def async_filter(msg: NatsMessage) -> bool: broker = NatsBroker() -@broker.subscriber( - "test", +sub = broker.subscriber("test") + + +@sub( filter=sync_filter, ) async def handle() -> None: ... -@broker.subscriber( - "test", +@sub( filter=async_filter, ) async def handle2() -> None: ... @@ -120,15 +121,16 @@ async def handle7() -> None: ... router = NatsRouter() -@router.subscriber( - "test", +router_sub = router.subscriber("test") + + +@router_sub( filter=sync_filter, ) async def handle8() -> None: ... -@router.subscriber( - "test", +@router_sub( filter=async_filter, ) async def handle9() -> None: ... @@ -215,16 +217,16 @@ def async_handler() -> None: ... fastapi_router = FastAPIRouter() +fastapi_sub = fastapi_router.subscriber("test") -@fastapi_router.subscriber( - "test", + +@fastapi_sub( filter=sync_filter, ) async def handle15() -> None: ... -@fastapi_router.subscriber( - "test", +@fastapi_sub( filter=async_filter, ) async def handle16() -> None: ... diff --git a/tests/mypy/rabbit.py b/tests/mypy/rabbit.py index 2bc2be77e1..064f6faad7 100644 --- a/tests/mypy/rabbit.py +++ b/tests/mypy/rabbit.py @@ -56,16 +56,16 @@ async def async_filter(msg: RabbitMessage) -> bool: broker = RabbitBroker() +sub = broker.subscriber("test") -@broker.subscriber( - "test", + +@sub( filter=sync_filter, ) async def handle() -> None: ... -@broker.subscriber( - "test", +@sub( filter=async_filter, ) async def handle2() -> None: ... @@ -121,15 +121,16 @@ async def handle7() -> None: ... router = RabbitRouter() -@router.subscriber( - "test", +router_sub = router.subscriber("test") + + +@router_sub( filter=sync_filter, ) async def handle8() -> None: ... -@router.subscriber( - "test", +@router_sub( filter=async_filter, ) async def handle9() -> None: ... @@ -217,15 +218,16 @@ def async_handler() -> None: ... fastapi_router = FastAPIRouter() -@fastapi_router.subscriber( - "test", +fastapi_sub = fastapi_router.subscriber("test") + + +@fastapi_sub( filter=sync_filter, ) async def handle15() -> None: ... -@fastapi_router.subscriber( - "test", +@fastapi_sub( filter=async_filter, ) async def handle16() -> None: ... diff --git a/tests/mypy/redis.py b/tests/mypy/redis.py index f927386719..58a3da36cd 100644 --- a/tests/mypy/redis.py +++ b/tests/mypy/redis.py @@ -58,15 +58,16 @@ async def async_filter(msg: Message) -> bool: broker = Broker() -@broker.subscriber( - "test", +sub = broker.subscriber("test") + + +@sub( filter=sync_filter, ) async def handle() -> None: ... -@broker.subscriber( - "test", +@sub( filter=async_filter, ) async def handle2() -> None: ... 
@@ -123,15 +124,16 @@ async def handle7() -> None: ... router = StreamRouter() -@router.subscriber( - "test", +router_sub = router.subscriber("test") + + +@router_sub( filter=sync_filter, ) async def handle8() -> None: ... -@router.subscriber( - "test", +@router_sub( filter=async_filter, ) async def handle9() -> None: ... @@ -218,16 +220,16 @@ def async_handler() -> None: ... fastapi_router = FastAPIRouter() +fastapi_sub = fastapi_router.subscriber("test") -@fastapi_router.subscriber( - "test", + +@fastapi_sub( filter=sync_filter, ) async def handle15() -> None: ... -@fastapi_router.subscriber( - "test", +@fastapi_sub( filter=async_filter, ) async def handle16() -> None: ... diff --git a/tests/opentelemetry/basic.py b/tests/opentelemetry/basic.py index 794a09ee6d..eab2ddcbe1 100644 --- a/tests/opentelemetry/basic.py +++ b/tests/opentelemetry/basic.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any, ClassVar, Dict, List, Optional, Tuple, Type, cast +from typing import List, Optional, Tuple, Type, cast from unittest.mock import Mock import pytest @@ -21,15 +21,14 @@ ) from faststream.opentelemetry.middleware import MessageAction as Action from faststream.opentelemetry.middleware import TelemetryMiddleware +from tests.brokers.base.basic import BaseTestcaseConfig @pytest.mark.asyncio() -class LocalTelemetryTestcase: +class LocalTelemetryTestcase(BaseTestcaseConfig): messaging_system: str include_messages_counters: bool broker_class: Type[BrokerUsecase] - timeout: int = 3 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {} resource: Resource = Resource.create(attributes={"service.name": "faststream.test"}) telemetry_middleware_class: TelemetryMiddleware @@ -165,7 +164,9 @@ async def test_subscriber_create_publish_process_span( mid = self.telemetry_middleware_class(tracer_provider=tracer_provider) broker = self.broker_class(middlewares=(mid,)) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() @@ -205,12 +206,16 @@ async def test_chain_subscriber_publisher( first_queue = queue second_queue = queue + "2" - @broker.subscriber(first_queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(first_queue) + + @broker.subscriber(*args, **kwargs) @broker.publisher(second_queue) async def handler1(m): return m - @broker.subscriber(second_queue, **self.subscriber_kwargs) + args2, kwargs2 = self.get_subscriber_params(second_queue) + + @broker.subscriber(*args2, **kwargs2) async def handler2(m): mock(m) event.set() @@ -258,7 +263,9 @@ async def test_no_trace_context_create_process_span( mid = self.telemetry_middleware_class(tracer_provider=tracer_provider) broker = self.broker_class(middlewares=(mid,)) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() @@ -295,7 +302,9 @@ async def test_metrics( mid = self.telemetry_middleware_class(meter_provider=meter_provider) broker = self.broker_class(middlewares=(mid,)) - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() @@ -330,7 +339,9 @@ async def test_error_metrics( broker = self.broker_class(middlewares=(mid,)) expected_value_type = "ValueError" - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = 
self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(m): try: raise ValueError diff --git a/tests/opentelemetry/confluent/test_confluent.py b/tests/opentelemetry/confluent/test_confluent.py index 930bf9aeaf..d7ca9fbc08 100644 --- a/tests/opentelemetry/confluent/test_confluent.py +++ b/tests/opentelemetry/confluent/test_confluent.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any, ClassVar, Dict, Optional +from typing import Optional from unittest.mock import Mock import pytest @@ -15,18 +15,15 @@ from faststream.confluent.opentelemetry import KafkaTelemetryMiddleware from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME from faststream.opentelemetry.middleware import MessageAction as Action -from tests.brokers.confluent.test_consume import TestConsume -from tests.brokers.confluent.test_publish import TestPublish +from tests.brokers.confluent.basic import ConfluentTestcaseConfig from ..basic import LocalTelemetryTestcase @pytest.mark.confluent() -class TestTelemetry(LocalTelemetryTestcase): +class TestTelemetry(ConfluentTestcaseConfig, LocalTelemetryTestcase): messaging_system = "kafka" include_messages_counters = True - timeout: int = 10 - subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} broker_class = KafkaBroker telemetry_middleware_class = KafkaTelemetryMiddleware @@ -81,7 +78,9 @@ async def test_batch( expected_link_count = 1 expected_link_attrs = {"messaging.batch.message_count": 3} - @broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() @@ -133,7 +132,9 @@ async def test_batch_publish_with_single_consume( expected_span_count = 8 expected_pub_batch_count = 1 - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) @@ -191,7 +192,9 @@ async def test_single_publish_with_batch_consume( expected_span_count = 6 expected_process_batch_count = 1 - @broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @broker.subscriber(*args, **kwargs) async def handler(m): m.sort() mock(m) @@ -222,21 +225,3 @@ async def handler(m): assert event.is_set() mock.assert_called_once_with(["buy", "hi"]) - - -@pytest.mark.confluent() -class TestPublishWithTelemetry(TestPublish): - def get_broker(self, apply_types: bool = False): - return KafkaBroker( - middlewares=(KafkaTelemetryMiddleware(),), - apply_types=apply_types, - ) - - -@pytest.mark.confluent() -class TestConsumeWithTelemetry(TestConsume): - def get_broker(self, apply_types: bool = False): - return KafkaBroker( - middlewares=(KafkaTelemetryMiddleware(),), - apply_types=apply_types, - ) diff --git a/tests/opentelemetry/kafka/test_kafka.py b/tests/opentelemetry/kafka/test_kafka.py index c8f67b40b1..2c7f46fe3c 100644 --- a/tests/opentelemetry/kafka/test_kafka.py +++ b/tests/opentelemetry/kafka/test_kafka.py @@ -79,7 +79,9 @@ async def test_batch( expected_link_count = 1 expected_link_attrs = {"messaging.batch.message_count": 3} - @broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() @@ -131,7 +133,9 @@ async def 
test_batch_publish_with_single_consume( expected_span_count = 8 expected_pub_batch_count = 1 - @broker.subscriber(queue, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue) + + @broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) @@ -189,7 +193,9 @@ async def test_single_publish_with_batch_consume( expected_span_count = 6 expected_process_batch_count = 1 - @broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(queue, batch=True) + + @broker.subscriber(*args, **kwargs) async def handler(m): m.sort() mock(m) diff --git a/tests/opentelemetry/nats/test_nats.py b/tests/opentelemetry/nats/test_nats.py index 5a6d335766..fff2827170 100644 --- a/tests/opentelemetry/nats/test_nats.py +++ b/tests/opentelemetry/nats/test_nats.py @@ -47,12 +47,13 @@ async def test_batch( expected_span_count = 4 expected_proc_batch_count = 1 - @broker.subscriber( + args, kwargs = self.get_subscriber_params( queue, stream=stream, pull_sub=PullSub(1, batch=True, timeout=30.0), - **self.subscriber_kwargs, ) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() diff --git a/tests/opentelemetry/redis/test_redis.py b/tests/opentelemetry/redis/test_redis.py index 31b9216e65..0a76d03b5b 100644 --- a/tests/opentelemetry/redis/test_redis.py +++ b/tests/opentelemetry/redis/test_redis.py @@ -45,7 +45,9 @@ async def test_batch( expected_link_count = 1 expected_link_attrs = {"messaging.batch.message_count": 3} - @broker.subscriber(list=ListSub(queue, batch=True), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(list=ListSub(queue, batch=True)) + + @broker.subscriber(*args, **kwargs) async def handler(m): mock(m) event.set() @@ -97,7 +99,9 @@ async def test_batch_publish_with_single_consume( expected_span_count = 8 expected_pub_batch_count = 1 - @broker.subscriber(list=ListSub(queue), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(list=ListSub(queue)) + + @broker.subscriber(*args, **kwargs) async def handler(msg): await msgs_queue.put(msg) @@ -155,7 +159,9 @@ async def test_single_publish_with_batch_consume( expected_span_count = 6 expected_process_batch_count = 1 - @broker.subscriber(list=ListSub(queue, batch=True), **self.subscriber_kwargs) + args, kwargs = self.get_subscriber_params(list=ListSub(queue, batch=True)) + + @broker.subscriber(*args, **kwargs) async def handler(m): m.sort() mock(m)
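The tests/mypy/*.py hunks above all apply the same refactor: instead of registering each filtered handler through a separate @broker.subscriber("test", filter=...) call, every module now creates the subscriber object once and reuses it as a decorator, so mypy checks the filtered-registration signature of the subscriber object itself. Below is a minimal runtime sketch of that pattern; the handler names and filter bodies are illustrative, not taken from the repository.

from faststream.kafka import KafkaBroker, KafkaMessage

broker = KafkaBroker()

# Create the subscriber object once; every handler decorated with it
# is registered on the same "test" topic, each with its own filter.
sub = broker.subscriber("test")


def is_json(msg: KafkaMessage) -> bool:
    # Illustrative sync filter: route structured payloads separately.
    return msg.content_type == "application/json"


@sub(filter=is_json)
async def handle_json(body: str) -> None: ...


@sub(filter=lambda msg: True)  # fallback for all remaining messages
async def handle_other(body: str) -> None: ...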
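Likewise, the tests/opentelemetry/*.py hunks drop the per-class timeout and subscriber_kwargs attributes in favour of the shared BaseTestcaseConfig mixin (imported from tests.brokers.base.basic), whose get_subscriber_params() returns the positional and keyword arguments that each test splats into broker.subscriber(*args, **kwargs). The mixin's implementation is not part of this diff; the sketch below is an assumption inferred from the removed attributes (timeout 3 vs. 10, auto_offset_reset="earliest" for Confluent), not the repository's actual code.

from typing import Any, Dict, Tuple


class BaseTestcaseConfig:
    timeout: int = 3

    def get_subscriber_params(
        self, *args: Any, **kwargs: Any
    ) -> Tuple[Tuple[Any, ...], Dict[str, Any]]:
        # Most brokers pass subscriber arguments through unchanged.
        return args, kwargs


class ConfluentTestcaseConfig(BaseTestcaseConfig):
    timeout: int = 10  # Confluent tests previously allowed a longer wait

    def get_subscriber_params(
        self, *args: Any, **kwargs: Any
    ) -> Tuple[Tuple[Any, ...], Dict[str, Any]]:
        # Stands in for the removed subscriber_kwargs class attribute.
        return args, {"auto_offset_reset": "earliest", **kwargs}

A test then obtains args, kwargs = self.get_subscriber_params(queue, batch=True) and applies @broker.subscriber(*args, **kwargs), exactly as the hunks above show.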