From 4ebaa658fbf29c509a8dc71d19b6a5cd25d5ac3d Mon Sep 17 00:00:00 2001
From: Kumaran Rajendhiran
Date: Thu, 6 Jun 2024 15:18:57 +0530
Subject: [PATCH] Chore: update deps (#1503)

* Update dependency versions

* Run pre-commit

* Limit typing-extensions version for python 3.8

---
 .github/workflows/docs_update-references.yaml | 2 +-
 .github/workflows/pr_dependency-review.yaml | 2 +-
 .secrets.baseline | 4 ++--
 faststream/asyncapi/utils.py | 2 +-
 faststream/broker/fastapi/route.py | 2 +-
 faststream/broker/fastapi/router.py | 4 ++--
 faststream/broker/message.py | 2 +-
 faststream/broker/publisher/fake.py | 2 +-
 faststream/broker/publisher/usecase.py | 2 +-
 faststream/broker/subscriber/call_item.py | 2 +-
 faststream/broker/subscriber/usecase.py | 4 ++--
 faststream/broker/utils.py | 2 +-
 faststream/broker/wrapper/call.py | 2 +-
 faststream/cli/supervisors/multiprocess.py | 2 +-
 faststream/cli/utils/logs.py | 2 +-
 faststream/cli/utils/parser.py | 4 ++--
 faststream/confluent/broker/broker.py | 2 +-
 faststream/confluent/parser.py | 4 ++--
 faststream/confluent/publisher/usecase.py | 10 +++++-----
 faststream/kafka/broker/broker.py | 4 ++--
 faststream/kafka/parser.py | 4 ++--
 faststream/kafka/publisher/usecase.py | 6 +++---
 faststream/nats/broker/broker.py | 6 +++---
 faststream/nats/parser.py | 6 +++---
 faststream/nats/publisher/usecase.py | 4 ++--
 faststream/nats/subscriber/usecase.py | 2 +-
 faststream/rabbit/publisher/producer.py | 4 ++--
 faststream/rabbit/publisher/usecase.py | 4 ++--
 faststream/rabbit/subscriber/asyncapi.py | 4 +---
 faststream/redis/broker/broker.py | 6 +++---
 faststream/redis/parser.py | 6 +++---
 faststream/redis/publisher/usecase.py | 8 ++++----
 faststream/redis/subscriber/usecase.py | 2 +-
 faststream/testing/broker.py | 2 +-
 pyproject.toml | 11 ++++++-----
 35 files changed, 67 insertions(+), 68 deletions(-)

diff --git a/.github/workflows/docs_update-references.yaml b/.github/workflows/docs_update-references.yaml
index 92e306784e..30f9d91bea 100644
--- a/.github/workflows/docs_update-references.yaml
+++ b/.github/workflows/docs_update-references.yaml
@@ -2,7 +2,7 @@ name: Generate API References documentation
 
 on:
   pull_request:
-    types: 
+    types:
       - opened
       - synchronize
     paths:
diff --git a/.github/workflows/pr_dependency-review.yaml b/.github/workflows/pr_dependency-review.yaml
index 11ad023407..a241701673 100644
--- a/.github/workflows/pr_dependency-review.yaml
+++ b/.github/workflows/pr_dependency-review.yaml
@@ -8,7 +8,7 @@ name: 'Dependency Review'
 
 on:
   pull_request:
-    types: 
+    types:
       - opened
       - synchronize
     branches:
diff --git a/.secrets.baseline b/.secrets.baseline
index 15213e9215..6cfe352e2a 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -128,7 +128,7 @@
         "filename": "docs/docs/en/release.md",
         "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450",
         "is_verified": false,
-        "line_number": 1282,
+        "line_number": 1308,
         "is_secret": false
       }
     ],
@@ -163,5 +163,5 @@
       }
     ]
   },
-  "generated_at": "2024-05-27T11:45:58Z"
+  "generated_at": "2024-06-06T04:30:54Z"
 }
diff --git a/faststream/asyncapi/utils.py b/faststream/asyncapi/utils.py
index f4ed6b99a1..4edddae6ad 100644
--- a/faststream/asyncapi/utils.py
+++ b/faststream/asyncapi/utils.py
@@ -14,7 +14,7 @@ def resolve_payloads(
     served_words: int = 1,
 ) -> "AnyDict":
     ln = len(payloads)
-    payload: "AnyDict"
+    payload: AnyDict
 
     if ln > 1:
         one_of_payloads = {}
diff --git a/faststream/broker/fastapi/route.py b/faststream/broker/fastapi/route.py
index aa2c3debcb..1ee27caefc 100644
--- a/faststream/broker/fastapi/route.py
+++ b/faststream/broker/fastapi/route.py
@@ -189,7 +189,7 @@ async def real_consumer(message: "NativeMessage[Any]") -> Any:
         """An asynchronous function that processes an incoming message and returns a sendable message."""
         body = message.decoded_body
 
-        fastapi_body: Union["AnyDict", List[Any]]
+        fastapi_body: Union[AnyDict, List[Any]]
         if first_arg is not None:
             if isinstance(body, dict):
                 path = fastapi_body = body or {}
diff --git a/faststream/broker/fastapi/router.py b/faststream/broker/fastapi/router.py
index 2a7cb33d3c..2d4153f4f0 100644
--- a/faststream/broker/fastapi/router.py
+++ b/faststream/broker/fastapi/router.py
@@ -181,7 +181,7 @@ def __init__(
             on_shutdown=on_shutdown,
         )
 
-        self.weak_dependencies_provider: "WeakSet[Any]" = WeakSet()
+        self.weak_dependencies_provider: WeakSet[Any] = WeakSet()
         if dependency_overrides_provider is not None:
             self.weak_dependencies_provider.add(dependency_overrides_provider)
 
@@ -306,7 +306,7 @@ async def start_broker_lifespan(
 
         async with lifespan_context(app) as maybe_context:
             if maybe_context is None:
-                context: "AnyDict" = {}
+                context: AnyDict = {}
             else:
                 context = dict(maybe_context)
 
diff --git a/faststream/broker/message.py b/faststream/broker/message.py
index beec9fe555..dbe89b089d 100644
--- a/faststream/broker/message.py
+++ b/faststream/broker/message.py
@@ -66,7 +66,7 @@ async def reject(self) -> None:
 def decode_message(message: "StreamMessage[Any]") -> "DecodedMessage":
     """Decodes a message."""
     body: Any = getattr(message, "body", message)
-    m: "DecodedMessage" = body
+    m: DecodedMessage = body
 
     if (
         content_type := getattr(message, "content_type", Parameter.empty)
diff --git a/faststream/broker/publisher/fake.py b/faststream/broker/publisher/fake.py
index 492677abdb..d77c43406b 100644
--- a/faststream/broker/publisher/fake.py
+++ b/faststream/broker/publisher/fake.py
@@ -39,7 +39,7 @@ async def publish(
             **kwargs,
         }
 
-        call: "AsyncFunc" = self.method
+        call: AsyncFunc = self.method
         for m in chain(_extra_middlewares, self.middlewares):
             call = partial(m, call)
 
diff --git a/faststream/broker/publisher/usecase.py b/faststream/broker/publisher/usecase.py
index 1bdbc74513..c401760a81 100644
--- a/faststream/broker/publisher/usecase.py
+++ b/faststream/broker/publisher/usecase.py
@@ -138,7 +138,7 @@ def __call__(
         return handler_call
 
     def get_payloads(self) -> List[Tuple["AnyDict", str]]:
-        payloads: List[Tuple["AnyDict", str]] = []
+        payloads: List[Tuple[AnyDict, str]] = []
 
         if self.schema_:
             params = {"response__": (self.schema_, ...)}
diff --git a/faststream/broker/subscriber/call_item.py b/faststream/broker/subscriber/call_item.py
index cb6e750353..77bdb70c9a 100644
--- a/faststream/broker/subscriber/call_item.py
+++ b/faststream/broker/subscriber/call_item.py
@@ -155,7 +155,7 @@ async def call(
         _extra_middlewares: Iterable["SubscriberMiddleware[Any]"],
     ) -> Any:
         """Execute wrapped handler with consume middlewares."""
-        call: "AsyncFuncAny" = self.handler.call_wrapped
+        call: AsyncFuncAny = self.handler.call_wrapped
 
         for middleware in chain(self.item_middlewares, _extra_middlewares):
             call = partial(middleware, call)
diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py
index 2e5ca52151..2b46a4bf1a 100644
--- a/faststream/broker/subscriber/usecase.py
+++ b/faststream/broker/subscriber/usecase.py
@@ -308,7 +308,7 @@ async def consume(self, msg: MsgType) -> Any:
             await stack.enter_async_context(self._stop_scope())
 
             # enter all middlewares
-            middlewares: List["BaseMiddleware"] = []
+            middlewares: List[BaseMiddleware] = []
             for base_m in self._broker_middlewares:
                 middleware = base_m(msg)
                 middlewares.append(middleware)
@@ -412,7 +412,7 @@ def get_description(self) -> Optional[str]:
 
     def get_payloads(self) -> List[Tuple["AnyDict", str]]:
         """Get the payloads of the handler."""
-        payloads: List[Tuple["AnyDict", str]] = []
+        payloads: List[Tuple[AnyDict, str]] = []
 
         for h in self.calls:
             if h.dependant is None:
diff --git a/faststream/broker/utils.py b/faststream/broker/utils.py
index 6903f4c94d..568a1217f8 100644
--- a/faststream/broker/utils.py
+++ b/faststream/broker/utils.py
@@ -60,7 +60,7 @@ class MultiLock:
 
     def __init__(self) -> None:
         """Initialize a new instance of the class."""
-        self.queue: "asyncio.Queue[None]" = asyncio.Queue()
+        self.queue: asyncio.Queue[None] = asyncio.Queue()
 
     def __enter__(self) -> Self:
         """Enter the context."""
diff --git a/faststream/broker/wrapper/call.py b/faststream/broker/wrapper/call.py
index 2dda3bf1ea..0c997eb5b2 100644
--- a/faststream/broker/wrapper/call.py
+++ b/faststream/broker/wrapper/call.py
@@ -161,7 +161,7 @@ def set_wrapped(
 
     f: Callable[..., Awaitable[Any]] = to_async(call)
 
-    dependent: Optional["CallModel[..., Any]"] = None
+    dependent: Optional[CallModel[..., Any]] = None
     if _get_dependant is None:
         dependent = build_call_model(
             f,
diff --git a/faststream/cli/supervisors/multiprocess.py b/faststream/cli/supervisors/multiprocess.py
index a44f5c27c9..e7ab9dd413 100644
--- a/faststream/cli/supervisors/multiprocess.py
+++ b/faststream/cli/supervisors/multiprocess.py
@@ -21,7 +21,7 @@ def __init__(
         super().__init__(target, args, None)
 
         self.workers = workers
-        self.processes: List["SpawnProcess"] = []
+        self.processes: List[SpawnProcess] = []
 
     def startup(self) -> None:
         logger.info(f"Started parent process [{self.pid}]")
diff --git a/faststream/cli/utils/logs.py b/faststream/cli/utils/logs.py
index 35ca288efb..2f223455f6 100644
--- a/faststream/cli/utils/logs.py
+++ b/faststream/cli/utils/logs.py
@@ -63,6 +63,6 @@ def set_log_level(level: int, app: "FastStream") -> None:
     if app.logger and isinstance(app.logger, logging.Logger):
         app.logger.setLevel(level)
 
-    broker_logger: Optional["LoggerProto"] = getattr(app.broker, "logger", None)
+    broker_logger: Optional[LoggerProto] = getattr(app.broker, "logger", None)
     if broker_logger is not None and isinstance(broker_logger, logging.Logger):
         broker_logger.setLevel(level)
diff --git a/faststream/cli/utils/parser.py b/faststream/cli/utils/parser.py
index 1c58ce65a4..00c904d774 100644
--- a/faststream/cli/utils/parser.py
+++ b/faststream/cli/utils/parser.py
@@ -7,10 +7,10 @@
 
 def parse_cli_args(*args: str) -> Tuple[str, Dict[str, "SettingField"]]:
     """Parses command line arguments."""
-    extra_kwargs: Dict[str, "SettingField"] = {}
+    extra_kwargs: Dict[str, SettingField] = {}
 
     k: str = ""
-    v: "SettingField"
+    v: SettingField
 
     field_args: List[str] = []
     app = ""
diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py
index 960b2606ad..95ceb2bfa2 100644
--- a/faststream/confluent/broker/broker.py
+++ b/faststream/confluent/broker/broker.py
@@ -523,7 +523,7 @@ async def publish_batch(
 
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish_batch
+        call: AsyncFunc = self._producer.publish_batch
 
         for m in self._middlewares:
             call = partial(m(None).publish_scope, call)
diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py
index a093a16da7..3480aee092 100644
--- a/faststream/confluent/parser.py
+++ b/faststream/confluent/parser.py
@@ -26,7 +26,7 @@ async def parse_message(
         offset = message.offset()
         _, timestamp = message.timestamp()
 
-        handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_")
+        handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
 
         return KafkaMessage(
             body=body,
@@ -59,7 +59,7 @@ async def parse_message_batch(
 
         _, first_timestamp = first.timestamp()
 
-        handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_")
+        handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
 
         return KafkaMessage(
             body=body,
diff --git a/faststream/confluent/publisher/usecase.py b/faststream/confluent/publisher/usecase.py
index ae0a9da319..b73b0de78c 100644
--- a/faststream/confluent/publisher/usecase.py
+++ b/faststream/confluent/publisher/usecase.py
@@ -113,7 +113,7 @@ async def publish(
     ) -> Optional[Any]:
         assert self._producer, NOT_CONNECTED_YET  # nosec B101
 
-        kwargs: "AnyDict" = {
+        kwargs: AnyDict = {
             "key": key or self.key,
             # basic args
             "topic": topic or self.topic,
@@ -124,7 +124,7 @@ async def publish(
             "correlation_id": correlation_id or gen_cor_id(),
         }
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
@@ -155,13 +155,13 @@ async def publish(  # type: ignore[override]
     ) -> None:
         assert self._producer, NOT_CONNECTED_YET  # nosec B101
 
-        msgs: Iterable["SendableMessage"]
+        msgs: Iterable[SendableMessage]
         if extra_messages:
             msgs = (cast("SendableMessage", message), *extra_messages)
         else:
             msgs = cast(Iterable["SendableMessage"], message)
 
-        kwargs: "AnyDict" = {
+        kwargs: AnyDict = {
             "topic": topic or self.topic,
             "partition": partition or self.partition,
             "timestamp_ms": timestamp_ms,
@@ -170,7 +170,7 @@ async def publish(  # type: ignore[override]
             "correlation_id": correlation_id or gen_cor_id(),
         }
 
-        call: "AsyncFunc" = self._producer.publish_batch
+        call: AsyncFunc = self._producer.publish_batch
 
         for m in chain(
             (
diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py
index 42cc4f281b..0827e09060 100644
--- a/faststream/kafka/broker/broker.py
+++ b/faststream/kafka/broker/broker.py
@@ -607,7 +607,7 @@ async def connect(  # type: ignore[override]
         To startup subscribers too you should use `broker.start()` after/instead this method.
         """
         if bootstrap_servers is not Parameter.empty:
-            connect_kwargs: "AnyDict" = {
+            connect_kwargs: AnyDict = {
                 **kwargs,
                 "bootstrap_servers": bootstrap_servers,
             }
@@ -792,7 +792,7 @@ async def publish_batch(
 
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish_batch
+        call: AsyncFunc = self._producer.publish_batch
 
         for m in self._middlewares:
             call = partial(m(None).publish_scope, call)
diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py
index 8487eb3d0b..49924c9e97 100644
--- a/faststream/kafka/parser.py
+++ b/faststream/kafka/parser.py
@@ -21,7 +21,7 @@ async def parse_message(
     ) -> "StreamMessage[ConsumerRecord]":
         """Parses a Kafka message."""
         headers = {i: j.decode() for i, j in message.headers}
-        handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_")
+        handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
         return KafkaMessage(
             body=message.value,
             headers=headers,
@@ -51,7 +51,7 @@ async def parse_message_batch(
 
         headers = next(iter(batch_headers), {})
 
-        handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_")
+        handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
 
         return KafkaMessage(
             body=body,
diff --git a/faststream/kafka/publisher/usecase.py b/faststream/kafka/publisher/usecase.py
index b254334a61..8fec375bba 100644
--- a/faststream/kafka/publisher/usecase.py
+++ b/faststream/kafka/publisher/usecase.py
@@ -170,7 +170,7 @@ async def publish(
         reply_to = reply_to or self.reply_to
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
@@ -250,7 +250,7 @@ async def publish(  # type: ignore[override]
     ) -> None:
         assert self._producer, NOT_CONNECTED_YET  # nosec B101
 
-        msgs: Iterable["SendableMessage"]
+        msgs: Iterable[SendableMessage]
         if extra_messages:
             msgs = (cast("SendableMessage", message), *extra_messages)
         else:
@@ -262,7 +262,7 @@ async def publish(  # type: ignore[override]
         reply_to = reply_to or self.reply_to
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish_batch
+        call: AsyncFunc = self._producer.publish_batch
 
         for m in chain(
             (
diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py
index e6806172c3..35e35086c8 100644
--- a/faststream/nats/broker/broker.py
+++ b/faststream/nats/broker/broker.py
@@ -564,7 +564,7 @@ async def connect(  # type: ignore[override]
         To startup subscribers too you should use `broker.start()` after/instead this method.
         """
         if servers is not Parameter.empty:
-            connect_kwargs: "AnyDict" = {
+            connect_kwargs: AnyDict = {
                 **kwargs,
                 "servers": servers,
             }
@@ -768,8 +768,8 @@ def setup_subscriber(  # type: ignore[override]
         subscriber: "AsyncAPISubscriber",
     ) -> None:
         connection: Union[
-            "Client",
-            "JetStreamContext",
+            Client,
+            JetStreamContext,
             KVBucketDeclarer,
             OSBucketDeclarer,
             None,
diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py
index ef18834eca..206e851999 100644
--- a/faststream/nats/parser.py
+++ b/faststream/nats/parser.py
@@ -32,7 +32,7 @@ def get_path(
         self,
         subject: str,
     ) -> Optional["AnyDict"]:
-        path: Optional["AnyDict"] = None
+        path: Optional[AnyDict] = None
 
         if (path_re := self.__path_re) is not None and (
             match := path_re.match(subject)
@@ -136,9 +136,9 @@ async def decode_batch(
         self,
         msg: "StreamMessage[List[Msg]]",
     ) -> List["DecodedMessage"]:
-        data: List["DecodedMessage"] = []
+        data: List[DecodedMessage] = []
 
-        path: Optional["AnyDict"] = None
+        path: Optional[AnyDict] = None
         for m in msg.raw_message:
             one_msg = await self.parse_message(m, path=path)
             path = one_msg.path
diff --git a/faststream/nats/publisher/usecase.py b/faststream/nats/publisher/usecase.py
index c55c254b65..6f52bd2d96 100644
--- a/faststream/nats/publisher/usecase.py
+++ b/faststream/nats/publisher/usecase.py
@@ -125,7 +125,7 @@ async def publish(
     ) -> Optional[Any]:
         assert self._producer, NOT_CONNECTED_YET  # nosec B101
 
-        kwargs: "AnyDict" = {
+        kwargs: AnyDict = {
             "subject": subject or self.subject,
             "headers": headers or self.headers,
             "reply_to": reply_to or self.reply_to,
@@ -139,7 +139,7 @@ async def publish(
         if stream := stream or getattr(self.stream, "name", None):
             kwargs.update({"stream": stream, "timeout": timeout or self.timeout})
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py
index 76ae509052..322ef41aa3 100644
--- a/faststream/nats/subscriber/usecase.py
+++ b/faststream/nats/subscriber/usecase.py
@@ -295,7 +295,7 @@ def get_log_context(
 
 class _TasksMixin(LogicSubscriber[Any]):
     def __init__(self, **kwargs: Any) -> None:
-        self.tasks: List["asyncio.Task[Any]"] = []
+        self.tasks: List[asyncio.Task[Any]] = []
 
         super().__init__(**kwargs)
 
diff --git a/faststream/rabbit/publisher/producer.py b/faststream/rabbit/publisher/producer.py
index 09b4ffbb3e..f7a4013bab 100644
--- a/faststream/rabbit/publisher/producer.py
+++ b/faststream/rabbit/publisher/producer.py
@@ -86,7 +86,7 @@ async def publish(  # type: ignore[override]
     ) -> Optional[Any]:
         """Publish a message to a RabbitMQ queue."""
         context: AsyncContextManager[
-            Optional["MemoryObjectReceiveStream[IncomingMessage]"]
+            Optional[MemoryObjectReceiveStream[IncomingMessage]]
         ]
         if rpc:
             if reply_to is not None:
@@ -126,7 +126,7 @@ async def publish(  # type: ignore[override]
                 return r
 
             else:
-                msg: Optional["IncomingMessage"] = None
+                msg: Optional[IncomingMessage] = None
                 with timeout_scope(rpc_timeout, raise_timeout):
                     msg = await response_queue.receive()
 
diff --git a/faststream/rabbit/publisher/usecase.py b/faststream/rabbit/publisher/usecase.py
index 0472bbc127..6df3b1078a 100644
--- a/faststream/rabbit/publisher/usecase.py
+++ b/faststream/rabbit/publisher/usecase.py
@@ -221,7 +221,7 @@ async def publish(
     ) -> Optional[Any]:
         assert self._producer, NOT_CONNECTED_YET  # nosec B101
 
-        kwargs: "AnyDict" = {
+        kwargs: AnyDict = {
             "routing_key": routing_key
             or self.routing_key
             or RabbitQueue.validate(queue or self.queue).routing,
@@ -238,7 +238,7 @@ async def publish(
             **publish_kwargs,
         }
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
diff --git a/faststream/rabbit/subscriber/asyncapi.py b/faststream/rabbit/subscriber/asyncapi.py
index 2b0cb4cd5b..05313a6247 100644
--- a/faststream/rabbit/subscriber/asyncapi.py
+++ b/faststream/rabbit/subscriber/asyncapi.py
@@ -18,9 +18,7 @@ class AsyncAPISubscriber(LogicSubscriber):
     """AsyncAPI-compatible Rabbit Subscriber class."""
 
     def get_name(self) -> str:
-        return (
-            f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}"
-        )
+        return f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}"
 
     def get_schema(self) -> Dict[str, Channel]:
         payloads = self.get_payloads()
diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py
index 93bea0a7f4..4f30e8adfb 100644
--- a/faststream/redis/broker/broker.py
+++ b/faststream/redis/broker/broker.py
@@ -257,7 +257,7 @@ async def connect(  # type: ignore[override]
     ) -> "Redis[bytes]":
         """Connect to the Redis server."""
         if url is not Parameter.empty:
-            connect_kwargs: "AnyDict" = {
+            connect_kwargs: AnyDict = {
                 "url": url,
                 **kwargs,
             }
@@ -291,7 +291,7 @@ async def _connect(  # type: ignore[override]
         parser_class: Type["BaseParser"],
         encoder_class: Type["Encoder"],
     ) -> "Redis[bytes]":
-        url_options: "AnyDict" = {
+        url_options: AnyDict = {
             **dict(parse_url(url)),
             **parse_security(self.security),
             "client_name": client_name,
@@ -467,7 +467,7 @@ async def publish_batch(
 
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish_batch
+        call: AsyncFunc = self._producer.publish_batch
 
         for m in self._middlewares:
             call = partial(m(None).publish_scope, call)
diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py
index 52806b7fbd..bad91875ef 100644
--- a/faststream/redis/parser.py
+++ b/faststream/redis/parser.py
@@ -107,7 +107,7 @@ def encode(
 
     @staticmethod
     def parse(data: bytes) -> Tuple[bytes, "AnyDict"]:
-        headers: "AnyDict"
+        headers: AnyDict
 
         try:
             # FastStream message format
@@ -192,7 +192,7 @@ def _parse_data(
         message: Mapping[str, Any],
     ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]:
         body: List[Any] = []
-        batch_headers: List["AnyDict"] = []
+        batch_headers: List[AnyDict] = []
 
         for x in message["data"]:
             msg_data, msg_headers = _decode_batch_body_item(x)
@@ -230,7 +230,7 @@ def _parse_data(
         message: Mapping[str, Any],
     ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]:
         body: List[Any] = []
-        batch_headers: List["AnyDict"] = []
+        batch_headers: List[AnyDict] = []
 
         for x in message["data"]:
             msg_data, msg_headers = _decode_batch_body_item(x.get(bDATA_KEY, x))
diff --git a/faststream/redis/publisher/usecase.py b/faststream/redis/publisher/usecase.py
index a887140d84..726506985d 100644
--- a/faststream/redis/publisher/usecase.py
+++ b/faststream/redis/publisher/usecase.py
@@ -159,7 +159,7 @@ async def publish(  # type: ignore[override]
         headers = headers or self.headers
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
@@ -284,7 +284,7 @@ async def publish(  # type: ignore[override]
         reply_to = reply_to or self.reply_to
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
@@ -341,7 +341,7 @@ async def publish(  # type: ignore[override]
         list_sub = ListSub.validate(list or self.list)
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish_batch
+        call: AsyncFunc = self._producer.publish_batch
 
         for m in chain(
             (
@@ -465,7 +465,7 @@ async def publish(  # type: ignore[override]
         headers = headers or self.headers
         correlation_id = correlation_id or gen_cor_id()
 
-        call: "AsyncFunc" = self._producer.publish
+        call: AsyncFunc = self._producer.publish
 
         for m in chain(
             (
diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py
index 5dee39ef52..9e60138b9f 100644
--- a/faststream/redis/subscriber/usecase.py
+++ b/faststream/redis/subscriber/usecase.py
@@ -94,7 +94,7 @@ def __init__(
         )
 
         self._client = None
-        self.task: Optional["asyncio.Task[None]"] = None
+        self.task: Optional[asyncio.Task[None]] = None
 
     @override
     def setup(  # type: ignore[override]
diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py
index 249e5c6846..df0fdbf083 100644
--- a/faststream/testing/broker.py
+++ b/faststream/testing/broker.py
@@ -93,7 +93,7 @@ async def __aexit__(self, *args: Any) -> None:
         await self._ctx.__aexit__(*args)
 
         # TODO: remove useless middlewares filter
-        middlewares: Tuple["BrokerMiddleware[Any]", ...] = (
+        middlewares: Tuple[BrokerMiddleware[Any], ...] = (
             CriticalLogMiddleware(  # type: ignore[arg-type]
                 logger=self.broker.logger,
                 log_level=self.broker._msg_log_level,
diff --git a/pyproject.toml b/pyproject.toml
index 5b22e5c4c8..fce5418594 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -58,7 +58,8 @@ dependencies = [
     "anyio>=3.7.1,<5",
     "fast-depends>=2.4.0b0,<2.5.0",
     "typer>=0.9,!=0.12,<1",
-    "typing-extensions>=4.8.0",
+    "typing-extensions>=4.8.0,<4.12.1; python_version < '3.9'",
+    "typing-extensions>=4.8.0; python_version >= '3.9'",
 ]
 
 [project.optional-dependencies]
@@ -84,7 +85,7 @@ devdocs = [
     "mdx-include==1.4.2",
     "mkdocstrings[python]==0.25.1",
     "mkdocs-literate-nav==0.6.1",
-    "mkdocs-git-revision-date-localized-plugin==1.2.5",
+    "mkdocs-git-revision-date-localized-plugin==1.2.6",
     "mike==2.1.1",  # versioning
     "mkdocs-minify-plugin==0.8.0",
     "mkdocs-macros-plugin==1.0.5",  # includes with variables
@@ -111,14 +112,14 @@ types = [
 
 lint = [
     "faststream[types]",
-    "ruff==0.4.4",
+    "ruff==0.4.7",
     "bandit==1.7.8",
     "semgrep==1.74.0",
     "codespell==2.3.0",
 ]
 
 test-core = [
-    "coverage[toml]==7.5.2",
+    "coverage[toml]==7.5.3",
     "pytest==8.2.1",
     "pytest-asyncio==0.23.7",
     "dirty-equals==0.7.1.post0",
@@ -130,7 +131,7 @@ testing = [
     "pydantic-settings>=2.0.0,<3.0.0",
     "httpx==0.27.0",
     "PyYAML==6.0.1",
-    "watchfiles==0.21.0",
+    "watchfiles==0.22.0",
     "email-validator==2.1.1",
 ]
 
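
Note on the annotation changes above: the quotes are only dropped from annotations that sit inside function and method bodies (payload, call, handler, kwargs, and similar locals), while signature and return annotations such as ) -> "AnyDict": keep their string form. Python never evaluates variable annotations written inside a function body, so names that may only be available to type checkers can safely appear unquoted there. A minimal sketch of that pattern, using a hypothetical mylib.types.AnyDict rather than anything from the FastStream codebase:

from typing import TYPE_CHECKING, Any, List

if TYPE_CHECKING:
    # Imported for type checkers only; this block never runs at runtime.
    from mylib.types import AnyDict  # hypothetical alias, e.g. Dict[str, Any]


def collect(items: List[Any]) -> "AnyDict":
    # Signature and return annotations are evaluated when the function is
    # defined, so a TYPE_CHECKING-only name must stay quoted up there.
    payload: AnyDict = {}
    # Annotations inside the function body are never evaluated at runtime,
    # so the quotes can be dropped here even though AnyDict does not exist
    # when this module is imported.
    for i, item in enumerate(items):
        payload[str(i)] = item
    return payload

The split typing-extensions requirement in pyproject.toml uses standard PEP 508 environment markers, so the <4.12.1 cap applies only to installs running on Python 3.8.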