Commit c0c3da1

Merge branch '0.6.0' into 0.6.0
Lancetnik authored Jan 8, 2025
2 parents 4fed000 + d5fb85e commit c0c3da1
Showing 4 changed files with 52 additions and 29 deletions.
20 changes: 10 additions & 10 deletions faststream/confluent/client.py
@@ -138,7 +138,7 @@ async def send(
         timestamp_ms: Optional[int] = None,
         headers: Optional[list[tuple[str, Union[str, bytes]]]] = None,
         no_confirm: bool = False,
-    ) -> "asyncio.Future":
+    ) -> "Union[asyncio.Future[Optional[Message]], Optional[Message]]":
         """Sends a single message to a Kafka topic."""
         kwargs: _SendKwargs = {
             "value": value,
Expand All @@ -152,22 +152,22 @@ async def send(
if timestamp_ms is not None:
kwargs["timestamp"] = timestamp_ms

if not no_confirm:
result_future: asyncio.Future[Optional[Message]] = asyncio.Future()
loop = asyncio.get_running_loop()
result_future: asyncio.Future[Optional[Message]] = loop.create_future()

def ack_callback(err: Any, msg: Optional[Message]) -> None:
if err or (msg is not None and (err := msg.error())):
result_future.set_exception(KafkaException(err))
else:
result_future.set_result(msg)
def ack_callback(err: Any, msg: Optional[Message]) -> None:
if err or (msg is not None and (err := msg.error())):
loop.call_soon_threadsafe(result_future.set_exception, KafkaException(err))
else:
loop.call_soon_threadsafe(result_future.set_result, msg)

kwargs["on_delivery"] = ack_callback
kwargs["on_delivery"] = ack_callback

# should be sync to prevent segfault
self.producer.produce(topic, **kwargs)

if not no_confirm:
await result_future
return await result_future
return result_future

def create_batch(self) -> "BatchBuilder":
Expand Down
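
Why the change above matters: confluent-kafka runs delivery callbacks on whichever thread drives poll()/flush(), which is generally not the event-loop thread, so resolving an asyncio future directly from ack_callback is not thread-safe; loop.create_future() plus loop.call_soon_threadsafe hands the result back to the loop instead. A minimal standalone sketch of the pattern — FakeProducer and its timer-driven callback are stand-ins for illustration, not FastStream or confluent-kafka code:

import asyncio
import threading
from typing import Any, Callable, Optional

class FakeProducer:
    """Stand-in for a confluent-kafka-style producer: fires on_delivery from another thread."""

    def produce(self, topic: str, value: bytes, on_delivery: Callable[[Any, Any], None]) -> None:
        # Simulate the librdkafka poller thread invoking the delivery callback.
        threading.Timer(0.05, on_delivery, args=(None, f"{topic}:{value!r}")).start()

async def send(producer: FakeProducer, topic: str, value: bytes) -> Optional[Any]:
    loop = asyncio.get_running_loop()
    result: "asyncio.Future[Optional[Any]]" = loop.create_future()

    def on_delivery(err: Any, msg: Any) -> None:
        # Runs on the producer thread, so hop back onto the event loop
        # before touching the future.
        if err is not None:
            loop.call_soon_threadsafe(result.set_exception, RuntimeError(str(err)))
        else:
            loop.call_soon_threadsafe(result.set_result, msg)

    producer.produce(topic, value, on_delivery=on_delivery)
    return await result

if __name__ == "__main__":
    print(asyncio.run(send(FakeProducer(), "demo", b"payload")))

Here the callback fires on a timer thread, mirroring the producer's polling thread, and the awaiting coroutine still resolves cleanly.
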
15 changes: 9 additions & 6 deletions faststream/confluent/publisher/producer.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, NoReturn, Optional, Union

 from typing_extensions import override

Expand All @@ -13,6 +13,8 @@
if TYPE_CHECKING:
import asyncio

from confluent_kafka import Message

from faststream._internal.types import CustomCallable
from faststream.confluent.client import AsyncConfluentProducer
from faststream.confluent.response import KafkaPublishCommand
@@ -43,14 +45,14 @@ async def disconnect(self) -> None:
     def __bool__(self) -> bool:
         return bool(self._producer)

-    async def ping(self, timeout: float) -> None:
+    async def ping(self, timeout: float) -> bool:
         return await self._producer.ping(timeout=timeout)

     @override
     async def publish(  # type: ignore[override]
         self,
         cmd: "KafkaPublishCommand",
-    ) -> "asyncio.Future":
+    ) -> "Union[asyncio.Future[Optional[Message]], Optional[Message]]":
         """Publish a message to a topic."""
         message, content_type = encode_message(cmd.body)
Expand All @@ -69,7 +71,8 @@ async def publish( # type: ignore[override]
no_confirm=cmd.no_confirm,
)

async def publish_batch(
@override
async def publish_batch( # type: ignore[override]
self,
cmd: "KafkaPublishCommand",
) -> None:
@@ -104,9 +107,9 @@ async def publish_batch(
         )

     @override
-    async def request(
+    async def request(  # type: ignore[override]
         self,
         cmd: "KafkaPublishCommand",
-    ) -> Any:
+    ) -> NoReturn:
         msg = "Kafka doesn't support `request` method without test client."
         raise FeatureNotSupportedException(msg)
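
For context on the widened Union[asyncio.Future[Optional[Message]], Optional[Message]] annotation: with confirmation enabled the call awaits delivery and returns the Message (or None), while no_confirm=True hands back the still-pending future. A toy, self-contained sketch of that contract — StubProducer and its send() are hypothetical stand-ins, not the FastStream client:

import asyncio
from typing import Any, Optional, Union

class StubProducer:
    """Toy producer mimicking the two return modes described by the new annotation."""

    async def send(
        self, topic: str, value: bytes, no_confirm: bool = False
    ) -> Union["asyncio.Future[Optional[Any]]", Optional[Any]]:
        loop = asyncio.get_running_loop()
        future: "asyncio.Future[Optional[Any]]" = loop.create_future()
        # Pretend the broker acknowledges the message a moment later.
        loop.call_later(0.01, future.set_result, f"{topic}:{value!r}")
        if not no_confirm:
            return await future  # confirmed: return the delivered message
        return future            # no_confirm: return the pending future

async def main() -> None:
    producer = StubProducer()
    confirmed = await producer.send("demo", b"a")                  # Message-like result
    pending = await producer.send("demo", b"b", no_confirm=True)   # still-pending future
    print(confirmed, await pending)

asyncio.run(main())
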
29 changes: 19 additions & 10 deletions faststream/specification/asyncapi/v2_6_0/generate.py
@@ -150,16 +150,25 @@ def get_broker_channels(
     channels = {}

     for h in broker._subscribers:
-        # TODO: add duplication key warning
-        channels.update({
-            key: Channel.from_sub(channel) for key, channel in h.schema().items()
-        })
+        for key, channel in h.schema().items():
+            if key in channels:
+                warnings.warn(
+                    f"Overwrite channel handler, channels have the same names: `{key}`",
+                    RuntimeWarning,
+                    stacklevel=1,
+                )
+            channels[key] = Channel.from_sub(channel)

     for p in broker._publishers:
-        # TODO: add duplication key warning
-        channels.update({
-            key: Channel.from_pub(channel) for key, channel in p.schema().items()
-        })
+        for key, channel in p.schema().items():
+            if key in channels:
+                warnings.warn(
+                    f"Overwrite channel handler, channels have the same names: `{key}`",
+                    RuntimeWarning,
+                    stacklevel=1,
+                )
+
+            channels[key] = Channel.from_pub(channel)

     return channels

@@ -205,7 +214,7 @@ def _resolve_msg_payloads(
     payloads.update(m.payload.pop(DEF_KEY, {}))
     p_title = m.payload.get("title", f"{channel_name}Payload")
     p_title = clear_key(p_title)
-    if p_title in payloads:
+    if p_title in payloads and payloads[p_title] != m.payload:
         warnings.warn(
             f"Overwriting the message schema, data types have the same name: `{p_title}`",
             RuntimeWarning,
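
Both loops in get_broker_channels above replace a silent channels.update(...) with an explicit duplicate check, so colliding subscriber/publisher channel names now raise a RuntimeWarning before being overwritten. The same policy in isolation — merge_channels is a hypothetical helper, not FastStream internals:

import warnings

def merge_channels(existing: dict[str, object], new_items: dict[str, object]) -> dict[str, object]:
    """Merge channel definitions, warning instead of silently overwriting duplicates."""
    for key, channel in new_items.items():
        if key in existing:
            warnings.warn(
                f"Overwrite channel handler, channels have the same names: `{key}`",
                RuntimeWarning,
                stacklevel=1,
            )
        existing[key] = channel
    return existing

channels = merge_channels({"updates": "subscriber-A"}, {"updates": "subscriber-B"})  # warns, keeps subscriber-B
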
17 changes: 14 additions & 3 deletions faststream/specification/asyncapi/v3_0_0/generate.py
@@ -158,7 +158,13 @@ def get_broker_channels(
             channel_obj = Channel.from_sub(sub_key, sub_channel)

             channel_key = clear_key(sub_key)
-            # TODO: add duplication key warning
+            if channel_key in channels:
+                warnings.warn(
+                    f"Overwrite channel handler, channels have the same names: `{channel_key}`",
+                    RuntimeWarning,
+                    stacklevel=1,
+                )
+
             channels[channel_key] = channel_obj

             operations[f"{channel_key}Subscribe"] = Operation.from_sub(
Expand All @@ -177,7 +183,12 @@ def get_broker_channels(
channel_obj = Channel.from_pub(pub_key, pub_channel)

channel_key = clear_key(pub_key)
# TODO: add duplication key warning
if channel_key in channels:
warnings.warn(
f"Overwrite channel handler, channels have the same names: `{channel_key}`",
RuntimeWarning,
stacklevel=1,
)
channels[channel_key] = channel_obj

operations[channel_key] = Operation.from_pub(
@@ -231,7 +242,7 @@ def _resolve_msg_payloads(
     payload_name = m.payload.get("title", f"{channel_name}:{message_name}:Payload")
     payload_name = clear_key(payload_name)

-    if payload_name in payloads:
+    if payload_name in payloads and payloads[payload_name] != m.payload:
         warnings.warn(
             f"Overwriting the message schema, data types have the same name: `{payload_name}`",
             RuntimeWarning,
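
The added payloads[payload_name] != m.payload comparison (here and in the v2.6.0 generator above) limits the warning to genuine conflicts: re-registering an identical schema under the same title stays silent. A standalone sketch of that guard, with a hypothetical register_payload helper and simplified types:

import warnings
from typing import Any

def register_payload(payloads: dict[str, Any], name: str, schema: Any) -> None:
    """Register a message schema, warning only when an existing schema actually differs."""
    if name in payloads and payloads[name] != schema:
        warnings.warn(
            f"Overwriting the message schema, data types have the same name: `{name}`",
            RuntimeWarning,
            stacklevel=1,
        )
    payloads[name] = schema

payloads: dict[str, Any] = {}
register_payload(payloads, "UserPayload", {"type": "object"})
register_payload(payloads, "UserPayload", {"type": "object"})  # identical: no warning
register_payload(payloads, "UserPayload", {"type": "string"})  # conflict: RuntimeWarning
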
