from __future__ import annotations

import asyncio
import json
import time
from collections.abc import AsyncIterator
from typing import Any


class UpdateBroker:
    """Fan-out broker: delivers published events to every subscribed queue.

    Subscribers that cannot keep up (their bounded queue is full) are
    dropped, so a single slow consumer can never stall the publisher.
    """

    def __init__(self) -> None:
        # Each subscriber owns one bounded queue; set membership is liveness.
        self._subscribers: set[asyncio.Queue[dict[str, object]]] = set()

    def subscribe(self) -> asyncio.Queue[dict[str, object]]:
        """Register and return a fresh bounded message queue for one consumer."""
        # maxsize bounds per-consumer memory; a full queue marks it stale.
        queue: asyncio.Queue[dict[str, object]] = asyncio.Queue(maxsize=20)
        self._subscribers.add(queue)
        return queue

    def unsubscribe(self, queue: asyncio.Queue[dict[str, object]]) -> None:
        """Remove *queue* from the subscriber set (no-op if already gone)."""
        self._subscribers.discard(queue)

    async def publish(self, event: str, payload: dict[str, object] | None = None) -> None:
        """Broadcast *event* (with optional *payload*) to all subscribers.

        Delivery is non-blocking: subscribers whose queues are full are
        treated as stale and unsubscribed rather than awaited.
        """
        message: dict[str, object] = {
            "event": event,
            "data": {
                "type": event,
                "published_at": time.time(),
                # Payload keys may deliberately override "type"/"published_at".
                **(payload or {}),
            },
        }
        stale_queues: list[asyncio.Queue[dict[str, object]]] = []
        # Iterate a snapshot: unsubscribe() mutates the live set.
        for queue in tuple(self._subscribers):
            try:
                queue.put_nowait(message)
            except asyncio.QueueFull:
                stale_queues.append(queue)
        for queue in stale_queues:
            self.unsubscribe(queue)


async def stream_sse_events(
    broker: UpdateBroker,
) -> AsyncIterator[str]:
    """Yield Server-Sent-Events frames for one subscriber of *broker*.

    Emits an initial ``connected`` event, then one frame per published
    message. After 25 s of silence a comment-only keepalive frame is sent
    so intermediaries do not drop the idle connection. The subscription
    is always released when the consumer stops iterating.
    """
    queue = broker.subscribe()
    try:
        yield _sse_event("connected", {"type": "connected", "published_at": time.time()})
        while True:
            try:
                message = await asyncio.wait_for(queue.get(), timeout=25)
            # BUG FIX: before Python 3.11, asyncio.wait_for raises
            # asyncio.TimeoutError, which is NOT the builtin TimeoutError;
            # catching only the builtin let the timeout escape and kill the
            # stream on 3.8-3.10. On 3.11+ the two names are aliases, so
            # catching both is identical behavior there.
            except (TimeoutError, asyncio.TimeoutError):
                # SSE comment line: ignored by clients, keeps the pipe warm.
                yield ": keepalive\n\n"
                continue
            data = message.get("data")  # single lookup instead of two
            yield _sse_event(
                str(message.get("event", "message")),
                data if isinstance(data, dict) else {},
            )
    finally:
        broker.unsubscribe(queue)


def _sse_event(event: str, payload: dict[str, Any]) -> str:
    """Serialize one SSE frame: event name plus a compact-JSON data line."""
    return f"event: {event}\ndata: {json.dumps(payload, separators=(',', ':'))}\n\n"