Revert "Add message queue for SSE messages POST endpoint (#459)" (#649)

This commit is contained in:
ihrpr
2025-05-07 16:35:20 +01:00
committed by GitHub
parent c8a14c9dba
commit 9d99aee014
26 changed files with 51 additions and 1247 deletions

View File

@@ -449,30 +449,6 @@ if __name__ == "__main__":
For more information on mounting applications in Starlette, see the [Starlette documentation](https://www.starlette.io/routing/#submounting-routes).
#### Message Dispatch Options
By default, the SSE server uses an in-memory message dispatch system for incoming POST messages. For production deployments or distributed scenarios, you can use Redis or implement your own message dispatch system that conforms to the `MessageDispatch` protocol:
```python
# Using the built-in Redis message dispatch
from mcp.server.fastmcp import FastMCP
from mcp.server.message_queue import RedisMessageDispatch
# Create a Redis message dispatch
redis_dispatch = RedisMessageDispatch(
redis_url="redis://localhost:6379/0", prefix="mcp:pubsub:"
)
# Pass the message dispatch instance to the server
mcp = FastMCP("My App", message_queue=redis_dispatch)
```
To use Redis, add the Redis dependency:
```bash
uv add "mcp[redis]"
```
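Alternatively, a custom dispatch only needs the four methods of the `MessageDispatch` protocol (`publish_message`, `subscribe`, `session_exists`, and `close`). As a rough illustration — the class name and logging here are hypothetical, not part of the SDK — an in-memory variant that logs each published message might look like:
```python
import logging
from contextlib import asynccontextmanager
from uuid import UUID

from pydantic import ValidationError

from mcp.server.message_queue.base import MessageCallback
from mcp.shared.message import SessionMessage

logger = logging.getLogger(__name__)


class LoggingMessageDispatch:
    """In-memory dispatch that also logs every published message."""

    def __init__(self) -> None:
        self._callbacks: dict[UUID, MessageCallback] = {}

    async def publish_message(
        self, session_id: UUID, message: SessionMessage | str
    ) -> bool:
        callback = self._callbacks.get(session_id)
        if callback is None:
            return False  # Session not found
        logger.info("Dispatching message for session %s", session_id)
        # Raw strings may carry invalid JSON: parse, or forward the error
        if isinstance(message, str):
            try:
                payload: SessionMessage | Exception = (
                    SessionMessage.model_validate_json(message)
                )
            except ValidationError as exc:
                payload = exc
        else:
            payload = message
        await callback(payload)
        return True

    @asynccontextmanager
    async def subscribe(self, session_id: UUID, callback: MessageCallback):
        self._callbacks[session_id] = callback
        try:
            yield
        finally:
            self._callbacks.pop(session_id, None)

    async def session_exists(self, session_id: UUID) -> bool:
        return session_id in self._callbacks

    async def close(self) -> None:
        pass
```
An instance of such a class can be passed to `FastMCP` via `message_dispatch`, just like the Redis dispatch above.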
## Examples
### Echo Server

View File

@@ -88,15 +88,12 @@ def main(port: int, transport: str) -> int:
)
if transport == "sse":
from mcp.server.message_queue.redis import RedisMessageDispatch
from mcp.server.sse import SseServerTransport
from starlette.applications import Starlette
from starlette.responses import Response
from starlette.routing import Mount, Route
message_dispatch = RedisMessageDispatch("redis://localhost:6379/0")
sse = SseServerTransport("/messages/", message_dispatch=message_dispatch)
sse = SseServerTransport("/messages/")
async def handle_sse(request):
async with sse.connect_sse(

View File

@@ -37,7 +37,6 @@ dependencies = [
rich = ["rich>=13.9.4"]
cli = ["typer>=0.12.4", "python-dotenv>=1.0.0"]
ws = ["websockets>=15.0.1"]
redis = ["redis>=5.2.1", "types-redis>=4.6.0.20241004"]
[project.scripts]
mcp = "mcp.cli:app [cli]"
@@ -56,7 +55,6 @@ dev = [
"pytest-xdist>=3.6.1",
"pytest-examples>=0.0.14",
"pytest-pretty>=1.2.0",
"fakeredis==2.28.1",
]
docs = [
"mkdocs>=1.6.1",

View File

@@ -98,9 +98,7 @@ async def sse_client(
await read_stream_writer.send(exc)
continue
session_message = SessionMessage(
message=message
)
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
case _:
logger.warning(
@@ -150,5 +148,3 @@ async def sse_client(
finally:
await read_stream_writer.aclose()
await write_stream.aclose()
await read_stream.aclose()
await write_stream_reader.aclose()

View File

@@ -144,7 +144,7 @@ async def stdio_client(server: StdioServerParameters, errlog: TextIO = sys.stder
await read_stream_writer.send(exc)
continue
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
except anyio.ClosedResourceError:
await anyio.lowlevel.checkpoint()

View File

@@ -153,7 +153,7 @@ class StreamableHTTPTransport:
):
message.root.id = original_request_id
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
# Call resumption token callback if we have an ID
@@ -286,7 +286,7 @@ class StreamableHTTPTransport:
try:
content = await response.aread()
message = JSONRPCMessage.model_validate_json(content)
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
except Exception as exc:
logger.error(f"Error parsing JSON response: {exc}")
@@ -333,7 +333,7 @@ class StreamableHTTPTransport:
id=request_id,
error=ErrorData(code=32600, message="Session terminated"),
)
session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_error))
session_message = SessionMessage(JSONRPCMessage(jsonrpc_error))
await read_stream_writer.send(session_message)
async def post_writer(

View File

@@ -60,7 +60,7 @@ async def websocket_client(
async for raw_text in ws:
try:
message = types.JSONRPCMessage.model_validate_json(raw_text)
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
except ValidationError as exc:
# If JSON parse or model validation fails, send the exception

View File

@@ -44,7 +44,6 @@ from mcp.server.lowlevel.helper_types import ReadResourceContents
from mcp.server.lowlevel.server import LifespanResultT
from mcp.server.lowlevel.server import Server as MCPServer
from mcp.server.lowlevel.server import lifespan as default_lifespan
from mcp.server.message_queue import MessageDispatch
from mcp.server.session import ServerSession, ServerSessionT
from mcp.server.sse import SseServerTransport
from mcp.server.stdio import stdio_server
@@ -92,11 +91,6 @@ class Settings(BaseSettings, Generic[LifespanResultT]):
sse_path: str = "/sse"
message_path: str = "/messages/"
# SSE message queue settings
message_dispatch: MessageDispatch | None = Field(
None, description="Custom message dispatch instance"
)
# resource settings
warn_on_duplicate_resources: bool = True
@@ -607,13 +601,6 @@ class FastMCP:
def sse_app(self, mount_path: str | None = None) -> Starlette:
"""Return an instance of the SSE server app."""
message_dispatch = self.settings.message_dispatch
if message_dispatch is None:
from mcp.server.message_queue import InMemoryMessageDispatch
message_dispatch = InMemoryMessageDispatch()
logger.info("Using default in-memory message dispatch")
from starlette.middleware import Middleware
from starlette.routing import Mount, Route
@@ -625,12 +612,11 @@ class FastMCP:
normalized_message_endpoint = self._normalize_path(
self.settings.mount_path, self.settings.message_path
)
# Set up auth context and dependencies
sse = SseServerTransport(
normalized_message_endpoint,
message_dispatch=message_dispatch
normalized_message_endpoint,
)
async def handle_sse(scope: Scope, receive: Receive, send: Send):
@@ -646,14 +632,7 @@ class FastMCP:
streams[1],
self._mcp_server.create_initialization_options(),
)
return Response()
@asynccontextmanager
async def lifespan(app: Starlette):
try:
yield
finally:
await message_dispatch.close()
return Response()
# Create routes
routes: list[Route | Mount] = []
@@ -730,10 +709,7 @@ class FastMCP:
# Create Starlette app with routes and middleware
return Starlette(
debug=self.settings.debug,
routes=routes,
middleware=middleware,
lifespan=lifespan,
debug=self.settings.debug, routes=routes, middleware=middleware
)
async def list_prompts(self) -> list[MCPPrompt]:

View File

@@ -1,16 +0,0 @@
"""
Message Dispatch Module for MCP Server
This module implements dispatch interfaces for handling
messages between clients and servers.
"""
from mcp.server.message_queue.base import InMemoryMessageDispatch, MessageDispatch
# Try to import Redis implementation if available
try:
from mcp.server.message_queue.redis import RedisMessageDispatch
except ImportError:
RedisMessageDispatch = None
__all__ = ["MessageDispatch", "InMemoryMessageDispatch", "RedisMessageDispatch"]

View File

@@ -1,116 +0,0 @@
import logging
from collections.abc import Awaitable, Callable
from contextlib import asynccontextmanager
from typing import Protocol, runtime_checkable
from uuid import UUID
from pydantic import ValidationError
from mcp.shared.message import SessionMessage
logger = logging.getLogger(__name__)
MessageCallback = Callable[[SessionMessage | Exception], Awaitable[None]]
@runtime_checkable
class MessageDispatch(Protocol):
"""Abstract interface for SSE message dispatching.
This interface allows messages to be published to sessions and callbacks to be
registered for message handling, enabling multiple servers to handle requests.
"""
async def publish_message(
self, session_id: UUID, message: SessionMessage | str
) -> bool:
"""Publish a message for the specified session.
Args:
session_id: The UUID of the session this message is for
message: The message to publish (SessionMessage or str for invalid JSON)
Returns:
bool: True if message was published, False if session not found
"""
...
@asynccontextmanager
async def subscribe(self, session_id: UUID, callback: MessageCallback):
"""Request-scoped context manager that subscribes to messages for a session.
Args:
session_id: The UUID of the session to subscribe to
callback: Async callback function to handle messages for this session
"""
yield
async def session_exists(self, session_id: UUID) -> bool:
"""Check if a session exists.
Args:
session_id: The UUID of the session to check
Returns:
bool: True if the session is active, False otherwise
"""
...
async def close(self) -> None:
"""Close the message dispatch."""
...
class InMemoryMessageDispatch:
"""Default in-memory implementation of the MessageDispatch interface.
This implementation dispatches messages to registered callbacks immediately as
they are received, without any queuing behavior.
"""
def __init__(self) -> None:
self._callbacks: dict[UUID, MessageCallback] = {}
async def publish_message(
self, session_id: UUID, message: SessionMessage | str
) -> bool:
"""Publish a message for the specified session."""
if session_id not in self._callbacks:
logger.warning(f"Message dropped: unknown session {session_id}")
return False
# Parse string messages or recreate original ValidationError
if isinstance(message, str):
try:
callback_argument = SessionMessage.model_validate_json(message)
except ValidationError as exc:
callback_argument = exc
else:
callback_argument = message
# Call the callback with either valid message or recreated ValidationError
await self._callbacks[session_id](callback_argument)
logger.debug(f"Message dispatched to session {session_id}")
return True
@asynccontextmanager
async def subscribe(self, session_id: UUID, callback: MessageCallback):
"""Request-scoped context manager that subscribes to messages for a session."""
self._callbacks[session_id] = callback
logger.debug(f"Subscribing to messages for session {session_id}")
try:
yield
finally:
if session_id in self._callbacks:
del self._callbacks[session_id]
logger.debug(f"Unsubscribed from session {session_id}")
async def session_exists(self, session_id: UUID) -> bool:
"""Check if a session exists."""
return session_id in self._callbacks
async def close(self) -> None:
"""Close the message dispatch."""
pass

View File

@@ -1,198 +0,0 @@
import logging
from contextlib import asynccontextmanager
from typing import Any, cast
from uuid import UUID
import anyio
from anyio import CancelScope, CapacityLimiter, lowlevel
from anyio.abc import TaskGroup
from pydantic import ValidationError
from mcp.server.message_queue.base import MessageCallback
from mcp.shared.message import SessionMessage
try:
import redis.asyncio as redis
except ImportError:
raise ImportError(
"Redis support requires the 'redis' package. "
"Install it with: 'uv add redis' or 'uv add \"mcp[redis]\"'"
)
logger = logging.getLogger(__name__)
class RedisMessageDispatch:
"""Redis implementation of the MessageDispatch interface using pubsub.
This implementation uses Redis pubsub for real-time message distribution across
multiple servers handling the same sessions.
"""
def __init__(
self,
redis_url: str = "redis://localhost:6379/0",
prefix: str = "mcp:pubsub:",
session_ttl: int = 3600, # 1 hour default TTL for sessions
) -> None:
"""Initialize Redis message dispatch.
Args:
redis_url: Redis connection string
prefix: Key prefix for Redis channels to avoid collisions
session_ttl: TTL in seconds for session keys (default: 1 hour)
"""
self._redis = redis.from_url(redis_url, decode_responses=True) # type: ignore
self._pubsub = self._redis.pubsub(ignore_subscribe_messages=True) # type: ignore
self._prefix = prefix
self._session_ttl = session_ttl
# Maps session IDs to the callback and task group for that SSE session.
self._session_state: dict[UUID, tuple[MessageCallback, TaskGroup]] = {}
# Ensures only one polling task runs at a time for message handling
self._limiter = CapacityLimiter(1)
logger.debug(f"Redis message dispatch initialized: {redis_url}")
async def close(self):
await self._pubsub.aclose() # type: ignore
await self._redis.aclose() # type: ignore
def _session_channel(self, session_id: UUID) -> str:
"""Get the Redis channel for a session."""
return f"{self._prefix}session:{session_id.hex}"
def _session_key(self, session_id: UUID) -> str:
"""Get the Redis key for a session."""
return f"{self._prefix}session_active:{session_id.hex}"
@asynccontextmanager
async def subscribe(self, session_id: UUID, callback: MessageCallback):
"""Request-scoped context manager that subscribes to messages for a session."""
session_key = self._session_key(session_id)
await self._redis.setex(session_key, self._session_ttl, "1") # type: ignore
channel = self._session_channel(session_id)
await self._pubsub.subscribe(channel) # type: ignore
logger.debug(f"Subscribing to Redis channel for session {session_id}")
async with anyio.create_task_group() as tg:
self._session_state[session_id] = (callback, tg)
tg.start_soon(self._listen_for_messages)
# Start heartbeat for this session
tg.start_soon(self._session_heartbeat, session_id)
try:
yield
finally:
with anyio.CancelScope(shield=True):
tg.cancel_scope.cancel()
await self._pubsub.unsubscribe(channel) # type: ignore
await self._redis.delete(session_key) # type: ignore
del self._session_state[session_id]
logger.debug(f"Unsubscribed from Redis channel: {session_id}")
async def _session_heartbeat(self, session_id: UUID) -> None:
"""Periodically refresh the TTL for a session."""
session_key = self._session_key(session_id)
while True:
await lowlevel.checkpoint()
try:
# Refresh TTL at half the TTL interval to avoid expiration
await anyio.sleep(self._session_ttl / 2)
with anyio.CancelScope(shield=True):
await self._redis.expire(session_key, self._session_ttl) # type: ignore
except anyio.get_cancelled_exc_class():
break
except Exception as e:
logger.error(f"Error refreshing TTL for session {session_id}: {e}")
def _extract_session_id(self, channel: str) -> UUID | None:
"""Extract and validate session ID from channel."""
expected_prefix = f"{self._prefix}session:"
if not channel.startswith(expected_prefix):
return None
session_hex = channel[len(expected_prefix) :]
try:
session_id = UUID(hex=session_hex)
if channel != self._session_channel(session_id):
logger.error(f"Channel format mismatch: {channel}")
return None
return session_id
except ValueError:
logger.error(f"Invalid UUID in channel: {channel}")
return None
async def _listen_for_messages(self) -> None:
"""Background task that listens for messages on subscribed channels."""
async with self._limiter:
while True:
await lowlevel.checkpoint()
with CancelScope(shield=True):
message: None | dict[str, Any] = await self._pubsub.get_message( # type: ignore
ignore_subscribe_messages=True,
timeout=0.1, # type: ignore
)
if message is None:
continue
channel: str = cast(str, message["channel"])
session_id = self._extract_session_id(channel)
if session_id is None:
logger.debug(
f"Ignoring message from non-MCP channel: {channel}"
)
continue
data: str = cast(str, message["data"])
try:
if session_state := self._session_state.get(session_id):
session_state[1].start_soon(
self._handle_message, session_id, data
)
else:
logger.warning(
f"Message dropped: unknown session {session_id}"
)
except Exception as e:
logger.error(f"Error processing message for {session_id}: {e}")
async def _handle_message(self, session_id: UUID, data: str) -> None:
"""Process a message from Redis in the session's task group."""
if (session_state := self._session_state.get(session_id)) is None:
logger.warning(f"Message dropped: callback removed for {session_id}")
return
try:
# Parse message or pass validation error to callback
msg_or_error = None
try:
msg_or_error = SessionMessage.model_validate_json(data)
except ValidationError as exc:
msg_or_error = exc
await session_state[0](msg_or_error)
except Exception as e:
logger.error(f"Error in message handler for {session_id}: {e}")
async def publish_message(
self, session_id: UUID, message: SessionMessage | str
) -> bool:
"""Publish a message for the specified session."""
if not await self.session_exists(session_id):
logger.warning(f"Message dropped: unknown session {session_id}")
return False
# Pass raw JSON strings directly, preserving validation errors
if isinstance(message, str):
data = message
else:
data = message.model_dump_json()
channel = self._session_channel(session_id)
await self._redis.publish(channel, data) # type: ignore[attr-defined]
logger.debug(f"Message published to Redis channel for session {session_id}")
return True
async def session_exists(self, session_id: UUID) -> bool:
"""Check if a session exists."""
session_key = self._session_key(session_id)
return bool(await self._redis.exists(session_key)) # type: ignore

View File

@@ -52,11 +52,9 @@ from starlette.responses import Response
from starlette.types import Receive, Scope, Send
import mcp.types as types
from mcp.server.message_queue import InMemoryMessageDispatch, MessageDispatch
from mcp.shared.message import SessionMessage
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)
class SseServerTransport:
@@ -72,24 +70,17 @@ class SseServerTransport:
"""
_endpoint: str
_message_dispatch: MessageDispatch
_read_stream_writers: dict[UUID, MemoryObjectSendStream[SessionMessage | Exception]]
def __init__(
self, endpoint: str, message_dispatch: MessageDispatch | None = None
) -> None:
def __init__(self, endpoint: str) -> None:
"""
Creates a new SSE server transport, which will direct the client to POST
messages to the relative or absolute URL given.
Args:
endpoint: The endpoint URL for SSE connections
message_dispatch: Optional message dispatch to use
"""
super().__init__()
self._endpoint = endpoint
self._message_dispatch = message_dispatch or InMemoryMessageDispatch()
self._read_stream_writers = {}
logger.debug(f"SseServerTransport initialized with endpoint: {endpoint}")
@asynccontextmanager
@@ -110,12 +101,7 @@ class SseServerTransport:
session_id = uuid4()
session_uri = f"{quote(self._endpoint)}?session_id={session_id.hex}"
async def message_callback(message: SessionMessage | Exception) -> None:
"""Callback that receives messages from the message queue"""
logger.debug(f"Got message from queue for session {session_id}")
await read_stream_writer.send(message)
self._read_stream_writers[session_id] = read_stream_writer
logger.debug(f"Created new session with ID: {session_id}")
sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[
@@ -152,16 +138,13 @@ class SseServerTransport:
)(scope, receive, send)
await read_stream_writer.aclose()
await write_stream_reader.aclose()
await sse_stream_writer.aclose()
await sse_stream_reader.aclose()
logging.debug(f"Client session disconnected {session_id}")
logger.debug("Starting SSE response task")
tg.start_soon(response_wrapper, scope, receive, send)
async with self._message_dispatch.subscribe(session_id, message_callback):
logger.debug("Yielding read and write streams")
yield (read_stream, write_stream)
logger.debug("Yielding read and write streams")
yield (read_stream, write_stream)
async def handle_post_message(
self, scope: Scope, receive: Receive, send: Send
@@ -183,7 +166,8 @@ class SseServerTransport:
response = Response("Invalid session ID", status_code=400)
return await response(scope, receive, send)
if not await self._message_dispatch.session_exists(session_id):
writer = self._read_stream_writers.get(session_id)
if not writer:
logger.warning(f"Could not find session for ID: {session_id}")
response = Response("Could not find session", status_code=404)
return await response(scope, receive, send)
@@ -198,15 +182,11 @@ class SseServerTransport:
logger.error(f"Failed to parse message: {err}")
response = Response("Could not parse message", status_code=400)
await response(scope, receive, send)
# Pass raw JSON string; receiver will recreate identical ValidationError
# when parsing the same invalid JSON
await self._message_dispatch.publish_message(session_id, body.decode())
await writer.send(err)
return
logger.debug(f"Publishing message for session {session_id}: {message}")
session_message = SessionMessage(message)
logger.debug(f"Sending session message to writer: {session_message}")
response = Response("Accepted", status_code=202)
await response(scope, receive, send)
await self._message_dispatch.publish_message(
session_id, SessionMessage(message=message)
)
logger.debug(f"Sending session message to writer: {message}")
await writer.send(session_message)

View File

@@ -67,7 +67,7 @@ async def stdio_server(
await read_stream_writer.send(exc)
continue
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await read_stream_writer.send(session_message)
except anyio.ClosedResourceError:
await anyio.lowlevel.checkpoint()

View File

@@ -398,7 +398,7 @@ class StreamableHTTPServerTransport:
await response(scope, receive, send)
# Process the message after sending the response
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await writer.send(session_message)
return
@@ -413,7 +413,7 @@ class StreamableHTTPServerTransport:
if self.is_json_response_enabled:
# Process the message
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await writer.send(session_message)
try:
# Process messages from the request-specific stream
@@ -512,7 +512,7 @@ class StreamableHTTPServerTransport:
async with anyio.create_task_group() as tg:
tg.start_soon(response, scope, receive, send)
# Then send the message to be processed by the server
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await writer.send(session_message)
except Exception:
logger.exception("SSE response error")

View File

@@ -42,7 +42,7 @@ async def websocket_server(scope: Scope, receive: Receive, send: Send):
await read_stream_writer.send(exc)
continue
session_message = SessionMessage(message=client_message)
session_message = SessionMessage(client_message)
await read_stream_writer.send(session_message)
except anyio.ClosedResourceError:
await websocket.close()

View File

@@ -6,8 +6,7 @@ to support transport-specific features like resumability.
"""
from collections.abc import Awaitable, Callable
from pydantic import BaseModel
from dataclasses import dataclass
from mcp.types import JSONRPCMessage, RequestId
@@ -16,7 +15,8 @@ ResumptionToken = str
ResumptionTokenUpdateCallback = Callable[[ResumptionToken], Awaitable[None]]
class ClientMessageMetadata(BaseModel):
@dataclass
class ClientMessageMetadata:
"""Metadata specific to client messages."""
resumption_token: ResumptionToken | None = None
@@ -25,7 +25,8 @@ class ClientMessageMetadata(BaseModel):
)
class ServerMessageMetadata(BaseModel):
@dataclass
class ServerMessageMetadata:
"""Metadata specific to server messages."""
related_request_id: RequestId | None = None
@@ -34,8 +35,9 @@ class ServerMessageMetadata(BaseModel):
MessageMetadata = ClientMessageMetadata | ServerMessageMetadata | None
class SessionMessage(BaseModel):
@dataclass
class SessionMessage:
"""A message with specific metadata for transport-specific features."""
message: JSONRPCMessage
metadata: MessageMetadata | None = None
metadata: MessageMetadata = None

View File

@@ -62,7 +62,7 @@ async def test_client_session_initialize():
async with server_to_client_send:
await server_to_client_send.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
JSONRPCResponse(
jsonrpc="2.0",
id=jsonrpc_request.root.id,
@@ -153,7 +153,7 @@ async def test_client_session_custom_client_info():
async with server_to_client_send:
await server_to_client_send.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
JSONRPCResponse(
jsonrpc="2.0",
id=jsonrpc_request.root.id,
@@ -220,7 +220,7 @@ async def test_client_session_default_client_info():
async with server_to_client_send:
await server_to_client_send.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
JSONRPCResponse(
jsonrpc="2.0",
id=jsonrpc_request.root.id,

View File

@@ -23,7 +23,7 @@ async def test_stdio_client():
async with write_stream:
for message in messages:
session_message = SessionMessage(message=message)
session_message = SessionMessage(message)
await write_stream.send(session_message)
read_messages = []

View File

@@ -65,7 +65,7 @@ async def test_request_id_match() -> None:
jsonrpc="2.0",
)
await client_writer.send(SessionMessage(message=JSONRPCMessage(root=init_req)))
await client_writer.send(SessionMessage(JSONRPCMessage(root=init_req)))
response = (
await server_reader.receive()
) # Get init response but don't need to check it
@@ -77,7 +77,7 @@ async def test_request_id_match() -> None:
jsonrpc="2.0",
)
await client_writer.send(
SessionMessage(message=JSONRPCMessage(root=initialized_notification))
SessionMessage(JSONRPCMessage(root=initialized_notification))
)
# Send ping request with custom ID
@@ -85,9 +85,7 @@ async def test_request_id_match() -> None:
id=custom_request_id, method="ping", params={}, jsonrpc="2.0"
)
await client_writer.send(
SessionMessage(message=JSONRPCMessage(root=ping_request))
)
await client_writer.send(SessionMessage(JSONRPCMessage(root=ping_request)))
# Read response
response = await server_reader.receive()

View File

@@ -1 +0,0 @@
# Message queue tests module

View File

@@ -1,28 +0,0 @@
"""Shared fixtures for message queue tests."""
from collections.abc import AsyncGenerator
from unittest.mock import patch
import pytest
from mcp.server.message_queue.redis import RedisMessageDispatch
# Set up fakeredis for testing
try:
from fakeredis import aioredis as fake_redis
except ImportError:
pytest.skip(
"fakeredis is required for testing Redis functionality", allow_module_level=True
)
@pytest.fixture
async def message_dispatch() -> AsyncGenerator[RedisMessageDispatch, None]:
"""Create a shared Redis message dispatch with a fake Redis client."""
with patch("mcp.server.message_queue.redis.redis", fake_redis.FakeRedis):
# Shorter TTL for testing
message_dispatch = RedisMessageDispatch(session_ttl=5)
try:
yield message_dispatch
finally:
await message_dispatch.close()

View File

@@ -1,355 +0,0 @@
from unittest.mock import AsyncMock
from uuid import uuid4
import anyio
import pytest
from pydantic import ValidationError
import mcp.types as types
from mcp.server.message_queue.redis import RedisMessageDispatch
from mcp.shared.message import SessionMessage
@pytest.mark.anyio
async def test_session_heartbeat(message_dispatch):
"""Test that session heartbeat refreshes TTL."""
session_id = uuid4()
async with message_dispatch.subscribe(session_id, AsyncMock()):
session_key = message_dispatch._session_key(session_id)
# Initial TTL
initial_ttl = await message_dispatch._redis.ttl(session_key) # type: ignore
assert initial_ttl > 0
# Wait for heartbeat to run
await anyio.sleep(message_dispatch._session_ttl / 2 + 0.5)
# TTL should be refreshed
refreshed_ttl = await message_dispatch._redis.ttl(session_key) # type: ignore
assert refreshed_ttl > 0
assert refreshed_ttl <= message_dispatch._session_ttl
@pytest.mark.anyio
async def test_subscribe_unsubscribe(message_dispatch):
"""Test subscribing and unsubscribing from a session."""
session_id = uuid4()
callback = AsyncMock()
# Subscribe
async with message_dispatch.subscribe(session_id, callback):
# Check that session is tracked
assert session_id in message_dispatch._session_state
assert await message_dispatch.session_exists(session_id)
# After context exit, session should be cleaned up
assert session_id not in message_dispatch._session_state
assert not await message_dispatch.session_exists(session_id)
@pytest.mark.anyio
async def test_publish_message_valid_json(message_dispatch: RedisMessageDispatch):
"""Test publishing a valid JSON-RPC message."""
session_id = uuid4()
callback = AsyncMock()
message = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test", "params": {}, "id": 1}
)
# Subscribe to messages
async with message_dispatch.subscribe(session_id, callback):
# Publish message
published = await message_dispatch.publish_message(
session_id, SessionMessage(message=message)
)
assert published
# Give some time for the message to be processed
await anyio.sleep(0.1)
# Callback should have been called with the message
callback.assert_called_once()
call_args = callback.call_args[0][0]
assert isinstance(call_args, SessionMessage)
assert isinstance(call_args.message.root, types.JSONRPCRequest)
assert (
call_args.message.root.method == "test"
) # Access method through root attribute
@pytest.mark.anyio
async def test_publish_message_invalid_json(message_dispatch):
"""Test publishing an invalid JSON string."""
session_id = uuid4()
callback = AsyncMock()
invalid_json = '{"invalid": "json",,}' # Invalid JSON
# Subscribe to messages
async with message_dispatch.subscribe(session_id, callback):
# Publish invalid message
published = await message_dispatch.publish_message(session_id, invalid_json)
assert published
# Give some time for the message to be processed
await anyio.sleep(0.1)
# Callback should have been called with a ValidationError
callback.assert_called_once()
error = callback.call_args[0][0]
assert isinstance(error, ValidationError)
@pytest.mark.anyio
async def test_publish_to_nonexistent_session(message_dispatch: RedisMessageDispatch):
"""Test publishing to a session that doesn't exist."""
session_id = uuid4()
message = SessionMessage(
message=types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test", "params": {}, "id": 1}
)
)
published = await message_dispatch.publish_message(session_id, message)
assert not published
@pytest.mark.anyio
async def test_extract_session_id(message_dispatch):
"""Test extracting session ID from channel name."""
session_id = uuid4()
channel = message_dispatch._session_channel(session_id)
# Valid channel
extracted_id = message_dispatch._extract_session_id(channel)
assert extracted_id == session_id
# Invalid channel format
extracted_id = message_dispatch._extract_session_id("invalid_channel_name")
assert extracted_id is None
# Invalid UUID in channel
invalid_channel = f"{message_dispatch._prefix}session:invalid_uuid"
extracted_id = message_dispatch._extract_session_id(invalid_channel)
assert extracted_id is None
@pytest.mark.anyio
async def test_multiple_sessions(message_dispatch: RedisMessageDispatch):
"""Test handling multiple concurrent sessions."""
session1 = uuid4()
session2 = uuid4()
callback1 = AsyncMock()
callback2 = AsyncMock()
async with message_dispatch.subscribe(session1, callback1):
async with message_dispatch.subscribe(session2, callback2):
# Both sessions should exist
assert await message_dispatch.session_exists(session1)
assert await message_dispatch.session_exists(session2)
# Publish to session1
message1 = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test1", "params": {}, "id": 1}
)
await message_dispatch.publish_message(
session1, SessionMessage(message=message1)
)
# Publish to session2
message2 = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test2", "params": {}, "id": 2}
)
await message_dispatch.publish_message(
session2, SessionMessage(message=message2)
)
# Give some time for messages to be processed
await anyio.sleep(0.1)
# Check callbacks
callback1.assert_called_once()
callback2.assert_called_once()
call1_args = callback1.call_args[0][0]
assert isinstance(call1_args, SessionMessage)
assert call1_args.message.root.method == "test1" # type: ignore
call2_args = callback2.call_args[0][0]
assert isinstance(call2_args, SessionMessage)
assert call2_args.message.root.method == "test2" # type: ignore
@pytest.mark.anyio
async def test_task_group_cancellation(message_dispatch):
"""Test that task group is properly cancelled when context exits."""
session_id = uuid4()
callback = AsyncMock()
async with message_dispatch.subscribe(session_id, callback):
# Check that task group is active
_, task_group = message_dispatch._session_state[session_id]
assert task_group.cancel_scope.cancel_called is False
# After context exit, task group should be cancelled
# And session state should be cleaned up
assert session_id not in message_dispatch._session_state
@pytest.mark.anyio
async def test_session_cancellation_isolation(message_dispatch):
"""Test that cancelling one session doesn't affect other sessions."""
session1 = uuid4()
session2 = uuid4()
# Create a blocking callback for session1 to ensure it's running when cancelled
session1_event = anyio.Event()
session1_started = anyio.Event()
session1_cancelled = False
async def blocking_callback1(msg):
session1_started.set()
try:
await session1_event.wait()
except anyio.get_cancelled_exc_class():
nonlocal session1_cancelled
session1_cancelled = True
raise
callback2 = AsyncMock()
# Start session2 first
async with message_dispatch.subscribe(session2, callback2):
# Start session1 with a blocking callback
async with anyio.create_task_group() as tg:
async def session1_runner():
async with message_dispatch.subscribe(session1, blocking_callback1):
# Publish a message to trigger the blocking callback
message = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test", "params": {}, "id": 1}
)
await message_dispatch.publish_message(session1, message)
# Wait for the callback to start
await session1_started.wait()
# Keep the context alive while we test cancellation
await anyio.sleep_forever()
tg.start_soon(session1_runner)
# Wait for session1's callback to start
await session1_started.wait()
# Cancel session1
tg.cancel_scope.cancel()
# Give some time for cancellation to propagate
await anyio.sleep(0.1)
# Verify session1 was cancelled
assert session1_cancelled
assert session1 not in message_dispatch._session_state
# Verify session2 is still active and can receive messages
assert await message_dispatch.session_exists(session2)
message2 = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test2", "params": {}, "id": 2}
)
await message_dispatch.publish_message(session2, message2)
# Give some time for the message to be processed
await anyio.sleep(0.1)
# Verify session2 received the message
callback2.assert_called_once()
call_args = callback2.call_args[0][0]
assert call_args.root.method == "test2"
@pytest.mark.anyio
async def test_listener_task_handoff_on_cancellation(message_dispatch):
"""
Test that the single listening task is properly
handed off when a session is cancelled.
"""
session1 = uuid4()
session2 = uuid4()
session1_messages_received = 0
session2_messages_received = 0
async def callback1(msg):
nonlocal session1_messages_received
session1_messages_received += 1
async def callback2(msg):
nonlocal session2_messages_received
session2_messages_received += 1
# Create a cancel scope for session1
async with anyio.create_task_group() as tg:
session1_cancel_scope: anyio.CancelScope | None = None
async def session1_runner():
nonlocal session1_cancel_scope
with anyio.CancelScope() as cancel_scope:
session1_cancel_scope = cancel_scope
async with message_dispatch.subscribe(session1, callback1):
# Keep session alive until cancelled
await anyio.sleep_forever()
# Start session1
tg.start_soon(session1_runner)
# Wait for session1 to be established
await anyio.sleep(0.1)
assert session1 in message_dispatch._session_state
# Send message to session1 to verify it's working
message1 = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test1", "params": {}, "id": 1}
)
await message_dispatch.publish_message(session1, message1)
await anyio.sleep(0.1)
assert session1_messages_received == 1
# Start session2 while session1 is still active
async with message_dispatch.subscribe(session2, callback2):
# Both sessions should be active
assert session1 in message_dispatch._session_state
assert session2 in message_dispatch._session_state
# Cancel session1
assert session1_cancel_scope is not None
session1_cancel_scope.cancel()
# Wait for cancellation to complete
await anyio.sleep(0.1)
# Session1 should be gone, session2 should remain
assert session1 not in message_dispatch._session_state
assert session2 in message_dispatch._session_state
# Send message to session2 to verify the listener was handed off
message2 = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test2", "params": {}, "id": 2}
)
await message_dispatch.publish_message(session2, message2)
await anyio.sleep(0.1)
# Session2 should have received the message
assert session2_messages_received == 1
# Session1 shouldn't receive any more messages
assert session1_messages_received == 1
# Send another message to verify the listener is still working
message3 = types.JSONRPCMessage.model_validate(
{"jsonrpc": "2.0", "method": "test3", "params": {}, "id": 3}
)
await message_dispatch.publish_message(session2, message3)
await anyio.sleep(0.1)
assert session2_messages_received == 2

View File

@@ -1,260 +0,0 @@
"""
Integration tests for Redis message dispatch functionality.
These tests validate Redis message dispatch by making actual HTTP calls and testing
that messages flow correctly through the Redis backend.
This version runs the server in a task instead of a separate process to allow
access to the fakeredis instance for verification of Redis keys.
"""
import asyncio
import socket
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
import anyio
import pytest
import uvicorn
from sse_starlette.sse import AppStatus
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import Mount, Route
from mcp.client.session import ClientSession
from mcp.client.sse import sse_client
from mcp.server import Server
from mcp.server.message_queue.redis import RedisMessageDispatch
from mcp.server.sse import SseServerTransport
from mcp.types import TextContent, Tool
SERVER_NAME = "test_server_for_redis_integration_v3"
@pytest.fixture
def server_port() -> int:
with socket.socket() as s:
s.bind(("127.0.0.1", 0))
return s.getsockname()[1]
@pytest.fixture
def server_url(server_port: int) -> str:
return f"http://127.0.0.1:{server_port}"
class RedisTestServer(Server):
"""Test server with basic tool functionality."""
def __init__(self):
super().__init__(SERVER_NAME)
@self.list_tools()
async def handle_list_tools() -> list[Tool]:
return [
Tool(
name="test_tool",
description="A test tool",
inputSchema={"type": "object", "properties": {}},
),
Tool(
name="echo_message",
description="Echo a message back",
inputSchema={
"type": "object",
"properties": {"message": {"type": "string"}},
"required": ["message"],
},
),
]
@self.call_tool()
async def handle_call_tool(name: str, args: dict) -> list[TextContent]:
if name == "echo_message":
message = args.get("message", "")
return [TextContent(type="text", text=f"Echo: {message}")]
return [TextContent(type="text", text=f"Called {name}")]
@pytest.fixture()
async def redis_server_and_app(message_dispatch: RedisMessageDispatch):
"""Create a mock Redis instance and Starlette app for testing."""
# Create SSE transport with Redis message dispatch
sse = SseServerTransport("/messages/", message_dispatch=message_dispatch)
server = RedisTestServer()
async def handle_sse(request: Request):
async with sse.connect_sse(
request.scope, request.receive, request._send
) as streams:
await server.run(
streams[0], streams[1], server.create_initialization_options()
)
return Response()
@asynccontextmanager
async def lifespan(app: Starlette) -> AsyncGenerator[None, None]:
"""Manage the lifecycle of the application."""
try:
yield
finally:
await message_dispatch.close()
app = Starlette(
routes=[
Route("/sse", endpoint=handle_sse),
Mount("/messages/", app=sse.handle_post_message),
],
lifespan=lifespan,
)
return app, message_dispatch, message_dispatch._redis
@pytest.fixture()
async def server_and_redis(redis_server_and_app, server_port: int):
"""Run the server in a task and return the Redis instance for inspection."""
app, message_dispatch, mock_redis = redis_server_and_app
# Create a server config
config = uvicorn.Config(
app=app, host="127.0.0.1", port=server_port, log_level="error"
)
server = uvicorn.Server(config=config)
try:
async with anyio.create_task_group() as tg:
# Start server in background
tg.start_soon(server.serve)
# Wait for server to be ready
max_attempts = 20
attempt = 0
while attempt < max_attempts:
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect(("127.0.0.1", server_port))
break
except ConnectionRefusedError:
await anyio.sleep(0.1)
attempt += 1
else:
raise RuntimeError(
f"Server failed to start after {max_attempts} attempts"
)
try:
yield mock_redis, message_dispatch
finally:
server.should_exit = True
finally:
# These class variables are set at top level in sse-starlette.
# It isn't designed to be run multiple times in a single
# Python process, so we need to manually reset them.
AppStatus.should_exit = False
AppStatus.should_exit_event = None
@pytest.fixture()
async def client_session(server_and_redis, server_url: str):
"""Create a client session for testing."""
async with sse_client(server_url + "/sse") as streams:
async with ClientSession(*streams) as session:
result = await session.initialize()
assert result.serverInfo.name == SERVER_NAME
yield session
@pytest.mark.anyio
async def test_redis_integration_key_verification(
server_and_redis, client_session
) -> None:
"""Test that Redis keys are created correctly for sessions."""
mock_redis, _ = server_and_redis
all_keys = await mock_redis.keys("*") # type: ignore
assert len(all_keys) > 0
session_key = None
for key in all_keys:
if key.startswith("mcp:pubsub:session_active:"):
session_key = key
break
assert session_key is not None, f"No session key found. Keys: {all_keys}"
ttl = await mock_redis.ttl(session_key) # type: ignore
assert ttl > 0, f"Session key should have TTL, got: {ttl}"
@pytest.mark.anyio
async def test_tool_calls(server_and_redis, client_session) -> None:
"""Test that messages are properly published through Redis."""
mock_redis, _ = server_and_redis
for i in range(3):
tool_result = await client_session.call_tool(
"echo_message", {"message": f"Test {i}"}
)
assert tool_result.content[0].text == f"Echo: Test {i}" # type: ignore
@pytest.mark.anyio
async def test_session_cleanup(server_and_redis, server_url: str) -> None:
"""Test Redis key cleanup when sessions end."""
mock_redis, _ = server_and_redis
session_keys_seen = set()
for i in range(3):
async with sse_client(server_url + "/sse") as streams:
async with ClientSession(*streams) as session:
await session.initialize()
all_keys = await mock_redis.keys("*") # type: ignore
for key in all_keys:
if key.startswith("mcp:pubsub:session_active:"):
session_keys_seen.add(key)
value = await mock_redis.get(key) # type: ignore
assert value == "1"
await anyio.sleep(0.1) # Give time for cleanup
all_keys = await mock_redis.keys("*") # type: ignore
assert (
len(all_keys) == 0
), f"Session keys should be cleaned up, found: {all_keys}"
# Verify we saw different session keys for each session
assert len(session_keys_seen) == 3, "Should have seen 3 unique session keys"
@pytest.mark.anyio
async def test_concurrent_tool_call(server_and_redis, server_url: str) -> None:
"""Test multiple clients and verify Redis key management."""
mock_redis, _ = server_and_redis
async def client_task(client_id: int) -> str:
async with sse_client(server_url + "/sse") as streams:
async with ClientSession(*streams) as session:
await session.initialize()
result = await session.call_tool(
"echo_message",
{"message": f"Message from client {client_id}"},
)
return result.content[0].text # type: ignore
# Run multiple clients concurrently
client_tasks = [client_task(i) for i in range(3)]
results = await asyncio.gather(*client_tasks)
# Verify all clients received their respective messages
assert len(results) == 3
for i, result in enumerate(results):
assert result == f"Echo: Message from client {i}"
# After all clients disconnect, keys should be cleaned up
await anyio.sleep(0.1) # Give time for cleanup
all_keys = await mock_redis.keys("*") # type: ignore
assert len(all_keys) == 0, f"Session keys should be cleaned up, found: {all_keys}"

View File

@@ -84,7 +84,7 @@ async def test_lowlevel_server_lifespan():
)
await send_stream1.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
root=JSONRPCRequest(
jsonrpc="2.0",
id=1,
@@ -100,7 +100,7 @@ async def test_lowlevel_server_lifespan():
# Send initialized notification
await send_stream1.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
root=JSONRPCNotification(
jsonrpc="2.0",
method="notifications/initialized",
@@ -112,7 +112,7 @@ async def test_lowlevel_server_lifespan():
# Call the tool to verify lifespan context
await send_stream1.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
root=JSONRPCRequest(
jsonrpc="2.0",
id=2,
@@ -188,7 +188,7 @@ async def test_fastmcp_server_lifespan():
)
await send_stream1.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
root=JSONRPCRequest(
jsonrpc="2.0",
id=1,
@@ -204,7 +204,7 @@ async def test_fastmcp_server_lifespan():
# Send initialized notification
await send_stream1.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
root=JSONRPCNotification(
jsonrpc="2.0",
method="notifications/initialized",
@@ -216,7 +216,7 @@ async def test_fastmcp_server_lifespan():
# Call the tool to verify lifespan context
await send_stream1.send(
SessionMessage(
message=JSONRPCMessage(
JSONRPCMessage(
root=JSONRPCRequest(
jsonrpc="2.0",
id=2,

View File

@@ -51,7 +51,7 @@ async def test_stdio_server():
async with write_stream:
for response in responses:
session_message = SessionMessage(message=response)
session_message = SessionMessage(response)
await write_stream.send(session_message)
stdout.seek(0)

uv.lock (generated; 149 lines changed)
View File

@@ -1,4 +1,5 @@
version = 1
revision = 1
requires-python = ">=3.10"
[options]
@@ -38,15 +39,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/68/f9e9bf6324c46e6b8396610aef90ad423ec3e18c9079547ceafea3dce0ec/anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78", size = 89250 },
]
[[package]]
name = "async-timeout"
version = "5.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233 },
]
[[package]]
name = "attrs"
version = "24.3.0"
@@ -275,51 +267,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
]
[[package]]
name = "cryptography"
version = "44.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 },
{ url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 },
{ url = "https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 },
{ url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 },
{ url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 },
{ url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 },
{ url = "https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 },
{ url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 },
{ url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 },
{ url = "https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 },
{ url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 },
{ url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 },
{ url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 },
{ url = "https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 },
{ url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 },
{ url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 },
{ url = "https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 },
{ url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 },
{ url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 },
{ url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 },
{ url = "https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 },
{ url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 },
{ url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 },
{ url = "https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 },
{ url = "https://files.pythonhosted.org/packages/99/10/173be140714d2ebaea8b641ff801cbcb3ef23101a2981cbf08057876f89e/cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb", size = 3396886 },
{ url = "https://files.pythonhosted.org/packages/2f/b4/424ea2d0fce08c24ede307cead3409ecbfc2f566725d4701b9754c0a1174/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41", size = 3892387 },
{ url = "https://files.pythonhosted.org/packages/28/20/8eaa1a4f7c68a1cb15019dbaad59c812d4df4fac6fd5f7b0b9c5177f1edd/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562", size = 4109922 },
{ url = "https://files.pythonhosted.org/packages/11/25/5ed9a17d532c32b3bc81cc294d21a36c772d053981c22bd678396bc4ae30/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5", size = 3895715 },
{ url = "https://files.pythonhosted.org/packages/63/31/2aac03b19c6329b62c45ba4e091f9de0b8f687e1b0cd84f101401bece343/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa", size = 4109876 },
{ url = "https://files.pythonhosted.org/packages/99/ec/6e560908349843718db1a782673f36852952d52a55ab14e46c42c8a7690a/cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d", size = 3131719 },
{ url = "https://files.pythonhosted.org/packages/d6/d7/f30e75a6aa7d0f65031886fa4a1485c2fbfe25a1896953920f6a9cfe2d3b/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d", size = 3887513 },
{ url = "https://files.pythonhosted.org/packages/9c/b4/7a494ce1032323ca9db9a3661894c66e0d7142ad2079a4249303402d8c71/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471", size = 4107432 },
{ url = "https://files.pythonhosted.org/packages/45/f8/6b3ec0bc56123b344a8d2b3264a325646d2dcdbdd9848b5e6f3d37db90b3/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615", size = 3891421 },
{ url = "https://files.pythonhosted.org/packages/57/ff/f3b4b2d007c2a646b0f69440ab06224f9cf37a977a72cdb7b50632174e8a/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390", size = 4107081 },
]

[[package]]
name = "cssselect2"
version = "0.8.0"
@@ -360,20 +307,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612 },
]

[[package]]
name = "fakeredis"
version = "2.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "redis" },
{ name = "sortedcontainers" },
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/32/8c1c215e50cb055e24a8d5a8981edab665d131ea9068c420bf81eb0fcb63/fakeredis-2.28.1.tar.gz", hash = "sha256:5e542200b945aa0a7afdc0396efefe3cdabab61bc0f41736cc45f68960255964", size = 161179 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/77/bca49c4960c22131da3acb647978983bea07f15c255fbef0a6559a774a7a/fakeredis-2.28.1-py3-none-any.whl", hash = "sha256:38c7c17fba5d5522af9d980a8f74a4da9900a3441e8f25c0fe93ea4205d695d1", size = 113685 },
]

[[package]]
name = "ghp-import"
version = "2.1.0"
@@ -574,10 +507,6 @@ cli = [
{ name = "python-dotenv" },
{ name = "typer" },
]
redis = [
{ name = "redis" },
{ name = "types-redis" },
]
rich = [
{ name = "rich" },
]
@@ -587,7 +516,6 @@ ws = [

[package.dev-dependencies]
dev = [
{ name = "fakeredis" },
{ name = "pyright" },
{ name = "pytest" },
{ name = "pytest-examples" },
@@ -613,19 +541,17 @@ requires-dist = [
{ name = "pydantic-settings", specifier = ">=2.5.2" },
{ name = "python-dotenv", marker = "extra == 'cli'", specifier = ">=1.0.0" },
{ name = "python-multipart", specifier = ">=0.0.9" },
{ name = "redis", marker = "extra == 'redis'", specifier = ">=5.2.1" },
{ name = "rich", marker = "extra == 'rich'", specifier = ">=13.9.4" },
{ name = "sse-starlette", specifier = ">=1.6.1" },
{ name = "starlette", specifier = ">=0.27" },
{ name = "typer", marker = "extra == 'cli'", specifier = ">=0.12.4" },
{ name = "types-redis", marker = "extra == 'redis'", specifier = ">=4.6.0.20241004" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'", specifier = ">=0.23.1" },
{ name = "websockets", marker = "extra == 'ws'", specifier = ">=15.0.1" },
]
provides-extras = ["cli", "rich", "ws"]

[package.metadata.requires-dev]
dev = [
{ name = "fakeredis", specifier = "==2.28.1" },
{ name = "pyright", specifier = ">=1.1.391" },
{ name = "pytest", specifier = ">=8.3.4" },
{ name = "pytest-examples", specifier = ">=0.0.14" },
@@ -1397,18 +1323,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/66/bbb1dd374f5c870f59c5bb1db0e18cbe7fa739415a24cbd95b2d1f5ae0c4/pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069", size = 3911 },
]

[[package]]
name = "redis"
version = "5.2.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "async-timeout", marker = "python_full_version < '3.11.3'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/47/da/d283a37303a995cd36f8b92db85135153dc4f7a8e4441aa827721b442cfb/redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f", size = 4608355 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/5f/fa26b9b2672cbe30e07d9a5bdf39cf16e3b80b42916757c5f92bca88e4ba/redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4", size = 261502 },
]

[[package]]
name = "regex"
version = "2024.11.6"
@@ -1532,15 +1446,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/5e/ffee22bf9f9e4b2669d1f0179ae8804584939fb6502b51f2401e26b1e028/ruff-0.8.5-py3-none-win_arm64.whl", hash = "sha256:134ae019ef13e1b060ab7136e7828a6d83ea727ba123381307eb37c6bd5e01cb", size = 9124741 },
]

[[package]]
name = "setuptools"
version = "78.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/5a/0db4da3bc908df06e5efae42b44e75c81dd52716e10192ff36d0c1c8e379/setuptools-78.1.0.tar.gz", hash = "sha256:18fd474d4a82a5f83dac888df697af65afa82dec7323d09c3e37d1f14288da54", size = 1367827 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/21/f43f0a1fa8b06b32812e0975981f4677d28e0f3271601dc88ac5a5b83220/setuptools-78.1.0-py3-none-any.whl", hash = "sha256:3e386e96793c8702ae83d17b853fb93d3e09ef82ec62722e61da5cd22376dcd8", size = 1256108 },
]

[[package]]
name = "shellingham"
version = "1.5.4"
@@ -1685,56 +1590,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/cc/15083dcde1252a663398b1b2a173637a3ec65adadfb95137dc95df1e6adc/typer-0.12.4-py3-none-any.whl", hash = "sha256:819aa03699f438397e876aa12b0d63766864ecba1b579092cc9fe35d886e34b6", size = 47402 },
]

[[package]]
name = "types-cffi"
version = "1.17.0.20250326"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-setuptools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3f/3b/d29491d754b9e42edd4890648311ffa5d4d000b7d97b92ac4d04faad40d8/types_cffi-1.17.0.20250326.tar.gz", hash = "sha256:6c8fea2c2f34b55e5fb77b1184c8ad849d57cf0ddccbc67a62121ac4b8b32254", size = 16887 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/61/49/ce473d7fbc2c80931ef9f7530fd3ddf31b8a5bca56340590334ce6ffbfb1/types_cffi-1.17.0.20250326-py3-none-any.whl", hash = "sha256:5af4ecd7374ae0d5fa9e80864e8d4b31088cc32c51c544e3af7ed5b5ed681447", size = 20133 },
]

[[package]]
name = "types-pyopenssl"
version = "24.1.0.20240722"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "types-cffi" },
]
sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499 },
]

[[package]]
name = "types-redis"
version = "4.6.0.20241004"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "types-pyopenssl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737 },
]

[[package]]
name = "types-setuptools"
version = "78.1.0.20250329"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "setuptools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e9/6e/c54e6705e5fe67c3606e4c7c91123ecf10d7e1e6d7a9c11b52970cf2196c/types_setuptools-78.1.0.20250329.tar.gz", hash = "sha256:31e62950c38b8cc1c5114b077504e36426860a064287cac11b9666ab3a483234", size = 43942 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7d/31/85d0264705d8ef47680d28f4dc9bb1e27d8cace785fbe3f8d009fad6cb88/types_setuptools-78.1.0.20250329-py3-none-any.whl", hash = "sha256:ea47eab891afb506f470eee581dcde44d64dc99796665da794da6f83f50f6776", size = 66985 },
]

[[package]]
name = "typing-extensions"
version = "4.12.2"