Huge v2 uplift, now deployable with real user management and tooling!

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Aaron D. Lee
2026-01-27 11:32:15 -05:00
parent c912a56c2d
commit bea85e6b28
61 changed files with 25153 additions and 362 deletions

26
server/stores/__init__.py Normal file
View File

@@ -0,0 +1,26 @@
"""Stores package for Golf game V2 persistence."""
from .event_store import EventStore, ConcurrencyError
from .state_cache import StateCache, get_state_cache, close_state_cache
from .pubsub import GamePubSub, PubSubMessage, MessageType, get_pubsub, close_pubsub
from .user_store import UserStore, get_user_store, close_user_store
__all__ = [
# Event store
"EventStore",
"ConcurrencyError",
# State cache
"StateCache",
"get_state_cache",
"close_state_cache",
# Pub/sub
"GamePubSub",
"PubSubMessage",
"MessageType",
"get_pubsub",
"close_pubsub",
# User store
"UserStore",
"get_user_store",
"close_user_store",
]

View File

@@ -0,0 +1,485 @@
"""
PostgreSQL-backed event store for Golf game.
The event store is an append-only log of all game events.
Events are immutable and ordered by sequence number within each game.
Features:
- Optimistic concurrency via unique constraint on (game_id, sequence_num)
- Batch appends for atomic multi-event writes
- Streaming for memory-efficient large game replay
- Game metadata table for efficient queries
"""
import json
import logging
from datetime import datetime, timezone
from typing import Optional, AsyncIterator
import asyncpg
from models.events import GameEvent, EventType
logger = logging.getLogger(__name__)
class ConcurrencyError(Exception):
    """Signals a failed optimistic-concurrency check.

    Raised when an event with the same (game_id, sequence_num) pair has
    already been written — i.e. another writer appended first and the
    caller should reload state and retry.
    """
# SQL schema for the event store: the append-only `events` log, a
# denormalized `games_v2` metadata table for queries, and the indexes
# used by the query methods below. Every statement is IF NOT EXISTS,
# so running this on each startup is safe (idempotent).
SCHEMA_SQL = """
-- Events table (append-only log)
CREATE TABLE IF NOT EXISTS events (
id BIGSERIAL PRIMARY KEY,
game_id UUID NOT NULL,
sequence_num INT NOT NULL,
event_type VARCHAR(50) NOT NULL,
player_id VARCHAR(50),
event_data JSONB NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW(),
-- Ensure events are ordered and unique per game
UNIQUE(game_id, sequence_num)
);
-- Games metadata (denormalized for queries, not source of truth)
CREATE TABLE IF NOT EXISTS games_v2 (
id UUID PRIMARY KEY,
room_code VARCHAR(10) NOT NULL,
status VARCHAR(20) DEFAULT 'active', -- active, completed, abandoned
created_at TIMESTAMPTZ DEFAULT NOW(),
started_at TIMESTAMPTZ,
completed_at TIMESTAMPTZ,
num_players INT,
num_rounds INT,
options JSONB,
winner_id VARCHAR(50),
host_id VARCHAR(50),
-- Denormalized for efficient queries
player_ids VARCHAR(50)[] DEFAULT '{}'
);
-- Indexes for common queries
CREATE INDEX IF NOT EXISTS idx_events_game_seq ON events(game_id, sequence_num);
CREATE INDEX IF NOT EXISTS idx_events_type ON events(event_type);
CREATE INDEX IF NOT EXISTS idx_events_player ON events(player_id) WHERE player_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_events_created ON events(created_at);
CREATE INDEX IF NOT EXISTS idx_games_status ON games_v2(status);
CREATE INDEX IF NOT EXISTS idx_games_room ON games_v2(room_code) WHERE status = 'active';
CREATE INDEX IF NOT EXISTS idx_games_players ON games_v2 USING GIN(player_ids);
CREATE INDEX IF NOT EXISTS idx_games_completed ON games_v2(completed_at) WHERE status = 'completed';
"""
class EventStore:
    """
    PostgreSQL-backed event store.

    Provides methods for appending events and querying event history.
    Uses asyncpg for async database access. The events table is an
    append-only log; optimistic concurrency is enforced by the UNIQUE
    (game_id, sequence_num) constraint and surfaced as ConcurrencyError.
    """

    # Shared SQL fragments so the write paths (append / append_batch) and
    # the read paths (get_events / stream_events) cannot drift apart.
    _INSERT_EVENT_SQL = """
        INSERT INTO events (game_id, sequence_num, event_type, player_id, event_data)
        VALUES ($1, $2, $3, $4, $5)
        RETURNING id
    """
    _EVENT_COLUMNS = "event_type, game_id, sequence_num, player_id, event_data, created_at"

    def __init__(self, pool: asyncpg.Pool):
        """
        Initialize event store with connection pool.

        Args:
            pool: asyncpg connection pool.
        """
        self.pool = pool

    @classmethod
    async def create(cls, postgres_url: str) -> "EventStore":
        """
        Create an EventStore with a new connection pool.

        Args:
            postgres_url: PostgreSQL connection URL.

        Returns:
            Configured EventStore instance with schema initialized.
        """
        pool = await asyncpg.create_pool(postgres_url, min_size=2, max_size=10)
        store = cls(pool)
        await store.initialize_schema()
        return store

    async def initialize_schema(self) -> None:
        """Create database tables and indexes if they don't exist (idempotent)."""
        async with self.pool.acquire() as conn:
            await conn.execute(SCHEMA_SQL)
        logger.info("Event store schema initialized")

    async def close(self) -> None:
        """Close the connection pool."""
        await self.pool.close()

    # -------------------------------------------------------------------------
    # Event Writes
    # -------------------------------------------------------------------------
    async def _insert_event(self, conn: asyncpg.Connection, event: GameEvent) -> int:
        """
        Insert one event on an already-acquired connection.

        Translates the unique-constraint violation on (game_id, sequence_num)
        into ConcurrencyError, preserving the driver error as the cause.
        """
        try:
            row = await conn.fetchrow(
                self._INSERT_EVENT_SQL,
                event.game_id,
                event.sequence_num,
                event.event_type.value,
                event.player_id,
                json.dumps(event.data),
            )
            return row["id"]
        except asyncpg.UniqueViolationError as exc:
            raise ConcurrencyError(
                f"Event {event.sequence_num} already exists for game {event.game_id}"
            ) from exc

    async def append(self, event: GameEvent) -> int:
        """
        Append an event to the store.

        Args:
            event: The event to append.

        Returns:
            The database ID of the inserted event.

        Raises:
            ConcurrencyError: If sequence_num already exists for this game.
        """
        async with self.pool.acquire() as conn:
            return await self._insert_event(conn, event)

    async def append_batch(self, events: list[GameEvent]) -> list[int]:
        """
        Append multiple events atomically.

        All events are inserted in a single transaction. If any event fails
        (e.g., duplicate sequence), all are rolled back.

        Args:
            events: List of events to append.

        Returns:
            List of database IDs for inserted events.

        Raises:
            ConcurrencyError: If any sequence_num already exists.
        """
        if not events:
            return []
        async with self.pool.acquire() as conn:
            async with conn.transaction():
                # A ConcurrencyError raised mid-loop aborts the transaction,
                # rolling back every insert made so far.
                return [await self._insert_event(conn, event) for event in events]

    # -------------------------------------------------------------------------
    # Event Reads
    # -------------------------------------------------------------------------
    async def get_events(
        self,
        game_id: str,
        from_sequence: int = 0,
        to_sequence: Optional[int] = None,
    ) -> list[GameEvent]:
        """
        Get events for a game, optionally within a sequence range.

        Args:
            game_id: Game UUID.
            from_sequence: Start sequence (inclusive).
            to_sequence: End sequence (inclusive), or None for all.

        Returns:
            List of events in sequence order.
        """
        # Build one query instead of maintaining two near-identical ones.
        query = f"""
            SELECT {self._EVENT_COLUMNS}
            FROM events
            WHERE game_id = $1 AND sequence_num >= $2
        """
        params: list = [game_id, from_sequence]
        if to_sequence is not None:
            query += " AND sequence_num <= $3"
            params.append(to_sequence)
        query += " ORDER BY sequence_num"
        async with self.pool.acquire() as conn:
            rows = await conn.fetch(query, *params)
        return [self._row_to_event(row) for row in rows]

    async def get_latest_sequence(self, game_id: str) -> int:
        """
        Get the latest sequence number for a game.

        Args:
            game_id: Game UUID.

        Returns:
            Latest sequence number, or -1 if no events exist.
        """
        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(
                """
                SELECT COALESCE(MAX(sequence_num), -1) as seq
                FROM events
                WHERE game_id = $1
                """,
                game_id,
            )
        return row["seq"]

    async def stream_events(
        self,
        game_id: str,
        from_sequence: int = 0,
    ) -> AsyncIterator[GameEvent]:
        """
        Stream events for memory-efficient processing.

        Use this for replaying large games without loading all events into
        memory. The cursor requires an open transaction, which is held for
        the duration of the iteration.

        Args:
            game_id: Game UUID.
            from_sequence: Start sequence (inclusive).

        Yields:
            Events in sequence order.
        """
        async with self.pool.acquire() as conn:
            async with conn.transaction():
                async for row in conn.cursor(
                    f"""
                    SELECT {self._EVENT_COLUMNS}
                    FROM events
                    WHERE game_id = $1 AND sequence_num >= $2
                    ORDER BY sequence_num
                    """,
                    game_id,
                    from_sequence,
                ):
                    yield self._row_to_event(row)

    async def get_event_count(self, game_id: str) -> int:
        """
        Get the total number of events for a game.

        Args:
            game_id: Game UUID.

        Returns:
            Event count.
        """
        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(
                "SELECT COUNT(*) as count FROM events WHERE game_id = $1",
                game_id,
            )
        return row["count"]

    # -------------------------------------------------------------------------
    # Game Metadata
    # -------------------------------------------------------------------------
    async def create_game(
        self,
        game_id: str,
        room_code: str,
        host_id: str,
        options: Optional[dict] = None,
    ) -> None:
        """
        Create a game metadata record.

        Idempotent: re-creating an existing game is a no-op (ON CONFLICT).

        Args:
            game_id: Game UUID.
            room_code: 4-letter room code.
            host_id: Host player ID.
            options: GameOptions as dict.
        """
        async with self.pool.acquire() as conn:
            await conn.execute(
                """
                INSERT INTO games_v2 (id, room_code, host_id, options)
                VALUES ($1, $2, $3, $4)
                ON CONFLICT (id) DO NOTHING
                """,
                game_id,
                room_code,
                host_id,
                json.dumps(options) if options else None,
            )

    async def update_game_started(
        self,
        game_id: str,
        num_players: int,
        num_rounds: int,
        player_ids: list[str],
    ) -> None:
        """
        Update game metadata when game starts.

        Args:
            game_id: Game UUID.
            num_players: Number of players.
            num_rounds: Number of rounds.
            player_ids: List of player IDs (denormalized for queries).
        """
        async with self.pool.acquire() as conn:
            await conn.execute(
                """
                UPDATE games_v2
                SET started_at = NOW(), num_players = $2, num_rounds = $3, player_ids = $4
                WHERE id = $1
                """,
                game_id,
                num_players,
                num_rounds,
                player_ids,
            )

    async def update_game_completed(
        self,
        game_id: str,
        winner_id: Optional[str] = None,
    ) -> None:
        """
        Update game metadata when game completes.

        Args:
            game_id: Game UUID.
            winner_id: ID of the winner, if any.
        """
        async with self.pool.acquire() as conn:
            await conn.execute(
                """
                UPDATE games_v2
                SET status = 'completed', completed_at = NOW(), winner_id = $2
                WHERE id = $1
                """,
                game_id,
                winner_id,
            )

    async def get_active_games(self) -> list[dict]:
        """
        Get all active games for recovery on server restart.

        Returns:
            List of active game metadata dicts, newest first.
        """
        async with self.pool.acquire() as conn:
            rows = await conn.fetch(
                """
                SELECT id, room_code, status, created_at, started_at, num_players,
                       num_rounds, options, host_id, player_ids
                FROM games_v2
                WHERE status = 'active'
                ORDER BY created_at DESC
                """
            )
        return [dict(row) for row in rows]

    async def get_game(self, game_id: str) -> Optional[dict]:
        """
        Get game metadata by ID.

        Args:
            game_id: Game UUID.

        Returns:
            Game metadata dict, or None if not found.
        """
        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(
                """
                SELECT id, room_code, status, created_at, started_at, completed_at,
                       num_players, num_rounds, options, winner_id, host_id, player_ids
                FROM games_v2
                WHERE id = $1
                """,
                game_id,
            )
        return dict(row) if row else None

    # -------------------------------------------------------------------------
    # Helpers
    # -------------------------------------------------------------------------
    def _row_to_event(self, row: asyncpg.Record) -> GameEvent:
        """Convert a database row to a GameEvent."""
        return GameEvent(
            event_type=EventType(row["event_type"]),
            game_id=str(row["game_id"]),
            sequence_num=row["sequence_num"],
            player_id=row["player_id"],
            data=json.loads(row["event_data"]) if row["event_data"] else {},
            # created_at is TIMESTAMPTZ; tag as UTC as before.
            # NOTE(review): .replace() relabels rather than converts — confirm
            # the session timezone is UTC so this cannot mislabel a timestamp.
            timestamp=row["created_at"].replace(tzinfo=timezone.utc),
        )
# Process-wide EventStore singleton, created lazily on first use.
_event_store: Optional[EventStore] = None


async def get_event_store(postgres_url: str) -> EventStore:
    """
    Return the process-wide event store, creating it on first call.

    Args:
        postgres_url: PostgreSQL connection URL (only used on first call).

    Returns:
        The shared EventStore instance.
    """
    global _event_store
    if _event_store is None:
        _event_store = await EventStore.create(postgres_url)
    return _event_store


async def close_event_store() -> None:
    """Close and discard the shared event store, if one was created."""
    global _event_store
    if _event_store is None:
        return
    await _event_store.close()
    _event_store = None

306
server/stores/pubsub.py Normal file
View File

@@ -0,0 +1,306 @@
"""
Redis pub/sub for cross-server game events.
In a multi-server deployment, each server has its own WebSocket connections.
When a game action occurs, the server handling that action needs to notify
all other servers so they can update their connected clients.
This module provides:
- Pub/sub channels per room for targeted broadcasting
- Message types for state updates, player events, and broadcasts
- Async listener loop for handling incoming messages
- Clean subscription management
Usage:
pubsub = GamePubSub(redis_client)
await pubsub.start()
# Subscribe to room events
async def handle_message(msg: PubSubMessage):
print(f"Received: {msg.type} for room {msg.room_code}")
await pubsub.subscribe("ABCD", handle_message)
# Publish to room
await pubsub.publish(PubSubMessage(
type=MessageType.GAME_STATE_UPDATE,
room_code="ABCD",
data={"game_state": {...}},
))
await pubsub.stop()
"""
import asyncio
import json
import logging
from dataclasses import dataclass
from enum import Enum
from typing import Callable, Awaitable, Optional
import redis.asyncio as redis
logger = logging.getLogger(__name__)
class MessageType(str, Enum):
    """Kinds of messages carried over the room pub/sub channels.

    The values are the wire strings embedded in PubSubMessage JSON, so
    they must never change once deployed.
    """

    GAME_STATE_UPDATE = "game_state_update"  # state changed; peers refresh their cache
    PLAYER_JOINED = "player_joined"          # presence: player connected to the room
    PLAYER_LEFT = "player_left"              # presence: player disconnected
    ROOM_CLOSED = "room_closed"              # room closing (game ended or abandoned)
    BROADCAST = "broadcast"                  # generic fan-out to all clients in room
@dataclass
class PubSubMessage:
    """
    Message sent via Redis pub/sub.

    Attributes:
        type: Message type (determines how handlers process it).
        room_code: Room this message is for.
        data: Message payload (type-specific).
        sender_id: Optional server ID of sender (to avoid echo).
    """
    type: MessageType
    room_code: str
    data: dict
    sender_id: Optional[str] = None

    def to_json(self) -> str:
        """Serialize to the JSON wire format used on Redis channels."""
        payload = {
            "type": self.type.value,
            "room_code": self.room_code,
            "data": self.data,
            "sender_id": self.sender_id,
        }
        return json.dumps(payload)

    @classmethod
    def from_json(cls, raw: str) -> "PubSubMessage":
        """Parse a message previously produced by to_json()."""
        payload = json.loads(raw)
        return cls(
            type=MessageType(payload["type"]),
            room_code=payload["room_code"],
            data=payload.get("data", {}),
            sender_id=payload.get("sender_id"),
        )
# Type alias for message handlers: an async callable that receives one
# decoded PubSubMessage and returns nothing.
MessageHandler = Callable[[PubSubMessage], Awaitable[None]]
class GamePubSub:
    """
    Redis pub/sub for cross-server game events.

    Manages subscriptions to room channels and dispatches incoming
    messages to registered handlers. Messages published by this server
    instance (matched by server_id) are ignored on receipt to avoid echo.
    """

    CHANNEL_PREFIX = "golf:room:"

    def __init__(
        self,
        redis_client: redis.Redis,
        server_id: str = "default",
    ):
        """
        Initialize pub/sub with Redis client.

        Args:
            redis_client: Async Redis client.
            server_id: Unique ID for this server instance.
        """
        self.redis = redis_client
        self.server_id = server_id
        self.pubsub = redis_client.pubsub()
        # Channel name -> handlers registered for that channel.
        self._handlers: dict[str, list[MessageHandler]] = {}
        self._running = False
        self._task: Optional[asyncio.Task] = None

    def _channel(self, room_code: str) -> str:
        """Get Redis channel name for a room."""
        return f"{self.CHANNEL_PREFIX}{room_code}"

    async def subscribe(
        self,
        room_code: str,
        handler: MessageHandler,
    ) -> None:
        """
        Subscribe to room events.

        Args:
            room_code: Room to subscribe to.
            handler: Async function to call on each message.
        """
        channel = self._channel(room_code)
        first_handler = channel not in self._handlers
        # Register the handler BEFORE awaiting the Redis SUBSCRIBE so a
        # message delivered right after subscription cannot be dispatched
        # while the handler list is still empty (and silently dropped).
        self._handlers.setdefault(channel, []).append(handler)
        if first_handler:
            await self.pubsub.subscribe(channel)
            logger.debug(f"Subscribed to channel {channel}")

    async def unsubscribe(self, room_code: str) -> None:
        """
        Unsubscribe from room events, dropping all handlers for the room.

        Args:
            room_code: Room to unsubscribe from.
        """
        channel = self._channel(room_code)
        if channel in self._handlers:
            del self._handlers[channel]
            await self.pubsub.unsubscribe(channel)
            logger.debug(f"Unsubscribed from channel {channel}")

    async def remove_handler(self, room_code: str, handler: MessageHandler) -> None:
        """
        Remove a specific handler from a room subscription.

        Drops the Redis subscription entirely once the last handler for
        the room is removed.

        Args:
            room_code: Room the handler was registered for.
            handler: Handler to remove.
        """
        channel = self._channel(room_code)
        if channel in self._handlers:
            handlers = self._handlers[channel]
            if handler in handlers:
                handlers.remove(handler)
            if not handlers:
                await self.unsubscribe(room_code)

    async def publish(self, message: PubSubMessage) -> int:
        """
        Publish a message to a room's channel.

        Note: mutates ``message.sender_id`` to this server's ID so that
        receivers (including this process) can filter out their own messages.

        Args:
            message: Message to publish.

        Returns:
            Number of subscribers that received the message.
        """
        message.sender_id = self.server_id
        channel = self._channel(message.room_code)
        count = await self.redis.publish(channel, message.to_json())
        logger.debug(f"Published {message.type.value} to {channel} ({count} receivers)")
        return count

    async def start(self) -> None:
        """Start the background listener task (idempotent)."""
        if self._running:
            return
        self._running = True
        self._task = asyncio.create_task(self._listen())
        logger.info("GamePubSub listener started")

    async def stop(self) -> None:
        """Stop listening, close the pub/sub connection, and drop handlers."""
        self._running = False
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
            self._task = None
        await self.pubsub.close()
        self._handlers.clear()
        logger.info("GamePubSub listener stopped")

    async def _listen(self) -> None:
        """Main listener loop: poll Redis and dispatch incoming messages."""
        while self._running:
            try:
                message = await self.pubsub.get_message(
                    ignore_subscribe_messages=True,
                    timeout=1.0,
                )
                if message and message["type"] == "message":
                    await self._handle_message(message)
            except asyncio.CancelledError:
                break
            except redis.ConnectionError as e:
                # Back off briefly; get_message retries on the next pass.
                logger.error(f"PubSub connection error: {e}")
                await asyncio.sleep(1)
            except Exception as e:
                # Keep the loop alive: one bad message must not kill
                # cross-server delivery for every room.
                logger.error(f"PubSub listener error: {e}", exc_info=True)
                await asyncio.sleep(1)

    async def _handle_message(self, raw_message: dict) -> None:
        """Decode one raw Redis message and fan it out to channel handlers."""
        try:
            channel = raw_message["channel"]
            if isinstance(channel, bytes):
                channel = channel.decode()
            data = raw_message["data"]
            if isinstance(data, bytes):
                data = data.decode()
            msg = PubSubMessage.from_json(data)
            # Skip messages from ourselves (we already applied the change).
            if msg.sender_id == self.server_id:
                return
            # Iterate over a snapshot so a handler may (un)subscribe safely.
            for handler in list(self._handlers.get(channel, [])):
                try:
                    await handler(msg)
                except Exception as e:
                    logger.error(f"Error in pubsub handler: {e}", exc_info=True)
        except json.JSONDecodeError as e:
            logger.warning(f"Invalid JSON in pubsub message: {e}")
        except Exception as e:
            logger.error(f"Error processing pubsub message: {e}", exc_info=True)
# Process-wide GamePubSub singleton, created lazily on first use.
_pubsub: Optional[GamePubSub] = None


async def get_pubsub(redis_client: redis.Redis, server_id: str = "default") -> GamePubSub:
    """
    Return the process-wide pub/sub instance, creating it on first call.

    Args:
        redis_client: Redis client to use (only honored on first call).
        server_id: Unique ID for this server (only honored on first call).

    Returns:
        GamePubSub instance.
    """
    global _pubsub
    if _pubsub is None:
        _pubsub = GamePubSub(redis_client, server_id)
    return _pubsub


async def close_pubsub() -> None:
    """Stop and discard the process-wide pub/sub instance, if any."""
    global _pubsub
    if _pubsub is None:
        return
    await _pubsub.stop()
    _pubsub = None

View File

@@ -0,0 +1,389 @@
"""
Redis-backed live game state cache.
The state cache stores live game state for fast access during gameplay.
Redis provides:
- Sub-millisecond reads/writes for active game state
- TTL expiration for abandoned games
- Pub/sub for multi-server synchronization
- Atomic operations via pipelines
This is a CACHE, not the source of truth. Events in PostgreSQL are authoritative.
If Redis data is lost, games can be recovered from the event store.
Key patterns:
- golf:room:{room_code} -> Hash (room metadata)
- golf:game:{game_id} -> JSON (full game state)
- golf:room:{room_code}:players -> Set (connected player IDs)
- golf:rooms:active -> Set (active room codes)
- golf:player:{player_id}:room -> String (player's current room)
"""
import json
import logging
from datetime import datetime, timezone, timedelta
from typing import Optional
import redis.asyncio as redis
logger = logging.getLogger(__name__)
class StateCache:
    """Redis-backed live game state cache.

    This is a CACHE, not the source of truth: events in PostgreSQL are
    authoritative. Every key carries a TTL so abandoned games expire.
    The underlying client uses decode_responses=False, so replies are
    bytes and are decoded via ``_decode``.
    """

    # Key patterns
    ROOM_KEY = "golf:room:{room_code}"
    GAME_KEY = "golf:game:{game_id}"
    ROOM_PLAYERS_KEY = "golf:room:{room_code}:players"
    ACTIVE_ROOMS_KEY = "golf:rooms:active"
    PLAYER_ROOM_KEY = "golf:player:{player_id}:room"

    # TTLs
    ROOM_TTL = timedelta(hours=4)  # Inactive rooms expire
    GAME_TTL = timedelta(hours=4)

    def __init__(self, redis_client: redis.Redis):
        """
        Initialize state cache with Redis client.

        Args:
            redis_client: Async Redis client (decode_responses=False).
        """
        self.redis = redis_client

    # -------------------------------------------------------------------------
    # Internal helpers (deduplicate the bytes-decode and TTL-conversion idioms)
    # -------------------------------------------------------------------------
    @staticmethod
    def _decode(value) -> str:
        """Decode a Redis reply to str; passes through values already decoded."""
        return value.decode() if isinstance(value, bytes) else value

    @staticmethod
    def _seconds(ttl: timedelta) -> int:
        """Whole seconds of a TTL timedelta, as Redis EXPIRE/EX expects."""
        return int(ttl.total_seconds())

    @classmethod
    async def create(cls, redis_url: str) -> "StateCache":
        """
        Create a StateCache with a new Redis connection.

        Args:
            redis_url: Redis connection URL.

        Returns:
            Configured StateCache instance.
        """
        client = redis.from_url(redis_url, decode_responses=False)
        # Fail fast at startup if Redis is unreachable.
        await client.ping()
        logger.info("StateCache connected to Redis")
        return cls(client)

    async def close(self) -> None:
        """Close the Redis connection."""
        # NOTE(review): newer redis-py prefers aclose(); close() kept for
        # compatibility with the client version pinned by this project.
        await self.redis.close()

    # -------------------------------------------------------------------------
    # Room Operations
    # -------------------------------------------------------------------------
    async def create_room(
        self,
        room_code: str,
        game_id: str,
        host_id: str,
        server_id: str = "default",
    ) -> None:
        """
        Create a new room.

        Args:
            room_code: 4-letter room code.
            game_id: UUID of the game.
            host_id: Player ID of the host.
            server_id: Server instance ID (for multi-server).
        """
        pipe = self.redis.pipeline()
        room_key = self.ROOM_KEY.format(room_code=room_code)
        now = datetime.now(timezone.utc).isoformat()
        # Room metadata
        pipe.hset(
            room_key,
            mapping={
                "game_id": game_id,
                "host_id": host_id,
                "status": "waiting",
                "server_id": server_id,
                "created_at": now,
            },
        )
        pipe.expire(room_key, self._seconds(self.ROOM_TTL))
        # Add to active rooms
        pipe.sadd(self.ACTIVE_ROOMS_KEY, room_code)
        # Track host's room
        pipe.set(
            self.PLAYER_ROOM_KEY.format(player_id=host_id),
            room_code,
            ex=self._seconds(self.ROOM_TTL),
        )
        await pipe.execute()
        logger.debug(f"Created room {room_code} with game {game_id}")

    async def get_room(self, room_code: str) -> Optional[dict]:
        """
        Get room metadata.

        Args:
            room_code: Room code to look up.

        Returns:
            Room metadata dict (str keys/values), or None if not found.
        """
        data = await self.redis.hgetall(self.ROOM_KEY.format(room_code=room_code))
        if not data:
            return None
        return {self._decode(k): self._decode(v) for k, v in data.items()}

    async def room_exists(self, room_code: str) -> bool:
        """
        Check if a room exists.

        Args:
            room_code: Room code to check.

        Returns:
            True if room exists.
        """
        return await self.redis.exists(self.ROOM_KEY.format(room_code=room_code)) > 0

    async def delete_room(self, room_code: str) -> None:
        """
        Delete a room and all associated data (player mappings, game state).

        Args:
            room_code: Room code to delete.
        """
        room = await self.get_room(room_code)
        if not room:
            return
        pipe = self.redis.pipeline()
        # Get players to clean up their player->room mappings
        players_key = self.ROOM_PLAYERS_KEY.format(room_code=room_code)
        players = await self.redis.smembers(players_key)
        for player_id in players:
            pipe.delete(self.PLAYER_ROOM_KEY.format(player_id=self._decode(player_id)))
        # Delete room data
        pipe.delete(self.ROOM_KEY.format(room_code=room_code))
        pipe.delete(players_key)
        pipe.srem(self.ACTIVE_ROOMS_KEY, room_code)
        # Delete game state if exists
        if "game_id" in room:
            pipe.delete(self.GAME_KEY.format(game_id=room["game_id"]))
        await pipe.execute()
        logger.debug(f"Deleted room {room_code}")

    async def get_active_rooms(self) -> set[str]:
        """
        Get all active room codes.

        Returns:
            Set of active room codes.
        """
        rooms = await self.redis.smembers(self.ACTIVE_ROOMS_KEY)
        return {self._decode(r) for r in rooms}

    # -------------------------------------------------------------------------
    # Player Operations
    # -------------------------------------------------------------------------
    async def add_player_to_room(self, room_code: str, player_id: str) -> None:
        """
        Add a player to a room and refresh the room TTL.

        Args:
            room_code: Room to add player to.
            player_id: Player to add.
        """
        pipe = self.redis.pipeline()
        pipe.sadd(self.ROOM_PLAYERS_KEY.format(room_code=room_code), player_id)
        pipe.set(
            self.PLAYER_ROOM_KEY.format(player_id=player_id),
            room_code,
            ex=self._seconds(self.ROOM_TTL),
        )
        # Refresh room TTL on activity
        pipe.expire(
            self.ROOM_KEY.format(room_code=room_code),
            self._seconds(self.ROOM_TTL),
        )
        await pipe.execute()

    async def remove_player_from_room(self, room_code: str, player_id: str) -> None:
        """
        Remove a player from a room.

        Args:
            room_code: Room to remove player from.
            player_id: Player to remove.
        """
        pipe = self.redis.pipeline()
        pipe.srem(self.ROOM_PLAYERS_KEY.format(room_code=room_code), player_id)
        pipe.delete(self.PLAYER_ROOM_KEY.format(player_id=player_id))
        await pipe.execute()

    async def get_room_players(self, room_code: str) -> set[str]:
        """
        Get player IDs in a room.

        Args:
            room_code: Room to query.

        Returns:
            Set of player IDs.
        """
        players = await self.redis.smembers(
            self.ROOM_PLAYERS_KEY.format(room_code=room_code)
        )
        return {self._decode(p) for p in players}

    async def get_player_room(self, player_id: str) -> Optional[str]:
        """
        Get the room a player is in.

        Args:
            player_id: Player to look up.

        Returns:
            Room code, or None if not in a room.
        """
        room = await self.redis.get(self.PLAYER_ROOM_KEY.format(player_id=player_id))
        if room is None:
            return None
        return self._decode(room)

    # -------------------------------------------------------------------------
    # Game State Operations
    # -------------------------------------------------------------------------
    async def save_game_state(self, game_id: str, state: dict) -> None:
        """
        Save full game state.

        Args:
            game_id: Game UUID.
            state: Game state dict (will be JSON serialized).
        """
        await self.redis.set(
            self.GAME_KEY.format(game_id=game_id),
            json.dumps(state),
            ex=self._seconds(self.GAME_TTL),
        )

    async def get_game_state(self, game_id: str) -> Optional[dict]:
        """
        Get full game state.

        Args:
            game_id: Game UUID.

        Returns:
            Game state dict, or None if not found.
        """
        data = await self.redis.get(self.GAME_KEY.format(game_id=game_id))
        if not data:
            return None
        return json.loads(self._decode(data))

    async def update_game_state(self, game_id: str, updates: dict) -> None:
        """
        Partial update to game state (read-merge-write; not atomic).

        Args:
            game_id: Game UUID.
            updates: Fields to update.
        """
        state = await self.get_game_state(game_id)
        if state:
            state.update(updates)
            await self.save_game_state(game_id, state)

    async def delete_game_state(self, game_id: str) -> None:
        """
        Delete game state.

        Args:
            game_id: Game UUID.
        """
        await self.redis.delete(self.GAME_KEY.format(game_id=game_id))

    # -------------------------------------------------------------------------
    # Room Status
    # -------------------------------------------------------------------------
    async def set_room_status(self, room_code: str, status: str) -> None:
        """
        Update room status.

        Args:
            room_code: Room to update.
            status: New status (waiting, playing, finished).
        """
        await self.redis.hset(
            self.ROOM_KEY.format(room_code=room_code),
            "status",
            status,
        )

    async def refresh_room_ttl(self, room_code: str) -> None:
        """
        Refresh room (and associated game state) TTL on activity.

        Args:
            room_code: Room to refresh.
        """
        pipe = self.redis.pipeline()
        pipe.expire(
            self.ROOM_KEY.format(room_code=room_code),
            self._seconds(self.ROOM_TTL),
        )
        room = await self.get_room(room_code)
        if room and "game_id" in room:
            pipe.expire(
                self.GAME_KEY.format(game_id=room["game_id"]),
                self._seconds(self.GAME_TTL),
            )
        await pipe.execute()
# Process-wide StateCache singleton, created lazily on first use.
_state_cache: Optional[StateCache] = None


async def get_state_cache(redis_url: str) -> StateCache:
    """
    Return the process-wide state cache, creating it on first call.

    Args:
        redis_url: Redis connection URL (only used on first call).

    Returns:
        The shared StateCache instance.
    """
    global _state_cache
    if _state_cache is None:
        _state_cache = await StateCache.create(redis_url)
    return _state_cache


async def close_state_cache() -> None:
    """Close and discard the shared state cache, if one was created."""
    global _state_cache
    if _state_cache is None:
        return
    await _state_cache.close()
    _state_cache = None

1029
server/stores/user_store.py Normal file

File diff suppressed because it is too large Load Diff