feat: enhance weekly report and realtime notifications
Weekly Report (fix-weekly-report):
- Remove 5-task limit, show all tasks per category
- Add blocked tasks with blocker_reason and blocked_since
- Add next week tasks (due in coming week)
- Add assignee_name, completed_at, days_overdue to task details
- Frontend collapsible sections for each task category
- 8 new tests for enhanced report content

Realtime Notifications (fix-realtime-notifications):
- SQLAlchemy event-based notification publishing
- Redis Pub/Sub for multi-process broadcast
- Fix soft rollback handler stacking issue
- Fix ping scheduling drift (send immediately when interval expires)
- Frontend NotificationContext with WebSocket reconnection

Spec Fixes:
- Add missing ## Purpose sections to 5 specs

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
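The diff below covers only the publishing half of the realtime pipeline (queue on the SQLAlchemy session, push to Redis after commit). For orientation, here is a rough sketch of the subscribing half that a WebSocket endpoint would run; it is not part of the shown hunks. It assumes redis-py's asyncio client, a Starlette/FastAPI-style WebSocket, and a guessed channel format, since the real format lives in app.core.redis_pubsub.get_channel_name, which is imported but not shown here.

```python
# Sketch only: subscriber half of the Redis Pub/Sub pipeline (not in this diff).
# Assumptions: redis-py asyncio client, Starlette/FastAPI-style WebSocket,
# and a guessed channel format (the real one is app.core.redis_pubsub.get_channel_name).
import redis.asyncio as aioredis


def guessed_channel_name(user_id: str) -> str:
    # Hypothetical format; stands in for get_channel_name(user_id).
    return f"notifications:{user_id}"


async def forward_notifications(user_id: str, websocket) -> None:
    """Relay every message published for this user to one WebSocket client."""
    client = aioredis.Redis.from_url("redis://localhost:6379", decode_responses=True)
    pubsub = client.pubsub()
    await pubsub.subscribe(guessed_channel_name(user_id))
    try:
        async for message in pubsub.listen():
            if message["type"] != "message":
                continue  # skip subscribe/unsubscribe confirmations
            await websocket.send_text(message["data"])  # payload is already JSON from the publisher
    finally:
        await pubsub.unsubscribe()
        await client.close()
```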
@@ -1,9 +1,95 @@
import json
import uuid
import re
from typing import List, Optional
import asyncio
import logging
import threading
from datetime import datetime
from typing import List, Optional, Dict, Set
from sqlalchemy.orm import Session
from sqlalchemy import event

from app.models import User, Notification, Task, Comment, Mention
from app.core.redis_pubsub import publish_notification as redis_publish, get_channel_name
from app.core.redis import get_redis_sync

logger = logging.getLogger(__name__)

# Thread-safe lock for module-level state
_lock = threading.Lock()
# Module-level queue for notifications pending publish after commit
_pending_publish: Dict[int, List[dict]] = {}
# Track which sessions have handlers registered
_registered_sessions: Set[int] = set()


def _sync_publish(user_id: str, data: dict):
    """Sync fallback to publish notification via Redis when no event loop available."""
    try:
        redis_client = get_redis_sync()
        channel = get_channel_name(user_id)
        message = json.dumps(data, default=str)
        redis_client.publish(channel, message)
        logger.debug(f"Sync published notification to channel {channel}")
    except Exception as e:
        logger.error(f"Failed to sync publish notification to Redis: {e}")


def _cleanup_session(session_id: int, remove_registration: bool = True):
    """Clean up session state after commit/rollback. Thread-safe.

    Args:
        session_id: The session ID to clean up
        remove_registration: If True, also remove from _registered_sessions.
            Set to False for soft_rollback to avoid handler stacking.
    """
    with _lock:
        if remove_registration:
            _registered_sessions.discard(session_id)
        return _pending_publish.pop(session_id, [])


def _register_session_handlers(db: Session, session_id: int):
    """Register after_commit, after_rollback, and after_soft_rollback handlers for a session."""
    with _lock:
        if session_id in _registered_sessions:
            return
        _registered_sessions.add(session_id)

    @event.listens_for(db, "after_commit", once=True)
    def _after_commit(session):
        notifications = _cleanup_session(session_id)
        if notifications:
            try:
                loop = asyncio.get_running_loop()
                for n in notifications:
                    loop.create_task(_async_publish(n["user_id"], n["data"]))
            except RuntimeError:
                # No running event loop - use sync fallback
                logger.info(f"No event loop, using sync publish for {len(notifications)} notification(s)")
                for n in notifications:
                    _sync_publish(n["user_id"], n["data"])

    @event.listens_for(db, "after_rollback", once=True)
    def _after_rollback(session):
        cleared = _cleanup_session(session_id)
        if cleared:
            logger.debug(f"Cleared {len(cleared)} pending notification(s) after rollback")

    @event.listens_for(db, "after_soft_rollback", once=True)
    def _after_soft_rollback(session, previous_transaction):
        # Only clear pending notifications, keep handler registration to avoid stacking
        cleared = _cleanup_session(session_id, remove_registration=False)
        if cleared:
            logger.debug(f"Cleared {len(cleared)} pending notification(s) after soft rollback")


async def _async_publish(user_id: str, data: dict):
    """Async helper to publish notification to Redis."""
    try:
        await redis_publish(user_id, data)
    except Exception as e:
        logger.error(f"Failed to publish notification to Redis: {e}")


class NotificationService:
@@ -11,6 +97,56 @@ class NotificationService:

    MAX_MENTIONS_PER_COMMENT = 10

    @staticmethod
    def notification_to_dict(notification: Notification) -> dict:
        """Convert a Notification to a dict for publishing."""
        created_at = notification.created_at
        if created_at is None:
            created_at = datetime.utcnow()
        return {
            "id": notification.id,
            "type": notification.type,
            "reference_type": notification.reference_type,
            "reference_id": notification.reference_id,
            "title": notification.title,
            "message": notification.message,
            "is_read": notification.is_read,
            "created_at": created_at.isoformat() if created_at else None,
        }

    @staticmethod
    async def publish_notifications(notifications: List[Notification]) -> None:
        """Publish notifications to Redis for real-time WebSocket delivery."""
        for notification in notifications:
            if notification and notification.user_id:
                data = NotificationService.notification_to_dict(notification)
                await redis_publish(notification.user_id, data)

    @staticmethod
    async def publish_notification(notification: Optional[Notification]) -> None:
        """Publish a single notification to Redis."""
        if notification:
            await NotificationService.publish_notifications([notification])

    @staticmethod
    def _queue_for_publish(db: Session, notification: Notification):
        """Queue notification for auto-publish after commit. Thread-safe."""
        session_id = id(db)

        # Register handlers first (has its own lock)
        _register_session_handlers(db, session_id)

        # Store notification data (not object) for publishing
        notification_data = {
            "user_id": notification.user_id,
            "data": NotificationService.notification_to_dict(notification),
        }

        with _lock:
            if session_id not in _pending_publish:
                _pending_publish[session_id] = []
            _pending_publish[session_id].append(notification_data)

    @staticmethod
    def create_notification(
        db: Session,
@@ -21,7 +157,7 @@ class NotificationService:
        title: str,
        message: Optional[str] = None,
    ) -> Notification:
        """Create a notification for a user."""
        """Create a notification for a user. Auto-publishes via Redis after commit."""
        notification = Notification(
            id=str(uuid.uuid4()),
            user_id=user_id,
@@ -32,6 +168,10 @@ class NotificationService:
            message=message,
        )
        db.add(notification)

        # Queue for auto-publish after commit
        NotificationService._queue_for_publish(db, notification)

        return notification

    @staticmethod
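From the caller's side, the auto-publish mechanism above means a service only creates the notification and commits; no explicit publish call is needed. The following usage sketch illustrates that flow. The keyword arguments to create_notification and the import path are assumptions inferred from the Notification fields in notification_to_dict, since the full parameter list sits between the hunks shown above and may differ.

```python
# Usage sketch only. The create_notification keyword arguments and the import
# path are assumed; the real parameter list is outside the hunks shown above.
from app.services.notification_service import NotificationService  # assumed path


def notify_assignment(db, task, assignee_id: str) -> None:
    NotificationService.create_notification(
        db,
        user_id=assignee_id,            # assumed parameter name
        type="task_assigned",           # assumed parameter name and value
        reference_type="task",          # assumed
        reference_id=task.id,           # assumed
        title="You were assigned a task",
        message=task.title,
    )
    # Nothing is published yet: the payload is queued in _pending_publish under id(db).
    db.commit()  # after_commit fires -> payload goes to Redis (async task or sync fallback)
    # A rollback at this point would discard the queued payload instead.
```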