feat: enhance weekly report and realtime notifications

Weekly Report (fix-weekly-report):
- Remove 5-task limit, show all tasks per category
- Add blocked tasks with blocker_reason and blocked_since
- Add next week tasks (due in coming week)
- Add assignee_name, completed_at, days_overdue to task details
- Frontend collapsible sections for each task category
- 8 new tests for enhanced report content

Realtime Notifications (fix-realtime-notifications):
- SQLAlchemy event-based notification publishing
- Redis Pub/Sub for multi-process broadcast
- Fix soft rollback handler stacking issue
- Fix ping scheduling drift (send immediately when interval expires)
- Frontend NotificationContext with WebSocket reconnection

Spec Fixes:
- Add missing ## Purpose sections to 5 specs

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Author: beabigegg
Date: 2025-12-30 20:52:08 +08:00
Parent: 10db2c9d1f
Commit: 64874d5425
25 changed files with 1034 additions and 140 deletions


@@ -85,7 +85,7 @@ async def create_blocker(
     # Update task blocker_flag
     task.blocker_flag = True

-    # Notify project owner
+    # Notify project owner (auto-publishes after commit)
     NotificationService.notify_blocker(db, task, current_user, blocker_data.reason)

     # Audit log
@@ -150,7 +150,7 @@ async def resolve_blocker(
     if other_blockers == 0:
         task.blocker_flag = False

-    # Notify reporter that blocker is resolved
+    # Notify reporter that blocker is resolved (auto-publishes after commit)
    NotificationService.notify_blocker_resolved(db, task, current_user, blocker.reported_by)

     db.commit()


@@ -104,10 +104,10 @@ async def create_comment(
     db.add(comment)
     db.flush()

-    # Process mentions and create notifications
+    # Process mentions and create notifications (auto-publishes after commit)
     NotificationService.process_mentions(db, comment, task, current_user)

-    # Notify parent comment author if this is a reply
+    # Notify parent comment author if this is a reply (auto-publishes after commit)
     if parent_author_id:
         NotificationService.notify_comment_reply(db, comment, task, current_user, parent_author_id)


@@ -1,15 +1,23 @@
 import asyncio
 import logging
+import time
 from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Query
 from sqlalchemy.orm import Session

 from app.core.database import SessionLocal
 from app.core.security import decode_access_token
 from app.core.redis import get_redis_sync
-from app.models import User
+from app.models import User, Notification
 from app.services.websocket_manager import manager
+from app.core.redis_pubsub import NotificationSubscriber

 logger = logging.getLogger(__name__)

 router = APIRouter(tags=["websocket"])

+# Heartbeat configuration
+PING_INTERVAL = 60.0  # Send ping after this many seconds of no messages
+PONG_TIMEOUT = 30.0   # Disconnect if no pong received within this time after ping
+

 async def get_user_from_token(token: str) -> tuple[str | None, User | None]:
     """Validate token and return user_id and user object."""
@@ -38,6 +46,46 @@ async def get_user_from_token(token: str) -> tuple[str | None, User | None]:
         db.close()


+async def get_unread_notifications(user_id: str) -> list[dict]:
+    """Query all unread notifications for a user."""
+    db = SessionLocal()
+    try:
+        notifications = (
+            db.query(Notification)
+            .filter(Notification.user_id == user_id, Notification.is_read == False)
+            .order_by(Notification.created_at.desc())
+            .all()
+        )
+        return [
+            {
+                "id": n.id,
+                "type": n.type,
+                "reference_type": n.reference_type,
+                "reference_id": n.reference_id,
+                "title": n.title,
+                "message": n.message,
+                "is_read": n.is_read,
+                "created_at": n.created_at.isoformat() if n.created_at else None,
+            }
+            for n in notifications
+        ]
+    finally:
+        db.close()
+
+
+async def get_unread_count(user_id: str) -> int:
+    """Get the count of unread notifications for a user."""
+    db = SessionLocal()
+    try:
+        return (
+            db.query(Notification)
+            .filter(Notification.user_id == user_id, Notification.is_read == False)
+            .count()
+        )
+    finally:
+        db.close()
+
+
 @router.websocket("/ws/notifications")
 async def websocket_notifications(
     websocket: WebSocket,
@@ -49,12 +97,15 @@ async def websocket_notifications(
     Connect with: ws://host/ws/notifications?token=<jwt_token>

     Messages sent by server:
     - {"type": "connected", "data": {"user_id": "...", "message": "..."}} - Connection success
+    - {"type": "unread_sync", "data": {"notifications": [...], "unread_count": N}} - All unread on connect
     - {"type": "notification", "data": {...}} - New notification
     - {"type": "unread_count", "data": {"unread_count": N}} - Unread count update
-    - {"type": "pong"} - Response to ping
+    - {"type": "ping"} - Server keepalive ping
+    - {"type": "pong"} - Response to client ping

     Messages accepted from client:
-    - {"type": "ping"} - Keepalive ping
+    - {"type": "ping"} - Client keepalive ping
     """
     user_id, user = await get_user_from_token(token)
@@ -63,6 +114,25 @@ async def websocket_notifications(
         return

     await manager.connect(websocket, user_id)
+    subscriber = NotificationSubscriber(user_id)
+
+    async def handle_redis_message(notification_data: dict):
+        """Forward Redis pub/sub messages to WebSocket."""
+        try:
+            await websocket.send_json({
+                "type": "notification",
+                "data": notification_data,
+            })
+            # Also send updated unread count
+            unread_count = await get_unread_count(user_id)
+            await websocket.send_json({
+                "type": "unread_count",
+                "data": {"unread_count": unread_count},
+            })
+        except Exception as e:
+            logger.error(f"Error forwarding notification to WebSocket: {e}")
+
+    redis_task = None

     try:
         # Send initial connection success message
@@ -71,28 +141,88 @@ async def websocket_notifications(
             "data": {"user_id": user_id, "message": "Connected to notification service"},
         })

+        # Send all unread notifications on connect (unread_sync)
+        unread_notifications = await get_unread_notifications(user_id)
+        await websocket.send_json({
+            "type": "unread_sync",
+            "data": {
+                "notifications": unread_notifications,
+                "unread_count": len(unread_notifications),
+            },
+        })
+
+        # Start Redis pub/sub subscription in background
+        await subscriber.start()
+        redis_task = asyncio.create_task(subscriber.listen(handle_redis_message))
+
+        # Heartbeat tracking
+        waiting_for_pong = False
+        ping_sent_at = 0.0
+        last_activity = time.time()
+
         while True:
+            # Calculate appropriate timeout based on state
+            if waiting_for_pong:
+                # When waiting for pong, use remaining pong timeout
+                remaining = PONG_TIMEOUT - (time.time() - ping_sent_at)
+                if remaining <= 0:
+                    logger.warning(f"Pong timeout for user {user_id}, disconnecting")
+                    break
+                timeout = remaining
+            else:
+                # When not waiting, use remaining ping interval
+                remaining = PING_INTERVAL - (time.time() - last_activity)
+                if remaining <= 0:
+                    # Time to send ping immediately
+                    try:
+                        await websocket.send_json({"type": "ping"})
+                        waiting_for_pong = True
+                        ping_sent_at = time.time()
+                        last_activity = ping_sent_at
+                        timeout = PONG_TIMEOUT
+                    except Exception:
+                        break
+                else:
+                    timeout = remaining
+
             try:
-                # Wait for messages from client (ping/pong for keepalive)
+                # Wait for messages from client
                 data = await asyncio.wait_for(
                     websocket.receive_json(),
-                    timeout=60.0  # 60 second timeout
+                    timeout=timeout
                 )

-                # Handle ping message
-                if data.get("type") == "ping":
+                last_activity = time.time()
+                msg_type = data.get("type")
+
+                # Handle ping message from client
+                if msg_type == "ping":
                     await websocket.send_json({"type": "pong"})
+                # Handle pong message from client (response to our ping)
+                elif msg_type == "pong":
+                    waiting_for_pong = False
+                    logger.debug(f"Pong received from user {user_id}")

             except asyncio.TimeoutError:
-                # Send keepalive ping if no message received
-                try:
-                    await websocket.send_json({"type": "ping"})
-                except Exception:
-                    break
+                if waiting_for_pong:
+                    # Strict timeout check
+                    if time.time() - ping_sent_at >= PONG_TIMEOUT:
+                        logger.warning(f"Pong timeout for user {user_id}, disconnecting")
+                        break
+                # If not waiting_for_pong, loop will handle sending ping at top

     except WebSocketDisconnect:
         pass
-    except Exception:
-        pass
+    except Exception as e:
+        logger.error(f"WebSocket error: {e}")
     finally:
+        # Clean up Redis subscription
+        if redis_task:
+            redis_task.cancel()
+            try:
+                await redis_task
+            except asyncio.CancelledError:
+                pass
+        await subscriber.stop()
+
         await manager.disconnect(websocket, user_id)
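
For context, a minimal client for the protocol documented in this endpoint might look like the sketch below. It is an illustration only: it assumes the third-party `websockets` package and a local dev server URL, the JWT is a placeholder, and only the connect / unread_sync / ping-pong flow from the docstring above is exercised.

    # Hypothetical client sketch for the protocol documented above.
    # Assumes `pip install websockets`; URL and token are placeholders.
    import asyncio
    import json

    import websockets

    async def main() -> None:
        token = "<jwt_token>"  # placeholder
        uri = f"ws://localhost:8000/ws/notifications?token={token}"
        async with websockets.connect(uri) as ws:
            async for raw in ws:
                msg = json.loads(raw)
                if msg["type"] == "unread_sync":
                    print("unread on connect:", msg["data"]["unread_count"])
                elif msg["type"] == "notification":
                    print("new notification:", msg["data"]["title"])
                elif msg["type"] == "ping":
                    # Answer the server keepalive so the heartbeat loop
                    # does not disconnect us after PONG_TIMEOUT
                    await ws.send(json.dumps({"type": "pong"}))

    asyncio.run(main())

Replying to the server's `ping` within PONG_TIMEOUT (30 s) is what keeps the connection alive under the heartbeat loop above.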


@@ -0,0 +1,122 @@
+"""Redis Pub/Sub service for cross-process notification broadcasting."""
+import json
+import logging
+from typing import Optional, Callable, Any
+
+import redis.asyncio as aioredis
+
+from app.core.config import settings
+
+logger = logging.getLogger(__name__)
+
+# Global async Redis client for pub/sub
+_pubsub_redis: Optional[aioredis.Redis] = None
+
+
+def get_channel_name(user_id: str) -> str:
+    """Get the Redis channel name for a user's notifications."""
+    return f"notifications:{user_id}"
+
+
+async def get_pubsub_redis() -> aioredis.Redis:
+    """Get or create the async Redis client for pub/sub."""
+    global _pubsub_redis
+    if _pubsub_redis is None:
+        _pubsub_redis = aioredis.from_url(
+            settings.REDIS_URL,
+            encoding="utf-8",
+            decode_responses=True,
+        )
+    return _pubsub_redis
+
+
+async def close_pubsub_redis() -> None:
+    """Close the async Redis client."""
+    global _pubsub_redis
+    if _pubsub_redis is not None:
+        await _pubsub_redis.close()
+        _pubsub_redis = None
+
+
+async def publish_notification(user_id: str, notification: dict) -> bool:
+    """
+    Publish a notification to a user's channel.
+
+    Args:
+        user_id: The user ID to send the notification to
+        notification: The notification data (will be JSON serialized)
+
+    Returns:
+        True if published successfully, False otherwise
+    """
+    try:
+        redis_client = await get_pubsub_redis()
+        channel = get_channel_name(user_id)
+        message = json.dumps(notification, default=str)
+        await redis_client.publish(channel, message)
+        logger.debug(f"Published notification to channel {channel}")
+        return True
+    except Exception as e:
+        logger.error(f"Failed to publish notification: {e}")
+        return False
+
+
+class NotificationSubscriber:
+    """
+    Subscriber for user notification channels.
+
+    Used by WebSocket connections to receive real-time updates.
+    """
+
+    def __init__(self, user_id: str):
+        self.user_id = user_id
+        self.channel = get_channel_name(user_id)
+        self.pubsub: Optional[aioredis.client.PubSub] = None
+        self._running = False
+
+    async def start(self) -> None:
+        """Start subscribing to the user's notification channel."""
+        redis_client = await get_pubsub_redis()
+        self.pubsub = redis_client.pubsub()
+        await self.pubsub.subscribe(self.channel)
+        self._running = True
+        logger.debug(f"Subscribed to channel {self.channel}")
+
+    async def stop(self) -> None:
+        """Stop subscribing and clean up."""
+        self._running = False
+        if self.pubsub:
+            await self.pubsub.unsubscribe(self.channel)
+            await self.pubsub.close()
+            self.pubsub = None
+        logger.debug(f"Unsubscribed from channel {self.channel}")
+
+    async def listen(self, callback: Callable[[dict], Any]) -> None:
+        """
+        Listen for messages and call the callback for each notification.
+
+        Args:
+            callback: Async function to call with each notification dict
+        """
+        if not self.pubsub:
+            raise RuntimeError("Subscriber not started. Call start() first.")
+        try:
+            async for message in self.pubsub.listen():
+                if not self._running:
+                    break
+                if message["type"] == "message":
+                    try:
+                        data = json.loads(message["data"])
+                        await callback(data)
+                    except json.JSONDecodeError:
+                        logger.warning(f"Invalid JSON in notification: {message['data']}")
+                    except Exception as e:
+                        logger.error(f"Error processing notification: {e}")
+        except Exception as e:
+            if self._running:
+                logger.error(f"Error in notification listener: {e}")
+
+    @property
+    def is_running(self) -> bool:
+        return self._running
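
A quick way to see this module round-trip, sketched under the assumption that a Redis instance is reachable at `settings.REDIS_URL` (the user id and payload below are made up):

    # Toy round-trip through the pub/sub module above (assumes a local Redis).
    import asyncio

    from app.core.redis_pubsub import NotificationSubscriber, publish_notification

    async def main() -> None:
        sub = NotificationSubscriber("user-123")
        await sub.start()

        async def on_message(data: dict) -> None:
            print("received:", data)

        listener = asyncio.create_task(sub.listen(on_message))

        # Publisher side: any process can publish to the user's channel
        await publish_notification("user-123", {"title": "hello", "type": "demo"})

        await asyncio.sleep(0.5)  # give the listener a moment to deliver
        await sub.stop()
        listener.cancel()

    asyncio.run(main())

Because delivery goes through Redis Pub/Sub rather than in-process state, the publisher and the WebSocket subscriber can live in different worker processes, which is the multi-process broadcast described in the commit message.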


@@ -1,9 +1,95 @@
+import json
 import uuid
 import re
-from typing import List, Optional
+import asyncio
+import logging
+import threading
+from datetime import datetime
+from typing import List, Optional, Dict, Set
+
 from sqlalchemy.orm import Session
+from sqlalchemy import event
+
 from app.models import User, Notification, Task, Comment, Mention
+from app.core.redis_pubsub import publish_notification as redis_publish, get_channel_name
+from app.core.redis import get_redis_sync
+
+logger = logging.getLogger(__name__)
+
+# Thread-safe lock for module-level state
+_lock = threading.Lock()
+
+# Module-level queue for notifications pending publish after commit
+_pending_publish: Dict[int, List[dict]] = {}
+
+# Track which sessions have handlers registered
+_registered_sessions: Set[int] = set()
+
+
+def _sync_publish(user_id: str, data: dict):
+    """Sync fallback to publish notification via Redis when no event loop available."""
+    try:
+        redis_client = get_redis_sync()
+        channel = get_channel_name(user_id)
+        message = json.dumps(data, default=str)
+        redis_client.publish(channel, message)
+        logger.debug(f"Sync published notification to channel {channel}")
+    except Exception as e:
+        logger.error(f"Failed to sync publish notification to Redis: {e}")
+
+
+def _cleanup_session(session_id: int, remove_registration: bool = True):
+    """Clean up session state after commit/rollback. Thread-safe.
+
+    Args:
+        session_id: The session ID to clean up
+        remove_registration: If True, also remove from _registered_sessions.
+            Set to False for soft_rollback to avoid handler stacking.
+    """
+    with _lock:
+        if remove_registration:
+            _registered_sessions.discard(session_id)
+        return _pending_publish.pop(session_id, [])
+
+
+def _register_session_handlers(db: Session, session_id: int):
+    """Register after_commit, after_rollback, and after_soft_rollback handlers for a session."""
+    with _lock:
+        if session_id in _registered_sessions:
+            return
+        _registered_sessions.add(session_id)
+
+    @event.listens_for(db, "after_commit", once=True)
+    def _after_commit(session):
+        notifications = _cleanup_session(session_id)
+        if notifications:
+            try:
+                loop = asyncio.get_running_loop()
+                for n in notifications:
+                    loop.create_task(_async_publish(n["user_id"], n["data"]))
+            except RuntimeError:
+                # No running event loop - use sync fallback
+                logger.info(f"No event loop, using sync publish for {len(notifications)} notification(s)")
+                for n in notifications:
+                    _sync_publish(n["user_id"], n["data"])
+
+    @event.listens_for(db, "after_rollback", once=True)
+    def _after_rollback(session):
+        cleared = _cleanup_session(session_id)
+        if cleared:
+            logger.debug(f"Cleared {len(cleared)} pending notification(s) after rollback")
+
+    @event.listens_for(db, "after_soft_rollback", once=True)
+    def _after_soft_rollback(session, previous_transaction):
+        # Only clear pending notifications, keep handler registration to avoid stacking
+        cleared = _cleanup_session(session_id, remove_registration=False)
+        if cleared:
+            logger.debug(f"Cleared {len(cleared)} pending notification(s) after soft rollback")
+
+
+async def _async_publish(user_id: str, data: dict):
+    """Async helper to publish notification to Redis."""
+    try:
+        await redis_publish(user_id, data)
+    except Exception as e:
+        logger.error(f"Failed to publish notification to Redis: {e}")
+
+
 class NotificationService:
@@ -11,6 +97,56 @@ class NotificationService:
     MAX_MENTIONS_PER_COMMENT = 10

+    @staticmethod
+    def notification_to_dict(notification: Notification) -> dict:
+        """Convert a Notification to a dict for publishing."""
+        created_at = notification.created_at
+        if created_at is None:
+            created_at = datetime.utcnow()
+        return {
+            "id": notification.id,
+            "type": notification.type,
+            "reference_type": notification.reference_type,
+            "reference_id": notification.reference_id,
+            "title": notification.title,
+            "message": notification.message,
+            "is_read": notification.is_read,
+            "created_at": created_at.isoformat() if created_at else None,
+        }
+
+    @staticmethod
+    async def publish_notifications(notifications: List[Notification]) -> None:
+        """Publish notifications to Redis for real-time WebSocket delivery."""
+        for notification in notifications:
+            if notification and notification.user_id:
+                data = NotificationService.notification_to_dict(notification)
+                await redis_publish(notification.user_id, data)
+
+    @staticmethod
+    async def publish_notification(notification: Optional[Notification]) -> None:
+        """Publish a single notification to Redis."""
+        if notification:
+            await NotificationService.publish_notifications([notification])
+
+    @staticmethod
+    def _queue_for_publish(db: Session, notification: Notification):
+        """Queue notification for auto-publish after commit. Thread-safe."""
+        session_id = id(db)
+
+        # Register handlers first (has its own lock)
+        _register_session_handlers(db, session_id)
+
+        # Store notification data (not object) for publishing
+        notification_data = {
+            "user_id": notification.user_id,
+            "data": NotificationService.notification_to_dict(notification),
+        }
+        with _lock:
+            if session_id not in _pending_publish:
+                _pending_publish[session_id] = []
+            _pending_publish[session_id].append(notification_data)
+
     @staticmethod
     def create_notification(
         db: Session,
@@ -21,7 +157,7 @@ class NotificationService:
         title: str,
         message: Optional[str] = None,
     ) -> Notification:
-        """Create a notification for a user."""
+        """Create a notification for a user. Auto-publishes via Redis after commit."""
         notification = Notification(
             id=str(uuid.uuid4()),
             user_id=user_id,
@@ -32,6 +168,10 @@ class NotificationService:
             message=message,
         )
         db.add(notification)
+
+        # Queue for auto-publish after commit
+        NotificationService._queue_for_publish(db, notification)
+
         return notification

     @staticmethod
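
Put together, the intended flow is: `create_notification` adds the row and queues a serialized payload keyed by `id(db)`; nothing reaches Redis until the session's `after_commit` event fires, and a rollback clears the queue instead. A hedged sketch of a caller follows; the keyword arguments besides `db`, `user_id`, `title`, and `message` are abbreviated in the hunk above, so the ones shown here are assumptions.

    # Illustrative caller; kwargs beyond db/user_id/title/message are assumed.
    from app.core.database import SessionLocal
    from app.services.notification_service import NotificationService

    db = SessionLocal()
    try:
        NotificationService.create_notification(
            db=db,
            user_id="user-123",          # hypothetical recipient
            type="demo",                 # assumed parameter name
            reference_type="task",       # assumed parameter name
            reference_id="task-456",     # assumed parameter name
            title="Example",
            message="Queued now, published only after commit",
        )
        db.commit()   # after_commit fires: publish via event loop, or sync fallback
        # db.rollback() here would instead discard the queued payloads
    finally:
        db.close()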


@@ -5,7 +5,7 @@ from sqlalchemy.orm import Session
 from sqlalchemy import func

 from app.models import (
-    User, Task, Project, ScheduledReport, ReportHistory
+    User, Task, Project, ScheduledReport, ReportHistory, Blocker
 )

 from app.services.notification_service import NotificationService
@@ -29,11 +29,15 @@
         Get weekly task statistics for a user's projects.

         Returns stats for all projects where the user is the owner.
+        Includes: completed, in_progress, overdue, blocked, and next_week tasks.
         """
         if week_start is None:
             week_start = ReportService.get_week_start()
         week_end = week_start + timedelta(days=7)
+        next_week_start = week_end
+        next_week_end = next_week_start + timedelta(days=7)
+        now = datetime.utcnow()

         # Get projects owned by the user
         projects = db.query(Project).filter(Project.owner_id == user_id).all()
@@ -47,36 +51,71 @@
                     "completed_count": 0,
                     "in_progress_count": 0,
                     "overdue_count": 0,
+                    "blocked_count": 0,
+                    "next_week_count": 0,
                     "total_tasks": 0,
                 }
             }

         project_ids = [p.id for p in projects]

-        # Get all tasks for these projects
+        # Get all tasks for these projects with assignee info
         all_tasks = db.query(Task).filter(Task.project_id.in_(project_ids)).all()

+        # Get active blockers (unresolved) for these projects
+        active_blockers = db.query(Blocker).join(Task).filter(
+            Task.project_id.in_(project_ids),
+            Blocker.resolved_at.is_(None)
+        ).all()
+        # Map task_id to blocker info
+        blocker_map: Dict[str, Blocker] = {b.task_id: b for b in active_blockers}
+        blocked_task_ids = set(blocker_map.keys())
+
         # Categorize tasks
         completed_tasks = []
         in_progress_tasks = []
         overdue_tasks = []
-        now = datetime.utcnow()
+        blocked_tasks = []
+        next_week_tasks = []

         for task in all_tasks:
             status_name = task.status.name.lower() if task.status else ""
+            is_done = status_name in ["done", "completed", "完成"]

             # Check if completed (updated this week)
-            if status_name in ["done", "completed", "完成"]:
+            if is_done:
                 if task.updated_at and task.updated_at >= week_start:
                     completed_tasks.append(task)
-            # Check if in progress
-            elif status_name in ["in progress", "進行中", "doing"]:
-                in_progress_tasks.append(task)
+            else:
+                # Check if in progress
+                if status_name in ["in progress", "進行中", "doing"]:
+                    in_progress_tasks.append(task)

-            # Check if overdue
-            if task.due_date and task.due_date < now and status_name not in ["done", "completed", "完成"]:
-                overdue_tasks.append(task)
+                # Check if overdue
+                if task.due_date and task.due_date < now:
+                    overdue_tasks.append(task)
+
+                # Check if blocked
+                if task.id in blocked_task_ids:
+                    blocked_tasks.append(task)
+
+                # Check if due next week
+                if task.due_date and next_week_start <= task.due_date < next_week_end:
+                    next_week_tasks.append(task)
+
+        # Helper to get assignee name
+        def get_assignee_name(task: Task) -> Optional[str]:
+            if task.assignee:
+                return task.assignee.name
+            return None
+
+        # Helper to calculate days overdue
+        def get_days_overdue(task: Task) -> int:
+            if task.due_date:
+                delta = now - task.due_date
+                return max(0, delta.days)
+            return 0

         # Build project details
         project_details = []
@@ -85,6 +124,8 @@
             project_completed = [t for t in completed_tasks if t.project_id == project.id]
             project_in_progress = [t for t in in_progress_tasks if t.project_id == project.id]
             project_overdue = [t for t in overdue_tasks if t.project_id == project.id]
+            project_blocked = [t for t in blocked_tasks if t.project_id == project.id]
+            project_next_week = [t for t in next_week_tasks if t.project_id == project.id]

             project_details.append({
                 "project_id": project.id,
@@ -92,9 +133,57 @@
                 "completed_count": len(project_completed),
                 "in_progress_count": len(project_in_progress),
                 "overdue_count": len(project_overdue),
+                "blocked_count": len(project_blocked),
+                "next_week_count": len(project_next_week),
                 "total_tasks": len(project_tasks),
-                "completed_tasks": [{"id": t.id, "title": t.title} for t in project_completed[:5]],
-                "overdue_tasks": [{"id": t.id, "title": t.title, "due_date": t.due_date.isoformat() if t.due_date else None} for t in project_overdue[:5]],
+                # Full task lists with detailed fields
+                "completed_tasks": [
+                    {
+                        "id": t.id,
+                        "title": t.title,
+                        "completed_at": t.updated_at.isoformat() if t.updated_at else None,
+                        "assignee_name": get_assignee_name(t),
+                    }
+                    for t in project_completed
+                ],
+                "in_progress_tasks": [
+                    {
+                        "id": t.id,
+                        "title": t.title,
+                        "assignee_name": get_assignee_name(t),
+                        "due_date": t.due_date.isoformat() if t.due_date else None,
+                    }
+                    for t in project_in_progress
+                ],
+                "overdue_tasks": [
+                    {
+                        "id": t.id,
+                        "title": t.title,
+                        "due_date": t.due_date.isoformat() if t.due_date else None,
+                        "days_overdue": get_days_overdue(t),
+                        "assignee_name": get_assignee_name(t),
+                    }
+                    for t in project_overdue
+                ],
+                "blocked_tasks": [
+                    {
+                        "id": t.id,
+                        "title": t.title,
+                        "blocker_reason": blocker_map[t.id].reason if t.id in blocker_map else None,
+                        "blocked_since": blocker_map[t.id].created_at.isoformat() if t.id in blocker_map else None,
+                        "assignee_name": get_assignee_name(t),
+                    }
+                    for t in project_blocked
+                ],
+                "next_week_tasks": [
+                    {
+                        "id": t.id,
+                        "title": t.title,
+                        "due_date": t.due_date.isoformat() if t.due_date else None,
+                        "assignee_name": get_assignee_name(t),
+                    }
+                    for t in project_next_week
+                ],
             })

         return {
@@ -106,6 +195,8 @@
                 "completed_count": len(completed_tasks),
                 "in_progress_count": len(in_progress_tasks),
                 "overdue_count": len(overdue_tasks),
+                "blocked_count": len(blocked_tasks),
+                "next_week_count": len(next_week_tasks),
                 "total_tasks": len(all_tasks),
             }
         }
@@ -161,10 +252,18 @@
         completed = summary.get("completed_count", 0)
         in_progress = summary.get("in_progress_count", 0)
         overdue = summary.get("overdue_count", 0)
+        blocked = summary.get("blocked_count", 0)
+        next_week = summary.get("next_week_count", 0)

         message = f"本週完成 {completed} 項任務,進行中 {in_progress} 項"
         if overdue > 0:
-            message += f",逾期 {overdue} 項需關注"
+            message += f",逾期 {overdue} 項"
+        if blocked > 0:
+            message += f",阻礙 {blocked} 項"
+        if overdue > 0 or blocked > 0:
+            message += " 需關注"
+        if next_week > 0:
+            message += f"。下週預計 {next_week} 項"

         NotificationService.create_notification(
             db=db,
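
For readability, the summary-message logic at the end of this hunk composes like so (a standalone restatement with sample counts; English glosses in comments):

    # Standalone rendering of the summary-message logic above (sample counts).
    completed, in_progress, overdue, blocked, next_week = 4, 3, 2, 1, 5

    message = f"本週完成 {completed} 項任務,進行中 {in_progress} 項"  # "completed X tasks this week, Y in progress"
    if overdue > 0:
        message += f",逾期 {overdue} 項"        # ", Z overdue"
    if blocked > 0:
        message += f",阻礙 {blocked} 項"        # ", W blocked"
    if overdue > 0 or blocked > 0:
        message += " 需關注"                     # " - needs attention"
    if next_week > 0:
        message += f"。下週預計 {next_week} 項"  # ". N tasks expected next week"

    print(message)
    # 本週完成 4 項任務,進行中 3 項,逾期 2 項,阻礙 1 項 需關注。下週預計 5 項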


@@ -1,7 +1,7 @@
 import pytest
 import uuid
 from datetime import datetime, timedelta

-from app.models import User, Space, Project, Task, TaskStatus, ScheduledReport, ReportHistory
+from app.models import User, Space, Project, Task, TaskStatus, ScheduledReport, ReportHistory, Blocker

 from app.services.report_service import ReportService
@@ -258,3 +258,186 @@ class TestReportAPI:
         data = response.json()
         assert data["id"] == report.id
         assert "content" in data
+
+
+class TestWeeklyReportContent:
+    """Tests for enhanced weekly report content (blocked/next_week tasks)."""
+
+    def test_blocked_tasks_included(self, db, test_user, test_project, test_statuses):
+        """Test that blocked tasks are included in weekly stats."""
+        # Create a task with blocker
+        blocked_task = Task(
+            id=str(uuid.uuid4()),
+            project_id=test_project.id,
+            title="Blocked Task",
+            status_id=test_statuses["in_progress"].id,
+            created_by=test_user.id,
+            assignee_id=test_user.id,
+        )
+        db.add(blocked_task)
+        db.flush()
+
+        # Create an unresolved blocker
+        blocker = Blocker(
+            id=str(uuid.uuid4()),
+            task_id=blocked_task.id,
+            reported_by=test_user.id,
+            reason="Waiting for external dependency",
+        )
+        db.add(blocker)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert stats["summary"]["blocked_count"] == 1
+        assert len(stats["projects"]) == 1
+        assert stats["projects"][0]["blocked_count"] == 1
+        assert len(stats["projects"][0]["blocked_tasks"]) == 1
+        assert stats["projects"][0]["blocked_tasks"][0]["title"] == "Blocked Task"
+        assert stats["projects"][0]["blocked_tasks"][0]["blocker_reason"] == "Waiting for external dependency"
+
+    def test_resolved_blocker_not_included(self, db, test_user, test_project, test_statuses):
+        """Test that resolved blockers are not counted."""
+        # Create a task with resolved blocker
+        task = Task(
+            id=str(uuid.uuid4()),
+            project_id=test_project.id,
+            title="Previously Blocked Task",
+            status_id=test_statuses["in_progress"].id,
+            created_by=test_user.id,
+        )
+        db.add(task)
+        db.flush()
+
+        # Create a resolved blocker
+        blocker = Blocker(
+            id=str(uuid.uuid4()),
+            task_id=task.id,
+            reported_by=test_user.id,
+            reason="Was blocked",
+            resolved_by=test_user.id,
+            resolved_at=datetime.utcnow(),
+            resolution_note="Fixed",
+        )
+        db.add(blocker)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert stats["summary"]["blocked_count"] == 0
+        assert stats["projects"][0]["blocked_count"] == 0
+
+    def test_next_week_tasks_included(self, db, test_user, test_project, test_statuses):
+        """Test that next week tasks are included in weekly stats."""
+        # Calculate next week dates
+        week_start = ReportService.get_week_start()
+        next_week_date = week_start + timedelta(days=10)  # Next week
+
+        # Create a task due next week
+        next_week_task = Task(
+            id=str(uuid.uuid4()),
+            project_id=test_project.id,
+            title="Next Week Task",
+            status_id=test_statuses["todo"].id,
+            due_date=next_week_date,
+            created_by=test_user.id,
+            assignee_id=test_user.id,
+        )
+        db.add(next_week_task)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert stats["summary"]["next_week_count"] == 1
+        assert len(stats["projects"][0]["next_week_tasks"]) == 1
+        assert stats["projects"][0]["next_week_tasks"][0]["title"] == "Next Week Task"
+
+    def test_completed_task_not_in_next_week(self, db, test_user, test_project, test_statuses):
+        """Test that completed tasks are not included in next week list."""
+        week_start = ReportService.get_week_start()
+        next_week_date = week_start + timedelta(days=10)
+
+        # Create a completed task due next week
+        task = Task(
+            id=str(uuid.uuid4()),
+            project_id=test_project.id,
+            title="Done Next Week Task",
+            status_id=test_statuses["done"].id,
+            due_date=next_week_date,
+            created_by=test_user.id,
+        )
+        db.add(task)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert stats["summary"]["next_week_count"] == 0
+
+    def test_task_details_include_assignee_name(self, db, test_user, test_project, test_statuses):
+        """Test that task details include assignee name."""
+        task = Task(
+            id=str(uuid.uuid4()),
+            project_id=test_project.id,
+            title="Assigned Task",
+            status_id=test_statuses["in_progress"].id,
+            created_by=test_user.id,
+            assignee_id=test_user.id,
+        )
+        db.add(task)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert len(stats["projects"][0]["in_progress_tasks"]) == 1
+        assert stats["projects"][0]["in_progress_tasks"][0]["assignee_name"] == "Report User"
+
+    def test_overdue_days_calculated(self, db, test_user, test_project, test_statuses):
+        """Test that days_overdue is correctly calculated."""
+        # Create task overdue by 5 days
+        overdue_task = Task(
+            id=str(uuid.uuid4()),
+            project_id=test_project.id,
+            title="5 Days Overdue",
+            status_id=test_statuses["todo"].id,
+            due_date=datetime.utcnow() - timedelta(days=5),
+            created_by=test_user.id,
+        )
+        db.add(overdue_task)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert len(stats["projects"][0]["overdue_tasks"]) == 1
+        assert stats["projects"][0]["overdue_tasks"][0]["days_overdue"] >= 5
+
+    def test_full_task_lists_no_limit(self, db, test_user, test_project, test_statuses):
+        """Test that task lists have no 5-item limit."""
+        # Create 10 completed tasks
+        for i in range(10):
+            task = Task(
+                id=str(uuid.uuid4()),
+                project_id=test_project.id,
+                title=f"Completed Task {i}",
+                status_id=test_statuses["done"].id,
+                created_by=test_user.id,
+            )
+            task.updated_at = datetime.utcnow()
+            db.add(task)
+        db.commit()
+
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        assert stats["summary"]["completed_count"] == 10
+        assert len(stats["projects"][0]["completed_tasks"]) == 10  # No limit
+
+    def test_summary_includes_all_counts(self, db, test_user, test_project, test_statuses):
+        """Test that summary includes all new count fields."""
+        stats = ReportService.get_weekly_stats(db, test_user.id)
+
+        summary = stats["summary"]
+        assert "completed_count" in summary
+        assert "in_progress_count" in summary
+        assert "overdue_count" in summary
+        assert "blocked_count" in summary
+        assert "next_week_count" in summary
+        assert "total_tasks" in summary