feat: implement kanban real-time sync and fix workload cache
## Kanban Real-time Sync (NEW-002)
- Backend:
- WebSocket endpoint: /ws/projects/{project_id}
- Project room management in ConnectionManager
- Redis Pub/Sub: project:{project_id}:tasks channel
- Task CRUD event publishing (5 event types)
- Redis connection retry with exponential backoff
- Race condition fix in broadcast_to_project
- Frontend:
- ProjectSyncContext for WebSocket management
- Reconnection with exponential backoff (max 5 attempts)
- Multi-tab event deduplication via event_id
- Live/Offline connection indicator
- Optimistic updates with rollback
- Spec:
- collaboration spec: +1 requirement (Project Real-time Sync)
- 7 new scenarios for real-time sync
## Workload Cache Fix (NEW-001)
- Added cache invalidation to all task endpoints:
- create_task, update_task, update_task_status
- delete_task, restore_task, assign_task
- Extended to clear heatmap cache as well
## OpenSpec Archive
- 2026-01-05-add-kanban-realtime-sync
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -1,3 +1,4 @@
import logging
import uuid
from datetime import datetime, timezone
from typing import List, Optional
@@ -5,6 +6,7 @@ from fastapi import APIRouter, Depends, HTTPException, status, Query, Request
from sqlalchemy.orm import Session

from app.core.database import get_db
from app.core.redis_pubsub import publish_task_event
from app.models import User, Project, Task, TaskStatus, AuditAction, Blocker
from app.schemas.task import (
    TaskCreate, TaskUpdate, TaskResponse, TaskWithDetails, TaskListResponse,
@@ -16,6 +18,9 @@ from app.middleware.auth import (
from app.middleware.audit import get_audit_metadata
from app.services.audit_service import AuditService
from app.services.trigger_service import TriggerService
from app.services.workload_cache import invalidate_user_workload_cache

logger = logging.getLogger(__name__)

router = APIRouter(tags=["tasks"])
@@ -231,6 +236,40 @@ async def create_task(
    db.commit()
    db.refresh(task)

    # Invalidate workload cache if task has an assignee
    if task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_created",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "description": task.description,
                "status_id": str(task.status_id) if task.status_id else None,
                "status_name": task.status.name if task.status else None,
                "status_color": task.status.color if task.status else None,
                "assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "assignee_name": task.assignee.name if task.assignee else None,
                "priority": task.priority,
                "due_date": str(task.due_date) if task.due_date else None,
                "time_estimate": task.original_estimate,
                "original_estimate": task.original_estimate,
                "parent_task_id": str(task.parent_task_id) if task.parent_task_id else None,
                "position": task.position,
                "created_by": str(task.created_by),
                "creator_name": task.creator.name if task.creator else None,
                "created_at": str(task.created_at),
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_created event: {e}")

    return task
@@ -341,6 +380,40 @@ async def update_task(
    db.commit()
    db.refresh(task)

    # Invalidate workload cache if original_estimate changed and task has an assignee
    if "original_estimate" in update_data and task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_updated",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "description": task.description,
                "status_id": str(task.status_id) if task.status_id else None,
                "status_name": task.status.name if task.status else None,
                "status_color": task.status.color if task.status else None,
                "assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "assignee_name": task.assignee.name if task.assignee else None,
                "priority": task.priority,
                "due_date": str(task.due_date) if task.due_date else None,
                "time_estimate": task.original_estimate,
                "original_estimate": task.original_estimate,
                "time_spent": task.time_spent,
                "parent_task_id": str(task.parent_task_id) if task.parent_task_id else None,
                "position": task.position,
                "updated_at": str(task.updated_at),
                "updated_fields": list(update_data.keys()),
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_updated event: {e}")

    return task
@@ -408,6 +481,26 @@ async def delete_task(
    db.commit()
    db.refresh(task)

    # Invalidate workload cache for assignee
    if task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_deleted",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "parent_task_id": str(task.parent_task_id) if task.parent_task_id else None,
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_deleted event: {e}")

    return task
@@ -461,6 +554,10 @@ async def restore_task(
    db.commit()
    db.refresh(task)

    # Invalidate workload cache for assignee
    if task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    return task
@@ -500,8 +597,9 @@ async def update_task_status(
            detail="Status not found in this project",
        )

-    # Capture old status for triggers
+    # Capture old status for triggers and event publishing
    old_status_id = task.status_id
+    old_status_name = task.status.name if task.status else None

    task.status_id = status_data.status_id
@@ -530,6 +628,32 @@ async def update_task_status(
    db.commit()
    db.refresh(task)

    # Invalidate workload cache when status changes (affects completed/incomplete task calculations)
    if old_status_id != status_data.status_id and task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_status_changed",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "old_status_id": str(old_status_id) if old_status_id else None,
                "old_status_name": old_status_name,
                "new_status_id": str(task.status_id) if task.status_id else None,
                "new_status_name": task.status.name if task.status else None,
                "new_status_color": task.status.color if task.status else None,
                "assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "blocker_flag": task.blocker_flag,
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_status_changed event: {e}")

    return task
@@ -568,6 +692,7 @@ async def assign_task(
    )

    old_assignee_id = task.assignee_id
    old_assignee_name = task.assignee.name if task.assignee else None
    task.assignee_id = assign_data.assignee_id

    # Audit log
@@ -594,6 +719,34 @@ async def assign_task(
    db.commit()
    db.refresh(task)

    # Invalidate workload cache for both old and new assignees
    if old_assignee_id != assign_data.assignee_id:
        if old_assignee_id:
            invalidate_user_workload_cache(old_assignee_id)
        if assign_data.assignee_id:
            invalidate_user_workload_cache(assign_data.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_assigned",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "old_assignee_id": str(old_assignee_id) if old_assignee_id else None,
                "old_assignee_name": old_assignee_name,
                "new_assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "new_assignee_name": task.assignee.name if task.assignee else None,
                "status_id": str(task.status_id) if task.status_id else None,
                "status_name": task.status.name if task.status else None,
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_assigned event: {e}")

    return task
Reference in New Issue
Block a user