feat: implement workload heatmap module

- Backend (FastAPI):
  - Workload heatmap API with load level calculation
  - User workload detail endpoint with task breakdown
  - Redis caching for workload calculations (1hr TTL)
  - Department isolation and access control
  - WorkloadSnapshot model for historical data
  - Alembic migration for workload_snapshots table

- API Endpoints:
  - GET /api/workload/heatmap - Team workload overview
  - GET /api/workload/user/{id} - User workload detail
  - GET /api/workload/me - Current user workload

- Load Levels:
  - normal: <80%, warning: 80-99%, overloaded: >=100%

- Tests:
  - 26 unit/API tests
  - 15 E2E automated tests
  - 77 total tests passing

- OpenSpec:
  - add-resource-workload change archived
  - resource-management spec updated

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
beabigegg
2025-12-29 01:13:21 +08:00
parent daca7798e3
commit 61fe01cb6b
17 changed files with 2517 additions and 30 deletions

View File

@@ -0,0 +1,3 @@
from app.api.workload.router import router
__all__ = ["router"]

View File

@@ -0,0 +1,217 @@
"""Workload API endpoints.
Provides endpoints for workload heatmap, user workload details,
and capacity management.
"""
from datetime import date
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy.orm import Session
from app.core.database import get_db
from app.middleware.auth import get_current_user
from app.models.user import User
from app.schemas.workload import (
WorkloadHeatmapResponse,
UserWorkloadDetail,
CapacityUpdate,
UserWorkloadSummary,
)
from app.services.workload_service import (
get_week_bounds,
get_current_week_start,
get_workload_heatmap,
get_user_workload_detail,
)
from app.services.workload_cache import (
get_cached_heatmap,
set_cached_heatmap,
)
router = APIRouter()
def check_workload_access(
    current_user: User,
    target_user_id: Optional[str] = None,
    target_user_department_id: Optional[str] = None,
    department_id: Optional[str] = None,
) -> None:
    """Verify the current user may view the requested workload data.

    System admins pass unconditionally; all other users may only view
    their own workload and their own department.

    Raises:
        HTTPException: 403 when access is denied. Returns None otherwise.
    """
    # Administrators bypass every restriction.
    if current_user.is_system_admin:
        return

    # Phase 1: non-admin users may only query their own workload.
    asks_for_other_user = bool(target_user_id) and target_user_id != current_user.id
    if asks_for_other_user:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied: Cannot view other users' workload",
        )

    # Department-scoped queries must target the caller's own department.
    asks_for_other_department = (
        bool(department_id) and department_id != current_user.department_id
    )
    if asks_for_other_department:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied: Cannot view other departments' workload",
        )
def filter_accessible_users(
    current_user: User,
    user_ids: Optional[List[str]] = None,
) -> Optional[List[str]]:
    """Restrict a user-id filter to the ids the caller may view.

    Returns:
        The (possibly filtered) id list, or None when the caller is a
        system admin and supplied no filter (meaning "all users").
    """
    # Admins see everything; pass the caller's filter through untouched.
    if current_user.is_system_admin:
        return user_ids

    # Non-admins are always scoped to themselves (Phase 1).
    if not user_ids:
        return [current_user.id]
    accessible = [uid for uid in user_ids if uid == current_user.id]
    # When the requested filter contains no accessible ids, fall back to self.
    return accessible or [current_user.id]
@router.get("/heatmap", response_model=WorkloadHeatmapResponse)
async def get_heatmap(
week_start: Optional[date] = Query(
None,
description="Start of week (ISO date, defaults to current Monday)"
),
department_id: Optional[str] = Query(
None,
description="Filter by department ID"
),
user_ids: Optional[str] = Query(
None,
description="Comma-separated list of user IDs to include"
),
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
):
"""
Get workload heatmap for users.
Returns workload summaries for users showing:
- allocated_hours: Total estimated hours from tasks due this week
- capacity_hours: User's weekly capacity
- load_percentage: Percentage of capacity used
- load_level: normal (<80%), warning (80-99%), overloaded (>=100%)
"""
# Parse user_ids if provided
parsed_user_ids = None
if user_ids:
parsed_user_ids = [uid.strip() for uid in user_ids.split(",") if uid.strip()]
# Check department access
if department_id:
check_workload_access(current_user, department_id=department_id)
# Filter user_ids based on access
accessible_user_ids = filter_accessible_users(current_user, parsed_user_ids)
# Normalize week_start
if week_start is None:
week_start = get_current_week_start()
else:
week_start = get_week_bounds(week_start)[0]
week_start, week_end = get_week_bounds(week_start)
# Try cache first
cached = get_cached_heatmap(week_start, department_id, accessible_user_ids)
if cached:
return WorkloadHeatmapResponse(
week_start=week_start,
week_end=week_end,
users=cached,
)
# Calculate from database
summaries = get_workload_heatmap(
db=db,
week_start=week_start,
department_id=department_id,
user_ids=accessible_user_ids,
)
# Cache the result
set_cached_heatmap(week_start, summaries, department_id, accessible_user_ids)
return WorkloadHeatmapResponse(
week_start=week_start,
week_end=week_end,
users=summaries,
)
@router.get("/user/{user_id}", response_model=UserWorkloadDetail)
async def get_user_workload(
user_id: str,
week_start: Optional[date] = Query(
None,
description="Start of week (ISO date, defaults to current Monday)"
),
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
):
"""
Get detailed workload for a specific user.
Returns:
- Workload summary (same as heatmap)
- List of tasks contributing to the workload
"""
# Check access
check_workload_access(current_user, target_user_id=user_id)
# Calculate workload detail
detail = get_user_workload_detail(db, user_id, week_start)
if detail is None:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="User not found",
)
return detail
@router.get("/me", response_model=UserWorkloadDetail)
async def get_my_workload(
week_start: Optional[date] = Query(
None,
description="Start of week (ISO date, defaults to current Monday)"
),
db: Session = Depends(get_db),
current_user: User = Depends(get_current_user),
):
"""
Get workload for the current authenticated user.
Convenience endpoint that doesn't require specifying user ID.
"""
detail = get_user_workload_detail(db, current_user.id, week_start)
if detail is None:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Failed to calculate workload",
)
return detail

View File

@@ -7,6 +7,7 @@ from app.api.departments import router as departments_router
from app.api.spaces import router as spaces_router
from app.api.projects import router as projects_router
from app.api.tasks import router as tasks_router
from app.api.workload import router as workload_router
from app.core.config import settings
app = FastAPI(
@@ -31,6 +32,7 @@ app.include_router(departments_router.router, prefix="/api/departments", tags=["
app.include_router(spaces_router)
app.include_router(projects_router)
app.include_router(tasks_router)
app.include_router(workload_router, prefix="/api/workload", tags=["Workload"])
@app.get("/health")

View File

@@ -5,5 +5,6 @@ from app.models.space import Space
from app.models.project import Project
from app.models.task_status import TaskStatus
from app.models.task import Task
from app.models.workload_snapshot import WorkloadSnapshot
__all__ = ["User", "Role", "Department", "Space", "Project", "TaskStatus", "Task"]
__all__ = ["User", "Role", "Department", "Space", "Project", "TaskStatus", "Task", "WorkloadSnapshot"]

View File

@@ -0,0 +1,29 @@
import uuid
from sqlalchemy import Column, String, ForeignKey, Date, Integer, Numeric, DateTime, UniqueConstraint, Index
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
from app.core.database import Base
class WorkloadSnapshot(Base):
    """Stores historical workload snapshots for trend analysis."""
    __tablename__ = "pjctrl_workload_snapshots"
    # Application-generated UUID primary key.
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Snapshots are removed together with their user (ON DELETE CASCADE).
    user_id = Column(String(36), ForeignKey("pjctrl_users.id", ondelete="CASCADE"), nullable=False)
    # First day of the week this snapshot covers (the service layer
    # normalizes week starts to Monday — confirm for rows written elsewhere).
    week_start = Column(Date, nullable=False)
    # Totals captured at snapshot time; defaults mirror a "no workload" row.
    allocated_hours = Column(Numeric(8, 2), nullable=False, default=0)
    capacity_hours = Column(Numeric(8, 2), nullable=False, default=40)
    load_percentage = Column(Numeric(5, 2), nullable=False, default=0)
    task_count = Column(Integer, nullable=False, default=0)
    # Timestamps maintained by the database server.
    created_at = Column(DateTime, server_default=func.now(), nullable=False)
    updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False)
    # Relationships
    user = relationship("User", backref="workload_snapshots")
    # Constraints: at most one snapshot per (user, week); index to speed
    # week-range queries for trend charts.
    __table_args__ = (
        UniqueConstraint('user_id', 'week_start', name='uk_user_week'),
        Index('idx_workload_week_start', 'week_start'),
    )

View File

@@ -0,0 +1,78 @@
from pydantic import BaseModel
from typing import Optional, List
from datetime import date, datetime
from decimal import Decimal
from enum import Enum
class LoadLevel(str, Enum):
    """Workload level classification.

    Mirrors the thresholds applied by the workload service:
    normal (<80%), warning (80-99%), overloaded (>=100%), and
    unavailable when no percentage could be computed (zero capacity).
    """
    NORMAL = "normal"
    WARNING = "warning"
    OVERLOADED = "overloaded"
    UNAVAILABLE = "unavailable"
class TaskWorkloadInfo(BaseModel):
    """Task information for workload detail view.

    One entry per task contributing to a user's weekly workload.
    """
    task_id: str
    title: str
    project_id: str
    project_name: str
    # The following are optional: a task may lack a due date, an
    # estimate, or an assigned status.
    due_date: Optional[datetime] = None
    original_estimate: Optional[Decimal] = None
    status: Optional[str] = None
class UserWorkloadSummary(BaseModel):
    """Summary of a user's workload for heatmap display."""
    user_id: str
    user_name: str
    department_id: Optional[str] = None
    department_name: Optional[str] = None
    capacity_hours: Decimal
    allocated_hours: Decimal
    # None when capacity is zero (percentage undefined); pairs with
    # LoadLevel.UNAVAILABLE.
    load_percentage: Optional[Decimal] = None
    load_level: LoadLevel
    task_count: int
class WorkloadHeatmapResponse(BaseModel):
    """Response for workload heatmap API.

    Covers one week (week_start..week_end) with one summary per user.
    """
    week_start: date
    week_end: date
    users: List[UserWorkloadSummary]
class UserWorkloadDetail(BaseModel):
    """Detailed workload for a specific user.

    Same aggregate fields as UserWorkloadSummary plus the task list
    contributing to the totals.
    """
    user_id: str
    user_name: str
    week_start: date
    week_end: date
    capacity_hours: Decimal
    allocated_hours: Decimal
    # None when capacity is zero; see LoadLevel.UNAVAILABLE.
    load_percentage: Optional[Decimal] = None
    load_level: LoadLevel
    tasks: List[TaskWorkloadInfo]
class WorkloadSnapshotResponse(BaseModel):
    """Response for workload snapshot.

    Serializes a WorkloadSnapshot ORM row; from_attributes allows
    construction directly from the model instance.
    """
    id: str
    user_id: str
    week_start: date
    allocated_hours: Decimal
    capacity_hours: Decimal
    load_percentage: Decimal
    task_count: int
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True
class CapacityUpdate(BaseModel):
    """Request to update user capacity.

    NOTE(review): presumably weekly capacity in hours (the services
    default to 40) — confirm against the endpoint that consumes this.
    """
    capacity: Decimal

View File

@@ -0,0 +1,163 @@
"""Workload cache service using Redis.
Provides caching for workload calculations to improve API response times.
"""
import json
from datetime import date
from decimal import Decimal
from typing import Optional, List
from app.core.redis import redis_client
from app.schemas.workload import UserWorkloadSummary, LoadLevel
# Cache TTL in seconds (1 hour)
WORKLOAD_CACHE_TTL = 3600
def _make_heatmap_cache_key(
    week_start: date,
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> str:
    """Build the Redis key for a heatmap query.

    User ids are sorted so that equivalent filters map to the same key
    regardless of the order the caller supplied them in.
    """
    key = f"workload:heatmap:{week_start}"
    if department_id:
        key += f":dept:{department_id}"
    if user_ids:
        key += f":users:{','.join(sorted(user_ids))}"
    return key
def _make_user_cache_key(user_id: str, week_start: date) -> str:
    """Build the Redis key for a single user's weekly workload."""
    return ":".join(["workload", "user", user_id, str(week_start)])
def _serialize_workload_summary(summary: "UserWorkloadSummary") -> dict:
    """Convert a UserWorkloadSummary into a JSON-serializable dict.

    Decimal fields are stored as strings so they round-trip without
    precision loss; load_percentage may be None (zero capacity).
    """
    return {
        "user_id": summary.user_id,
        "user_name": summary.user_name,
        "department_id": summary.department_id,
        "department_name": summary.department_name,
        "capacity_hours": str(summary.capacity_hours),
        "allocated_hours": str(summary.allocated_hours),
        # Explicit None check: Decimal("0") is falsy, so a truthiness
        # test would wrongly serialize a legitimate 0% load as None.
        "load_percentage": (
            str(summary.load_percentage)
            if summary.load_percentage is not None
            else None
        ),
        "load_level": summary.load_level.value,
        "task_count": summary.task_count,
    }
def _deserialize_workload_summary(data: dict) -> UserWorkloadSummary:
    """Rebuild a UserWorkloadSummary from its cached dict form.

    Inverse of _serialize_workload_summary: Decimal fields come back
    from strings; load_percentage may be None (zero capacity).
    """
    return UserWorkloadSummary(
        user_id=data["user_id"],
        user_name=data["user_name"],
        department_id=data["department_id"],
        department_name=data["department_name"],
        capacity_hours=Decimal(data["capacity_hours"]),
        allocated_hours=Decimal(data["allocated_hours"]),
        # Explicit None check for symmetry with the serializer; only a
        # stored None means "no percentage" (string "0" must round-trip).
        load_percentage=(
            Decimal(data["load_percentage"])
            if data["load_percentage"] is not None
            else None
        ),
        load_level=LoadLevel(data["load_level"]),
        task_count=data["task_count"],
    )
def get_cached_heatmap(
    week_start: date,
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> Optional[List[UserWorkloadSummary]]:
    """Return the cached heatmap for this query, or None on a cache miss.

    Args:
        week_start: Start of week
        department_id: Department filter
        user_ids: User IDs filter

    Returns:
        List of UserWorkloadSummary or None if not cached
    """
    raw = redis_client.get(
        _make_heatmap_cache_key(week_start, department_id, user_ids)
    )
    if not raw:
        return None
    return [_deserialize_workload_summary(item) for item in json.loads(raw)]
def set_cached_heatmap(
    week_start: date,
    summaries: List[UserWorkloadSummary],
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> None:
    """Store heatmap summaries in Redis under the query's cache key.

    Entries expire after WORKLOAD_CACHE_TTL seconds.

    Args:
        week_start: Start of week
        summaries: List of workload summaries
        department_id: Department filter
        user_ids: User IDs filter
    """
    key = _make_heatmap_cache_key(week_start, department_id, user_ids)
    payload = json.dumps([_serialize_workload_summary(s) for s in summaries])
    redis_client.setex(key, WORKLOAD_CACHE_TTL, payload)
def get_cached_user_workload(
    user_id: str,
    week_start: date,
) -> Optional[UserWorkloadSummary]:
    """Return the cached weekly summary for a user, or None on a miss.

    Args:
        user_id: User ID
        week_start: Start of week

    Returns:
        UserWorkloadSummary or None if not cached
    """
    raw = redis_client.get(_make_user_cache_key(user_id, week_start))
    if not raw:
        return None
    return _deserialize_workload_summary(json.loads(raw))
def set_cached_user_workload(
    user_id: str,
    week_start: date,
    summary: UserWorkloadSummary,
) -> None:
    """Cache one user's weekly summary for WORKLOAD_CACHE_TTL seconds.

    Args:
        user_id: User ID
        week_start: Start of week
        summary: Workload summary
    """
    redis_client.setex(
        _make_user_cache_key(user_id, week_start),
        WORKLOAD_CACHE_TTL,
        json.dumps(_serialize_workload_summary(summary)),
    )
def invalidate_user_workload_cache(user_id: str) -> None:
    """
    Invalidate all cached workload data for a user.
    Note: This uses pattern matching which may be slow for large datasets.
    For Phase 1, we rely on TTL expiration instead of active invalidation.
    """
    # NOTE(review): this glob matches keys shaped
    # "workload:<segment>:<user_id>:<suffix>" (e.g. the per-user keys from
    # _make_user_cache_key). Heatmap keys embed user ids inside a
    # comma-joined "users:..." segment and are NOT matched here — confirm
    # that TTL expiry alone is acceptable for stale heatmap entries.
    pattern = f"workload:*:{user_id}:*"
    # SCAN iterates incrementally instead of a blocking KEYS call.
    for key in redis_client.scan_iter(match=pattern):
        redis_client.delete(key)

View File

@@ -0,0 +1,281 @@
"""Workload calculation service.
Provides functionality to calculate and retrieve user workload data
including weekly load percentages, task allocations, and load level classification.
"""
from datetime import date, timedelta
from decimal import Decimal
from typing import List, Optional, Tuple
from sqlalchemy import func, and_
from sqlalchemy.orm import Session, joinedload
from app.models.user import User
from app.models.task import Task
from app.models.task_status import TaskStatus
from app.models.project import Project
from app.schemas.workload import (
LoadLevel,
UserWorkloadSummary,
UserWorkloadDetail,
TaskWorkloadInfo,
)
def get_week_bounds(d: date) -> Tuple[date, date]:
    """
    Get ISO week boundaries (Monday to Sunday).

    Args:
        d: Any date within the week

    Returns:
        Tuple of (week_start, week_end) where week_start is Monday
    """
    # weekday() is 0 for Monday, so subtracting it lands on the Monday.
    monday = d - timedelta(days=d.weekday())
    return monday, monday + timedelta(days=6)
def get_current_week_start() -> date:
    """Get the Monday of the current week."""
    today = date.today()
    return today - timedelta(days=today.weekday())
def determine_load_level(load_percentage: Optional[Decimal]) -> LoadLevel:
    """
    Determine the load level based on percentage.

    Args:
        load_percentage: The calculated load percentage (None if capacity is 0)

    Returns:
        UNAVAILABLE for None, NORMAL below 80, WARNING in [80, 100),
        OVERLOADED at 100 or above.
    """
    if load_percentage is None:
        return LoadLevel.UNAVAILABLE
    if load_percentage >= 100:
        return LoadLevel.OVERLOADED
    if load_percentage >= 80:
        return LoadLevel.WARNING
    return LoadLevel.NORMAL
def calculate_load_percentage(
    allocated_hours: Decimal,
    capacity_hours: Decimal
) -> Optional[Decimal]:
    """
    Compute allocated/capacity as a percentage, rounded to two places.

    Args:
        allocated_hours: Total allocated hours
        capacity_hours: User's weekly capacity

    Returns:
        Load percentage quantized to 0.01, or None when capacity is 0
        (the division would be undefined).
    """
    if capacity_hours == 0:
        return None
    ratio = allocated_hours / capacity_hours
    return (ratio * 100).quantize(Decimal("0.01"))
def get_user_tasks_in_week(
    db: Session,
    user_id: str,
    week_start: date,
    week_end: date,
) -> List[Task]:
    """
    Get all tasks assigned to a user with due_date in the specified week.
    Excludes tasks with is_done=True status.

    Args:
        db: Database session
        user_id: User ID
        week_start: Start of week (Monday)
        week_end: End of week (Sunday)

    Returns:
        List of Task objects
    """
    # Convert date to datetime for comparison
    # (the bounds use min/max times so the whole of Sunday is included).
    from datetime import datetime
    week_start_dt = datetime.combine(week_start, datetime.min.time())
    week_end_dt = datetime.combine(week_end, datetime.max.time())
    return (
        db.query(Task)
        # Outer join so tasks without any status row are not dropped.
        .join(Task.status, isouter=True)
        .join(Task.project)
        .filter(
            Task.assignee_id == user_id,
            Task.due_date >= week_start_dt,
            Task.due_date <= week_end_dt,
            # Exclude completed tasks
            # (`== False` / `== None` are SQLAlchemy column expressions,
            # not Python comparisons; the second clause keeps tasks with
            # no status assigned).
            (TaskStatus.is_done == False) | (Task.status_id == None)
        )
        # Eager-load project and status to avoid N+1 queries downstream.
        .options(joinedload(Task.project), joinedload(Task.status))
        .all()
    )
def calculate_user_workload(
    db: Session,
    user: User,
    week_start: date,
) -> UserWorkloadSummary:
    """
    Calculate workload summary for a single user.

    Args:
        db: Database session
        user: User object
        week_start: Start of week (Monday; any date is normalized)

    Returns:
        UserWorkloadSummary object
    """
    week_start, week_end = get_week_bounds(week_start)

    # Sum estimates over the user's not-done tasks due this week;
    # tasks without an estimate contribute nothing.
    tasks = get_user_tasks_in_week(db, user.id, week_start, week_end)
    allocated_hours = Decimal("0")
    for task in tasks:
        if task.original_estimate:
            allocated_hours += task.original_estimate

    # Default to 40h only when capacity is unset. A capacity of 0 is a
    # valid value (user unavailable) and must not fall back to the
    # default: it yields load_percentage=None -> LoadLevel.UNAVAILABLE.
    if user.capacity is not None:
        capacity_hours = Decimal(str(user.capacity))
    else:
        capacity_hours = Decimal("40")
    load_percentage = calculate_load_percentage(allocated_hours, capacity_hours)

    return UserWorkloadSummary(
        user_id=user.id,
        user_name=user.name,
        department_id=user.department_id,
        department_name=user.department.name if user.department else None,
        capacity_hours=capacity_hours,
        allocated_hours=allocated_hours,
        load_percentage=load_percentage,
        load_level=determine_load_level(load_percentage),
        task_count=len(tasks),
    )
def get_workload_heatmap(
    db: Session,
    week_start: Optional[date] = None,
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> List[UserWorkloadSummary]:
    """
    Get workload heatmap for multiple users.

    Args:
        db: Database session
        week_start: Start of week (defaults to current week)
        department_id: Filter by department
        user_ids: Filter by specific user IDs

    Returns:
        List of UserWorkloadSummary objects
    """
    # Normalize to the Monday of the requested (or current) week.
    week_start = (
        get_current_week_start()
        if week_start is None
        else get_week_bounds(week_start)[0]
    )

    # Active users, optionally narrowed by department and explicit ids.
    user_query = db.query(User).filter(User.is_active == True)
    if department_id:
        user_query = user_query.filter(User.department_id == department_id)
    if user_ids:
        user_query = user_query.filter(User.id.in_(user_ids))
    members = user_query.options(joinedload(User.department)).all()

    return [calculate_user_workload(db, member, week_start) for member in members]
def get_user_workload_detail(
    db: Session,
    user_id: str,
    week_start: Optional[date] = None,
) -> Optional[UserWorkloadDetail]:
    """
    Get detailed workload for a specific user including task list.

    Args:
        db: Database session
        user_id: User ID
        week_start: Any date in the target week (defaults to current week)

    Returns:
        UserWorkloadDetail object or None if user not found
    """
    user = (
        db.query(User)
        .filter(User.id == user_id)
        .options(joinedload(User.department))
        .first()
    )
    if not user:
        return None

    # Normalize to the (Monday, Sunday) bounds of the requested week;
    # get_week_bounds accepts any date in the week, so one call suffices.
    if week_start is None:
        week_start = date.today()
    week_start, week_end = get_week_bounds(week_start)

    # Collect not-done tasks due this week and total their estimates.
    tasks = get_user_tasks_in_week(db, user_id, week_start, week_end)
    allocated_hours = Decimal("0")
    task_infos = []
    for task in tasks:
        if task.original_estimate:
            allocated_hours += task.original_estimate
        task_infos.append(TaskWorkloadInfo(
            task_id=task.id,
            title=task.title,
            project_id=task.project_id,
            project_name=task.project.title if task.project else "Unknown",
            due_date=task.due_date,
            original_estimate=task.original_estimate,
            status=task.status.name if task.status else None,
        ))

    # A capacity of 0 is a valid "unavailable" value; only fall back to
    # the 40h default when capacity is unset (None). This keeps the
    # load_percentage=None -> LoadLevel.UNAVAILABLE path reachable.
    if user.capacity is not None:
        capacity_hours = Decimal(str(user.capacity))
    else:
        capacity_hours = Decimal("40")
    load_percentage = calculate_load_percentage(allocated_hours, capacity_hours)

    return UserWorkloadDetail(
        user_id=user.id,
        user_name=user.name,
        week_start=week_start,
        week_end=week_end,
        capacity_hours=capacity_hours,
        allocated_hours=allocated_hours,
        load_percentage=load_percentage,
        load_level=determine_load_level(load_percentage),
        tasks=task_infos,
    )