feat: implement workload heatmap module
- Backend (FastAPI):
- Workload heatmap API with load level calculation
- User workload detail endpoint with task breakdown
- Redis caching for workload calculations (1hr TTL)
- Department isolation and access control
- WorkloadSnapshot model for historical data
- Alembic migration for workload_snapshots table
- API Endpoints:
- GET /api/workload/heatmap - Team workload overview
- GET /api/workload/user/{id} - User workload detail
- GET /api/workload/me - Current user workload
- Load Levels:
- normal: <80%, warning: 80-99%, overloaded: >=100%
- Tests:
- 26 unit/API tests
- 15 E2E automated tests
- 77 total tests passing
- OpenSpec:
- add-resource-workload change archived
- resource-management spec updated
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
163
backend/app/services/workload_cache.py
Normal file
163
backend/app/services/workload_cache.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""Workload cache service using Redis.
|
||||
|
||||
Provides caching for workload calculations to improve API response times.
|
||||
"""
|
||||
import json
|
||||
from datetime import date
|
||||
from decimal import Decimal
|
||||
from typing import Optional, List
|
||||
|
||||
from app.core.redis import redis_client
|
||||
from app.schemas.workload import UserWorkloadSummary, LoadLevel
|
||||
|
||||
# Cache TTL in seconds (1 hour)
|
||||
WORKLOAD_CACHE_TTL = 3600
|
||||
|
||||
|
||||
def _make_heatmap_cache_key(
    week_start: date,
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> str:
    """Build the Redis key for a heatmap query.

    The key encodes the week plus any active filters so that distinct
    filter combinations never share a cache entry. User IDs are sorted
    so the same set of users always yields the same key.
    """
    key = f"workload:heatmap:{week_start}"
    if department_id:
        key += f":dept:{department_id}"
    if user_ids:
        joined = ",".join(sorted(user_ids))
        key += f":users:{joined}"
    return key
|
||||
|
||||
|
||||
def _make_user_cache_key(user_id: str, week_start: date) -> str:
    """Build the Redis key for one user's weekly workload entry."""
    return ":".join(["workload", "user", user_id, str(week_start)])
|
||||
|
||||
|
||||
def _serialize_workload_summary(summary: UserWorkloadSummary) -> dict:
    """Serialize a UserWorkloadSummary into a JSON-safe dict.

    Decimal fields are stored as strings so round-tripping through JSON
    does not lose precision; the load level is stored as its enum value.

    Args:
        summary: Workload summary to serialize

    Returns:
        Dict of primitive values suitable for json.dumps
    """
    return {
        "user_id": summary.user_id,
        "user_name": summary.user_name,
        "department_id": summary.department_id,
        "department_name": summary.department_name,
        "capacity_hours": str(summary.capacity_hours),
        "allocated_hours": str(summary.allocated_hours),
        # Explicit None check: Decimal("0") is falsy, and a plain truthiness
        # test would incorrectly cache a legitimate 0% load as null.
        "load_percentage": str(summary.load_percentage) if summary.load_percentage is not None else None,
        "load_level": summary.load_level.value,
        "task_count": summary.task_count,
    }
|
||||
|
||||
|
||||
def _deserialize_workload_summary(data: dict) -> UserWorkloadSummary:
    """Rebuild a UserWorkloadSummary from its cached JSON dict.

    Inverse of _serialize_workload_summary: string-encoded Decimal fields
    are converted back to Decimal and the load level string back to the
    LoadLevel enum.

    Args:
        data: Dict previously produced by _serialize_workload_summary

    Returns:
        Reconstructed UserWorkloadSummary
    """
    raw_percentage = data["load_percentage"]
    return UserWorkloadSummary(
        user_id=data["user_id"],
        user_name=data["user_name"],
        department_id=data["department_id"],
        department_name=data["department_name"],
        capacity_hours=Decimal(data["capacity_hours"]),
        allocated_hours=Decimal(data["allocated_hours"]),
        # Explicit None check so a stored "0" percentage is not collapsed
        # to None by truthiness.
        load_percentage=Decimal(raw_percentage) if raw_percentage is not None else None,
        load_level=LoadLevel(data["load_level"]),
        task_count=data["task_count"],
    )
|
||||
|
||||
|
||||
def get_cached_heatmap(
    week_start: date,
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> Optional[List[UserWorkloadSummary]]:
    """
    Look up cached heatmap data for a week/filter combination.

    Args:
        week_start: Start of week
        department_id: Department filter
        user_ids: User IDs filter

    Returns:
        List of UserWorkloadSummary, or None on a cache miss
    """
    key = _make_heatmap_cache_key(week_start, department_id, user_ids)
    raw = redis_client.get(key)
    if not raw:
        return None
    return [_deserialize_workload_summary(entry) for entry in json.loads(raw)]
|
||||
|
||||
|
||||
def set_cached_heatmap(
    week_start: date,
    summaries: List[UserWorkloadSummary],
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> None:
    """
    Store heatmap data in Redis under the standard workload TTL.

    Args:
        week_start: Start of week
        summaries: List of workload summaries
        department_id: Department filter
        user_ids: User IDs filter
    """
    key = _make_heatmap_cache_key(week_start, department_id, user_ids)
    payload = json.dumps([_serialize_workload_summary(item) for item in summaries])
    redis_client.setex(key, WORKLOAD_CACHE_TTL, payload)
|
||||
|
||||
|
||||
def get_cached_user_workload(
    user_id: str,
    week_start: date,
) -> Optional[UserWorkloadSummary]:
    """
    Look up a single user's cached workload for a week.

    Args:
        user_id: User ID
        week_start: Start of week

    Returns:
        UserWorkloadSummary, or None on a cache miss
    """
    key = _make_user_cache_key(user_id, week_start)
    raw = redis_client.get(key)
    if not raw:
        return None
    return _deserialize_workload_summary(json.loads(raw))
|
||||
|
||||
|
||||
def set_cached_user_workload(
    user_id: str,
    week_start: date,
    summary: UserWorkloadSummary,
) -> None:
    """
    Store one user's weekly workload in Redis under the standard TTL.

    Args:
        user_id: User ID
        week_start: Start of week
        summary: Workload summary
    """
    key = _make_user_cache_key(user_id, week_start)
    payload = json.dumps(_serialize_workload_summary(summary))
    redis_client.setex(key, WORKLOAD_CACHE_TTL, payload)
|
||||
|
||||
|
||||
def invalidate_user_workload_cache(user_id: str) -> None:
    """
    Invalidate all cached workload data for a user.

    The pattern matches per-user keys (``workload:user:<user_id>:<week>``).
    Heatmap keys embed user IDs inside a comma-joined segment and are not
    matched here; they expire via TTL.

    Note: SCAN-based pattern matching may be slow for large datasets.
    For Phase 1, we rely on TTL expiration instead of active invalidation.
    """
    pattern = f"workload:*:{user_id}:*"
    # Collect matches first, then delete in a single round trip instead of
    # issuing one DEL command per key.
    keys = list(redis_client.scan_iter(match=pattern))
    if keys:
        redis_client.delete(*keys)
|
||||
281
backend/app/services/workload_service.py
Normal file
281
backend/app/services/workload_service.py
Normal file
@@ -0,0 +1,281 @@
|
||||
"""Workload calculation service.
|
||||
|
||||
Provides functionality to calculate and retrieve user workload data
|
||||
including weekly load percentages, task allocations, and load level classification.
|
||||
"""
|
||||
from datetime import date, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from sqlalchemy import func, and_
|
||||
from sqlalchemy.orm import Session, joinedload
|
||||
|
||||
from app.models.user import User
|
||||
from app.models.task import Task
|
||||
from app.models.task_status import TaskStatus
|
||||
from app.models.project import Project
|
||||
from app.schemas.workload import (
|
||||
LoadLevel,
|
||||
UserWorkloadSummary,
|
||||
UserWorkloadDetail,
|
||||
TaskWorkloadInfo,
|
||||
)
|
||||
|
||||
|
||||
def get_week_bounds(d: date) -> Tuple[date, date]:
    """
    Compute ISO week boundaries (Monday through Sunday) for a date.

    Args:
        d: Any date within the week

    Returns:
        (week_start, week_end) tuple where week_start is the Monday
    """
    monday = d - timedelta(days=d.weekday())
    sunday = monday + timedelta(days=6)
    return monday, sunday
|
||||
|
||||
|
||||
def get_current_week_start() -> date:
    """Return the Monday of the current (local) week."""
    today = date.today()
    return today - timedelta(days=today.weekday())
|
||||
|
||||
|
||||
def determine_load_level(load_percentage: Optional[Decimal]) -> LoadLevel:
    """
    Classify a load percentage into a LoadLevel.

    Thresholds: below 80% is normal, 80% up to (but excluding) 100% is a
    warning, and 100% or more is overloaded. A None percentage (zero
    capacity) maps to UNAVAILABLE.

    Args:
        load_percentage: The calculated load percentage (None if capacity is 0)

    Returns:
        LoadLevel enum value
    """
    if load_percentage is None:
        return LoadLevel.UNAVAILABLE
    if load_percentage >= 100:
        return LoadLevel.OVERLOADED
    if load_percentage >= 80:
        return LoadLevel.WARNING
    return LoadLevel.NORMAL
|
||||
|
||||
|
||||
def calculate_load_percentage(
    allocated_hours: Decimal,
    capacity_hours: Decimal
) -> Optional[Decimal]:
    """
    Compute load as a percentage of capacity, guarding against zero capacity.

    Args:
        allocated_hours: Total allocated hours
        capacity_hours: User's weekly capacity

    Returns:
        Percentage rounded to two decimal places, or None when capacity is 0
    """
    if capacity_hours == 0:
        return None
    ratio = allocated_hours / capacity_hours
    return (ratio * 100).quantize(Decimal("0.01"))
|
||||
|
||||
|
||||
def get_user_tasks_in_week(
    db: Session,
    user_id: str,
    week_start: date,
    week_end: date,
) -> List[Task]:
    """
    Get all tasks assigned to a user with due_date in the specified week.
    Excludes tasks with is_done=True status; tasks with no status at all
    (status_id is NULL) are kept, which is why the status join is an
    outer join.

    Args:
        db: Database session
        user_id: User ID
        week_start: Start of week (Monday)
        week_end: End of week (Sunday)

    Returns:
        List of Task objects with project and status relationships eager-loaded
    """
    # Convert date to datetime for comparison; datetime.max.time() makes the
    # range inclusive through the very end of Sunday.
    from datetime import datetime
    week_start_dt = datetime.combine(week_start, datetime.min.time())
    week_end_dt = datetime.combine(week_end, datetime.max.time())

    return (
        db.query(Task)
        # Outer join: keep tasks whose status_id is NULL.
        .join(Task.status, isouter=True)
        .join(Task.project)
        .filter(
            Task.assignee_id == user_id,
            Task.due_date >= week_start_dt,
            Task.due_date <= week_end_dt,
            # Exclude completed tasks. `== False` / `== None` are deliberate:
            # these build SQLAlchemy column expressions, not Python booleans.
            (TaskStatus.is_done == False) | (Task.status_id == None)
        )
        # Eager-load relationships accessed by callers to avoid N+1 queries.
        .options(joinedload(Task.project), joinedload(Task.status))
        .all()
    )
|
||||
|
||||
|
||||
def calculate_user_workload(
    db: Session,
    user: User,
    week_start: date,
) -> UserWorkloadSummary:
    """
    Build the weekly workload summary for a single user.

    Args:
        db: Database session
        user: User object
        week_start: Start of week (normalized to Monday internally)

    Returns:
        UserWorkloadSummary object
    """
    week_start, week_end = get_week_bounds(week_start)

    # Tasks due for this user within the (Monday..Sunday) window.
    tasks = get_user_tasks_in_week(db, user.id, week_start, week_end)

    # Total the original estimates, skipping tasks without one.
    allocated_hours = sum(
        (task.original_estimate for task in tasks if task.original_estimate),
        Decimal("0"),
    )

    # NOTE(review): a falsy capacity (0 or None) falls back to 40h — confirm
    # that a stored capacity of 0 is not meant to signal "unavailable".
    capacity_hours = Decimal(str(user.capacity)) if user.capacity else Decimal("40")
    load_percentage = calculate_load_percentage(allocated_hours, capacity_hours)
    load_level = determine_load_level(load_percentage)

    return UserWorkloadSummary(
        user_id=user.id,
        user_name=user.name,
        department_id=user.department_id,
        department_name=user.department.name if user.department else None,
        capacity_hours=capacity_hours,
        allocated_hours=allocated_hours,
        load_percentage=load_percentage,
        load_level=load_level,
        task_count=len(tasks),
    )
|
||||
|
||||
|
||||
def get_workload_heatmap(
    db: Session,
    week_start: Optional[date] = None,
    department_id: Optional[str] = None,
    user_ids: Optional[List[str]] = None,
) -> List[UserWorkloadSummary]:
    """
    Get workload heatmap for multiple users.

    Args:
        db: Database session
        week_start: Start of week (defaults to current week)
        department_id: Filter by department
        user_ids: Filter by specific user IDs

    Returns:
        List of UserWorkloadSummary objects, one per matching active user
    """
    # Normalize the target week: default to the current week, otherwise
    # snap any supplied date back to its Monday.
    if week_start is None:
        week_start = get_current_week_start()
    else:
        week_start = get_week_bounds(week_start)[0]

    # Only active users participate in the heatmap.
    query = db.query(User).filter(User.is_active == True)
    if department_id:
        query = query.filter(User.department_id == department_id)
    if user_ids:
        query = query.filter(User.id.in_(user_ids))

    # Eager-load departments since summaries read the department name.
    users = query.options(joinedload(User.department)).all()

    return [calculate_user_workload(db, user, week_start) for user in users]
|
||||
|
||||
|
||||
def get_user_workload_detail(
    db: Session,
    user_id: str,
    week_start: Optional[date] = None,
) -> Optional[UserWorkloadDetail]:
    """
    Get detailed workload for a specific user including the task list.

    Args:
        db: Database session
        user_id: User ID
        week_start: Start of week (defaults to current week)

    Returns:
        UserWorkloadDetail object, or None if the user does not exist
    """
    user = (
        db.query(User)
        .filter(User.id == user_id)
        .options(joinedload(User.department))
        .first()
    )

    if not user:
        return None

    # Default to the current week, then snap to Monday..Sunday bounds in a
    # single normalization step (get_week_bounds is idempotent on a Monday).
    if week_start is None:
        week_start = get_current_week_start()
    week_start, week_end = get_week_bounds(week_start)

    tasks = get_user_tasks_in_week(db, user_id, week_start, week_end)

    # Accumulate total estimated hours and build the per-task breakdown.
    allocated_hours = Decimal("0")
    task_infos = []

    for task in tasks:
        if task.original_estimate:
            allocated_hours += task.original_estimate

        task_infos.append(TaskWorkloadInfo(
            task_id=task.id,
            title=task.title,
            project_id=task.project_id,
            project_name=task.project.title if task.project else "Unknown",
            due_date=task.due_date,
            original_estimate=task.original_estimate,
            status=task.status.name if task.status else None,
        ))

    # NOTE(review): a falsy capacity (0 or None) falls back to 40h — confirm
    # that a stored capacity of 0 is not meant to signal "unavailable".
    capacity_hours = Decimal(str(user.capacity)) if user.capacity else Decimal("40")
    load_percentage = calculate_load_percentage(allocated_hours, capacity_hours)
    load_level = determine_load_level(load_percentage)

    return UserWorkloadDetail(
        user_id=user.id,
        user_name=user.name,
        week_start=week_start,
        week_end=week_end,
        capacity_hours=capacity_hours,
        allocated_hours=allocated_hours,
        load_percentage=load_percentage,
        load_level=load_level,
        tasks=task_infos,
    )
|
||||
Reference in New Issue
Block a user