## Security Enhancements (P0)

- Add input validation with max_length and numeric range constraints
- Implement WebSocket token authentication via first message
- Add path traversal prevention in file storage service

## Permission Enhancements (P0)

- Add project member management for cross-department access
- Implement is_department_manager flag for workload visibility

## Cycle Detection (P0)

- Add DFS-based cycle detection for task dependencies (see the sketch below)
- Add formula field circular reference detection
- Display user-friendly cycle path visualization

## Concurrency & Reliability (P1)

- Implement optimistic locking with version field (409 Conflict on mismatch)
- Add trigger retry mechanism with exponential backoff (1s, 2s, 4s) (see the sketch below)
- Implement cascade restore for soft-deleted tasks

## Rate Limiting (P1)

- Add tiered rate limits: standard (60/min), sensitive (20/min), heavy (5/min)
- Apply rate limits to tasks, reports, attachments, and comments

## Frontend Improvements (P1)

- Add responsive sidebar with hamburger menu for mobile
- Improve touch-friendly UI with proper tap target sizes
- Complete i18n translations for all components

## Backend Reliability (P2)

- Configure database connection pool (size=10, overflow=20)
- Add Redis fallback mechanism with message queue
- Add blocker check before task deletion

## API Enhancements (P3)

- Add standardized response wrapper utility
- Add /health/ready and /health/live endpoints
- Implement project templates with status/field copying

## Tests Added

- test_input_validation.py - Schema and path traversal tests
- test_concurrency_reliability.py - Optimistic locking and retry tests
- test_backend_reliability.py - Connection pool and Redis tests
- test_api_enhancements.py - Health check and template tests

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
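The sketches below illustrate two of the mechanisms listed above that do not appear in the route module included here. Both are minimal, hypothetical sketches rather than the shipped implementations; the function names (`find_dependency_cycle`, `run_trigger_with_retry`) and the plain-dict dependency graph are assumptions made for illustration only.

DFS-based cycle detection over task dependencies, returning the cycle path so it can be rendered in a user-facing message:

```python
from typing import Dict, List, Optional


def find_dependency_cycle(graph: Dict[str, List[str]], start: str) -> Optional[List[str]]:
    """Return a cycle path reachable from `start` (e.g. ["A", "B", "C", "A"]), or None.

    `graph` maps a task ID to the IDs of tasks it depends on (assumed shape).
    """
    path: List[str] = []   # current DFS stack, in visit order
    on_stack = set()       # fast membership test for the current path
    visited = set()        # nodes already proven cycle-free

    def dfs(node: str) -> Optional[List[str]]:
        if node in on_stack:
            # Back edge found: the cycle is the suffix of the path starting at `node`.
            return path[path.index(node):] + [node]
        if node in visited:
            return None
        path.append(node)
        on_stack.add(node)
        for dep in graph.get(node, []):
            cycle = dfs(dep)
            if cycle:
                return cycle
        on_stack.discard(node)
        path.pop()
        visited.add(node)
        return None

    return dfs(start)


# Adding C -> A to A -> B -> C closes a loop; prints ['A', 'B', 'C', 'A'].
print(find_dependency_cycle({"A": ["B"], "B": ["C"], "C": ["A"]}, "A"))
```

The trigger retry with exponential backoff (1s, 2s, 4s), assuming the trigger action can be wrapped as a callable:

```python
import logging
import time

logger = logging.getLogger(__name__)


def run_trigger_with_retry(action, max_attempts: int = 3, base_delay: float = 1.0):
    """Run `action()` up to `max_attempts` times, sleeping 1s, 2s, 4s between failures."""
    for attempt in range(1, max_attempts + 1):
        try:
            return action()
        except Exception as exc:
            if attempt == max_attempts:
                logger.error(f"Trigger failed after {max_attempts} attempts: {exc}")
                raise
            delay = base_delay * (2 ** (attempt - 1))  # 1s, 2s, 4s
            logger.warning(f"Trigger attempt {attempt} failed ({exc}); retrying in {delay:.0f}s")
            time.sleep(delay)
```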
import logging
import uuid
from datetime import datetime, timezone, timedelta
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, status, Query, Request
from sqlalchemy.orm import Session

from app.core.database import get_db
from app.core.redis_pubsub import publish_task_event
from app.core.rate_limiter import limiter
from app.core.config import settings
from app.models import User, Project, Task, TaskStatus, AuditAction, Blocker
from app.schemas.task import (
    TaskCreate, TaskUpdate, TaskResponse, TaskWithDetails, TaskListResponse,
    TaskStatusUpdate, TaskAssignUpdate, CustomValueResponse,
    TaskRestoreRequest, TaskRestoreResponse,
    TaskDeleteWarningResponse, TaskDeleteResponse
)
from app.middleware.auth import (
    get_current_user, check_project_access, check_task_access, check_task_edit_access
)
from app.middleware.audit import get_audit_metadata
from app.services.audit_service import AuditService
from app.services.trigger_service import TriggerService
from app.services.workload_cache import invalidate_user_workload_cache
from app.services.custom_value_service import CustomValueService
from app.services.dependency_service import DependencyService

logger = logging.getLogger(__name__)

router = APIRouter(tags=["tasks"])

# Maximum subtask depth
MAX_SUBTASK_DEPTH = 2


def get_task_depth(db: Session, task: Task) -> int:
    """Calculate the depth of a task in the hierarchy."""
    depth = 1
    current = task
    while current.parent_task_id:
        depth += 1
        current = db.query(Task).filter(Task.id == current.parent_task_id).first()
        if not current:
            break
    return depth


def task_to_response(task: Task, db: Session = None, include_custom_values: bool = False) -> TaskWithDetails:
    """Convert a Task model to TaskWithDetails response."""
    # Count only non-deleted subtasks
    subtask_count = 0
    if task.subtasks:
        subtask_count = sum(1 for st in task.subtasks if not st.is_deleted)

    # Get custom values if requested
    custom_values = None
    if include_custom_values and db:
        custom_values = CustomValueService.get_custom_values_for_task(db, task)

    return TaskWithDetails(
        id=task.id,
        project_id=task.project_id,
        parent_task_id=task.parent_task_id,
        title=task.title,
        description=task.description,
        priority=task.priority,
        original_estimate=task.original_estimate,
        time_spent=task.time_spent,
        start_date=task.start_date,
        due_date=task.due_date,
        assignee_id=task.assignee_id,
        status_id=task.status_id,
        blocker_flag=task.blocker_flag,
        position=task.position,
        created_by=task.created_by,
        created_at=task.created_at,
        updated_at=task.updated_at,
        version=task.version,
        assignee_name=task.assignee.name if task.assignee else None,
        status_name=task.status.name if task.status else None,
        status_color=task.status.color if task.status else None,
        creator_name=task.creator.name if task.creator else None,
        subtask_count=subtask_count,
        custom_values=custom_values,
    )


@router.get("/api/projects/{project_id}/tasks", response_model=TaskListResponse)
async def list_tasks(
    project_id: str,
    parent_task_id: Optional[str] = Query(None, description="Filter by parent task"),
    status_id: Optional[str] = Query(None, description="Filter by status"),
    assignee_id: Optional[str] = Query(None, description="Filter by assignee"),
    due_after: Optional[datetime] = Query(None, description="Filter tasks with due_date >= this value (for calendar view)"),
    due_before: Optional[datetime] = Query(None, description="Filter tasks with due_date <= this value (for calendar view)"),
    include_deleted: bool = Query(False, description="Include deleted tasks (admin only)"),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    List all tasks in a project.

    Supports filtering by:
    - parent_task_id: Filter by parent task (empty string for root tasks only)
    - status_id: Filter by task status
    - assignee_id: Filter by assigned user
    - due_after: Filter tasks with due_date >= this value (ISO 8601 datetime)
    - due_before: Filter tasks with due_date <= this value (ISO 8601 datetime)

    The due_after and due_before parameters are useful for calendar view
    to fetch tasks within a specific date range.
    """
    project = db.query(Project).filter(Project.id == project_id).first()

    if not project:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found",
        )

    if not check_project_access(current_user, project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied",
        )

    query = db.query(Task).filter(Task.project_id == project_id)

    # Filter deleted tasks (only admin can include deleted)
    if include_deleted and current_user.is_system_admin:
        pass  # Don't filter by is_deleted
    else:
        query = query.filter(Task.is_deleted == False)

    # Apply filters
    if parent_task_id is not None:
        if parent_task_id == "":
            # Root tasks only
            query = query.filter(Task.parent_task_id == None)
        else:
            query = query.filter(Task.parent_task_id == parent_task_id)
    else:
        # By default, show only root tasks
        query = query.filter(Task.parent_task_id == None)

    if status_id:
        query = query.filter(Task.status_id == status_id)

    if assignee_id:
        query = query.filter(Task.assignee_id == assignee_id)

    # Date range filter for calendar view
    if due_after:
        query = query.filter(Task.due_date >= due_after)

    if due_before:
        query = query.filter(Task.due_date <= due_before)

    tasks = query.order_by(Task.position, Task.created_at).all()

    return TaskListResponse(
        tasks=[task_to_response(t, db=db, include_custom_values=True) for t in tasks],
        total=len(tasks),
    )


@router.post("/api/projects/{project_id}/tasks", response_model=TaskResponse, status_code=status.HTTP_201_CREATED)
@limiter.limit(settings.RATE_LIMIT_STANDARD)
async def create_task(
    request: Request,
    project_id: str,
    task_data: TaskCreate,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Create a new task in a project.

    Rate limited: 60 requests per minute (standard tier).
    """
    project = db.query(Project).filter(Project.id == project_id).first()

    if not project:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found",
        )

    if not check_project_access(current_user, project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied",
        )

    # Validate parent task and check depth
    if task_data.parent_task_id:
        parent_task = db.query(Task).filter(
            Task.id == task_data.parent_task_id,
            Task.project_id == project_id
        ).first()

        if not parent_task:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Parent task not found in this project",
            )

        # Check depth limit
        parent_depth = get_task_depth(db, parent_task)
        if parent_depth >= MAX_SUBTASK_DEPTH:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Maximum subtask depth ({MAX_SUBTASK_DEPTH}) exceeded",
            )

    # Validate status_id belongs to this project
    if task_data.status_id:
        status_obj = db.query(TaskStatus).filter(
            TaskStatus.id == task_data.status_id,
            TaskStatus.project_id == project_id
        ).first()

        if not status_obj:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Status not found in this project",
            )
    else:
        # Use first status (To Do) as default
        default_status = db.query(TaskStatus).filter(
            TaskStatus.project_id == project_id
        ).order_by(TaskStatus.position).first()
        task_data.status_id = default_status.id if default_status else None

    # Get max position
    max_pos_result = db.query(Task).filter(
        Task.project_id == project_id,
        Task.parent_task_id == task_data.parent_task_id
    ).order_by(Task.position.desc()).first()
    next_position = (max_pos_result.position + 1) if max_pos_result else 0

    # Validate required custom fields
    if task_data.custom_values:
        missing_fields = CustomValueService.validate_required_fields(
            db, project_id, task_data.custom_values
        )
        if missing_fields:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Missing required custom fields: {', '.join(missing_fields)}",
            )

    # Validate start_date <= due_date
    if task_data.start_date and task_data.due_date:
        if task_data.start_date > task_data.due_date:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Start date cannot be after due date",
            )

    task = Task(
        id=str(uuid.uuid4()),
        project_id=project_id,
        parent_task_id=task_data.parent_task_id,
        title=task_data.title,
        description=task_data.description,
        priority=task_data.priority.value if task_data.priority else "medium",
        original_estimate=task_data.original_estimate,
        start_date=task_data.start_date,
        due_date=task_data.due_date,
        assignee_id=task_data.assignee_id,
        status_id=task_data.status_id,
        position=next_position,
        created_by=current_user.id,
    )

    db.add(task)
    db.flush()  # Flush to get task.id for custom values

    # Save custom values
    if task_data.custom_values:
        try:
            CustomValueService.save_custom_values(db, task, task_data.custom_values)
        except ValueError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )

    # Audit log
    AuditService.log_event(
        db=db,
        event_type="task.create",
        resource_type="task",
        action=AuditAction.CREATE,
        user_id=current_user.id,
        resource_id=task.id,
        changes=[{"field": "title", "old_value": None, "new_value": task.title}],
        request_metadata=get_audit_metadata(request),
    )

    db.commit()
    db.refresh(task)

    # Invalidate workload cache if task has an assignee
    if task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_created",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "description": task.description,
                "status_id": str(task.status_id) if task.status_id else None,
                "status_name": task.status.name if task.status else None,
                "status_color": task.status.color if task.status else None,
                "assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "assignee_name": task.assignee.name if task.assignee else None,
                "priority": task.priority,
                "start_date": str(task.start_date) if task.start_date else None,
                "due_date": str(task.due_date) if task.due_date else None,
                "time_estimate": task.original_estimate,
                "original_estimate": task.original_estimate,
                "parent_task_id": str(task.parent_task_id) if task.parent_task_id else None,
                "position": task.position,
                "created_by": str(task.created_by),
                "creator_name": task.creator.name if task.creator else None,
                "created_at": str(task.created_at),
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_created event: {e}")

    return task


@router.get("/api/tasks/{task_id}", response_model=TaskWithDetails)
async def get_task(
    task_id: str,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Get a task by ID.
    """
    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    # Check if task is deleted (only admin can view deleted)
    if task.is_deleted and not current_user.is_system_admin:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if not check_task_access(current_user, task, task.project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied",
        )

    return task_to_response(task, db, include_custom_values=True)


@router.patch("/api/tasks/{task_id}", response_model=TaskResponse)
@limiter.limit(settings.RATE_LIMIT_STANDARD)
async def update_task(
    request: Request,
    task_id: str,
    task_data: TaskUpdate,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Update a task.

    Rate limited: 60 requests per minute (standard tier).
    """
    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if not check_task_edit_access(current_user, task, task.project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Permission denied",
        )

    # Optimistic locking: validate version if provided
    if task_data.version is not None:
        if task_data.version != task.version:
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail={
                    "message": "Task has been modified by another user",
                    "current_version": task.version,
                    "provided_version": task_data.version,
                },
            )

    # Capture old values for audit and triggers
    old_values = {
        "title": task.title,
        "description": task.description,
        "priority": task.priority,
        "start_date": task.start_date,
        "due_date": task.due_date,
        "original_estimate": task.original_estimate,
        "time_spent": task.time_spent,
    }

    # Update fields (exclude custom_values and version, handle separately)
    update_data = task_data.model_dump(exclude_unset=True)
    custom_values_data = update_data.pop("custom_values", None)
    update_data.pop("version", None)  # version is handled separately for optimistic locking

    # Track old assignee for workload cache invalidation
    old_assignee_id = task.assignee_id

    # Validate assignee_id if provided
    if "assignee_id" in update_data and update_data["assignee_id"]:
        assignee = db.query(User).filter(User.id == update_data["assignee_id"]).first()
        if not assignee:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Assignee not found",
            )

    # Validate status_id if provided
    if "status_id" in update_data and update_data["status_id"]:
        from app.models.task_status import TaskStatus
        task_status = db.query(TaskStatus).filter(TaskStatus.id == update_data["status_id"]).first()
        if not task_status:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Status not found",
            )

    # Get the proposed start_date and due_date for validation
    new_start_date = update_data.get("start_date", task.start_date)
    new_due_date = update_data.get("due_date", task.due_date)

    # Validate start_date <= due_date
    if new_start_date and new_due_date:
        if new_start_date > new_due_date:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Start date cannot be after due date",
            )

    # Validate date constraints against dependencies
    if "start_date" in update_data or "due_date" in update_data:
        violations = DependencyService.validate_date_constraints(
            task, new_start_date, new_due_date, db
        )
        if violations:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail={
                    "message": "Date change violates dependency constraints",
                    "violations": violations
                }
            )

    for field, value in update_data.items():
        if field == "priority" and value:
            setattr(task, field, value.value)
        else:
            setattr(task, field, value)

    # Capture new values for audit and triggers
    new_values = {
        "title": task.title,
        "description": task.description,
        "priority": task.priority,
        "start_date": task.start_date,
        "due_date": task.due_date,
        "original_estimate": task.original_estimate,
        "time_spent": task.time_spent,
    }

    # Detect changes and log
    changes = AuditService.detect_changes(old_values, new_values)
    if changes:
        AuditService.log_event(
            db=db,
            event_type="task.update",
            resource_type="task",
            action=AuditAction.UPDATE,
            user_id=current_user.id,
            resource_id=task.id,
            changes=changes,
            request_metadata=get_audit_metadata(request),
        )

    # Evaluate triggers for priority changes
    if "priority" in update_data:
        TriggerService.evaluate_triggers(db, task, old_values, new_values, current_user)

    # Handle custom values update
    if custom_values_data:
        try:
            from app.schemas.task import CustomValueInput
            custom_values = [CustomValueInput(**cv) for cv in custom_values_data]
            CustomValueService.save_custom_values(db, task, custom_values)
        except ValueError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )

    # Increment version for optimistic locking
    task.version += 1

    db.commit()
    db.refresh(task)

    # Invalidate workload cache if original_estimate changed and task has an assignee
    if "original_estimate" in update_data and task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Invalidate workload cache if assignee changed
    if "assignee_id" in update_data:
        # Invalidate old assignee's cache
        if old_assignee_id and old_assignee_id != task.assignee_id:
            invalidate_user_workload_cache(old_assignee_id)
        # Invalidate new assignee's cache
        if task.assignee_id:
            invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_updated",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "description": task.description,
                "status_id": str(task.status_id) if task.status_id else None,
                "status_name": task.status.name if task.status else None,
                "status_color": task.status.color if task.status else None,
                "assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "assignee_name": task.assignee.name if task.assignee else None,
                "priority": task.priority,
                "start_date": str(task.start_date) if task.start_date else None,
                "due_date": str(task.due_date) if task.due_date else None,
                "time_estimate": task.original_estimate,
                "original_estimate": task.original_estimate,
                "time_spent": task.time_spent,
                "parent_task_id": str(task.parent_task_id) if task.parent_task_id else None,
                "position": task.position,
                "updated_at": str(task.updated_at),
                "updated_fields": list(update_data.keys()),
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_updated event: {e}")

    return task


@router.delete("/api/tasks/{task_id}")
async def delete_task(
    task_id: str,
    request: Request,
    force_delete: bool = Query(False, description="Force delete even if task has unresolved blockers"),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Soft delete a task (cascades to subtasks).

    If the task has unresolved blockers and force_delete is False,
    returns a warning response with status 200 and blocker count.
    Use force_delete=true to delete anyway (auto-resolves blockers).
    """
    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if task.is_deleted:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Task is already deleted",
        )

    if not check_task_edit_access(current_user, task, task.project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Permission denied",
        )

    # Check for unresolved blockers
    unresolved_blockers = db.query(Blocker).filter(
        Blocker.task_id == task.id,
        Blocker.resolved_at == None,
    ).all()

    blocker_count = len(unresolved_blockers)

    # If there are unresolved blockers and force_delete is False, return warning
    if blocker_count > 0 and not force_delete:
        return TaskDeleteWarningResponse(
            warning="Task has unresolved blockers",
            blocker_count=blocker_count,
            message=f"Task has {blocker_count} unresolved blocker(s). Use force_delete=true to delete anyway.",
        )

    # Use naive datetime for consistency with database storage
    now = datetime.now(timezone.utc).replace(tzinfo=None)

    # Auto-resolve blockers if force deleting
    blockers_resolved = 0
    if force_delete and blocker_count > 0:
        for blocker in unresolved_blockers:
            blocker.resolved_at = now
            blocker.resolved_by = current_user.id
            blocker.resolution_note = "Auto-resolved due to task deletion"
            blockers_resolved += 1
        logger.info(f"Auto-resolved {blockers_resolved} blocker(s) for task {task_id} during force delete")

    # Soft delete the task
    task.is_deleted = True
    task.deleted_at = now
    task.deleted_by = current_user.id

    # Cascade soft delete to subtasks
    def soft_delete_subtasks(parent_task):
        for subtask in parent_task.subtasks:
            if not subtask.is_deleted:
                subtask.is_deleted = True
                subtask.deleted_at = now
                subtask.deleted_by = current_user.id
                soft_delete_subtasks(subtask)

    soft_delete_subtasks(task)

    # Audit log
    AuditService.log_event(
        db=db,
        event_type="task.delete",
        resource_type="task",
        action=AuditAction.DELETE,
        user_id=current_user.id,
        resource_id=task.id,
        changes=[
            {"field": "is_deleted", "old_value": False, "new_value": True},
            {"field": "force_delete", "old_value": None, "new_value": force_delete},
            {"field": "blockers_resolved", "old_value": None, "new_value": blockers_resolved},
        ],
        request_metadata=get_audit_metadata(request),
    )

    db.commit()
    db.refresh(task)

    # Invalidate workload cache for assignee
    if task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_deleted",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "parent_task_id": str(task.parent_task_id) if task.parent_task_id else None,
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_deleted event: {e}")

    return TaskDeleteResponse(
        task=task,
        blockers_resolved=blockers_resolved,
        force_deleted=force_delete and blocker_count > 0,
    )


@router.post("/api/tasks/{task_id}/restore", response_model=TaskRestoreResponse)
async def restore_task(
    task_id: str,
    request: Request,
    restore_data: TaskRestoreRequest = None,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Restore a soft-deleted task (admin only).

    Supports cascade restore: when enabled (default), also restores child tasks
    that were deleted at the same time as the parent task.

    Args:
        task_id: ID of the task to restore
        restore_data: Optional restore options (cascade=True by default)

    Returns:
        TaskRestoreResponse with restored task and list of restored children
    """
    if not current_user.is_system_admin:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Only system administrators can restore deleted tasks",
        )

    # Handle default for optional body
    if restore_data is None:
        restore_data = TaskRestoreRequest()

    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if not task.is_deleted:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Task is not deleted",
        )

    # Store the parent's deleted_at timestamp for cascade restore
    parent_deleted_at = task.deleted_at
    restored_children_ids = []

    # Restore the parent task
    task.is_deleted = False
    task.deleted_at = None
    task.deleted_by = None

    # Audit log for parent task
    AuditService.log_event(
        db=db,
        event_type="task.restore",
        resource_type="task",
        action=AuditAction.UPDATE,
        user_id=current_user.id,
        resource_id=task.id,
        changes=[
            {"field": "is_deleted", "old_value": True, "new_value": False},
            {"field": "cascade", "old_value": None, "new_value": restore_data.cascade},
        ],
        request_metadata=get_audit_metadata(request),
    )

    # Cascade restore child tasks if requested
    if restore_data.cascade and parent_deleted_at:
        restored_children_ids = _cascade_restore_children(
            db=db,
            parent_task=task,
            parent_deleted_at=parent_deleted_at,
            current_user=current_user,
            request=request,
        )

    db.commit()
    db.refresh(task)

    # Invalidate workload cache for parent task assignee
    if task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Invalidate workload cache for all restored children's assignees
    for child_id in restored_children_ids:
        child_task = db.query(Task).filter(Task.id == child_id).first()
        if child_task and child_task.assignee_id:
            invalidate_user_workload_cache(child_task.assignee_id)

    return TaskRestoreResponse(
        restored_task=task,
        restored_children_count=len(restored_children_ids),
        restored_children_ids=restored_children_ids,
    )


def _cascade_restore_children(
    db: Session,
    parent_task: Task,
    parent_deleted_at: datetime,
    current_user: User,
    request: Request,
    tolerance_seconds: int = 5,
) -> List[str]:
    """
    Recursively restore child tasks that were deleted at the same time as the parent.

    Args:
        db: Database session
        parent_task: The parent task being restored
        parent_deleted_at: Timestamp when the parent was deleted
        current_user: Current user performing the restore
        request: HTTP request for audit metadata
        tolerance_seconds: Time tolerance for matching deleted_at timestamps

    Returns:
        List of restored child task IDs
    """
    restored_ids = []

    # Find all deleted child tasks with matching deleted_at timestamp
    # Use a small tolerance window to account for slight timing differences
    time_window_start = parent_deleted_at - timedelta(seconds=tolerance_seconds)
    time_window_end = parent_deleted_at + timedelta(seconds=tolerance_seconds)

    deleted_children = db.query(Task).filter(
        Task.parent_task_id == parent_task.id,
        Task.is_deleted == True,
        Task.deleted_at >= time_window_start,
        Task.deleted_at <= time_window_end,
    ).all()

    for child in deleted_children:
        # Store child's deleted_at before restoring
        child_deleted_at = child.deleted_at

        # Restore the child
        child.is_deleted = False
        child.deleted_at = None
        child.deleted_by = None
        restored_ids.append(child.id)

        # Audit log for child task
        AuditService.log_event(
            db=db,
            event_type="task.restore",
            resource_type="task",
            action=AuditAction.UPDATE,
            user_id=current_user.id,
            resource_id=child.id,
            changes=[
                {"field": "is_deleted", "old_value": True, "new_value": False},
                {"field": "restored_via_cascade", "old_value": None, "new_value": parent_task.id},
            ],
            request_metadata=get_audit_metadata(request),
        )

        logger.info(f"Cascade restored child task {child.id} (parent: {parent_task.id})")

        # Recursively restore grandchildren
        if child_deleted_at:
            grandchildren_ids = _cascade_restore_children(
                db=db,
                parent_task=child,
                parent_deleted_at=child_deleted_at,
                current_user=current_user,
                request=request,
                tolerance_seconds=tolerance_seconds,
            )
            restored_ids.extend(grandchildren_ids)

    return restored_ids


@router.patch("/api/tasks/{task_id}/status", response_model=TaskResponse)
async def update_task_status(
    task_id: str,
    status_data: TaskStatusUpdate,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Update task status.
    """
    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if not check_task_edit_access(current_user, task, task.project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Permission denied",
        )

    # Validate new status belongs to same project
    new_status = db.query(TaskStatus).filter(
        TaskStatus.id == status_data.status_id,
        TaskStatus.project_id == task.project_id
    ).first()

    if not new_status:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Status not found in this project",
        )

    # Capture old status for triggers and event publishing
    old_status_id = task.status_id
    old_status_name = task.status.name if task.status else None

    task.status_id = status_data.status_id

    # Auto-set blocker_flag based on status name and actual Blocker records
    if new_status.name.lower() == "blocked":
        task.blocker_flag = True
    else:
        # Only set blocker_flag = False if there are no unresolved blockers
        unresolved_blockers = db.query(Blocker).filter(
            Blocker.task_id == task.id,
            Blocker.resolved_at == None,
        ).count()
        if unresolved_blockers == 0:
            task.blocker_flag = False
        # If there are unresolved blockers, keep blocker_flag as is

    # Evaluate triggers for status changes
    if old_status_id != status_data.status_id:
        TriggerService.evaluate_triggers(
            db, task,
            {"status_id": old_status_id},
            {"status_id": status_data.status_id},
            current_user
        )

    db.commit()
    db.refresh(task)

    # Invalidate workload cache when status changes (affects completed/incomplete task calculations)
    if old_status_id != status_data.status_id and task.assignee_id:
        invalidate_user_workload_cache(task.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_status_changed",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "old_status_id": str(old_status_id) if old_status_id else None,
                "old_status_name": old_status_name,
                "new_status_id": str(task.status_id) if task.status_id else None,
                "new_status_name": task.status.name if task.status else None,
                "new_status_color": task.status.color if task.status else None,
                "assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "blocker_flag": task.blocker_flag,
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_status_changed event: {e}")

    return task


@router.patch("/api/tasks/{task_id}/assign", response_model=TaskResponse)
async def assign_task(
    task_id: str,
    assign_data: TaskAssignUpdate,
    request: Request,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Assign or unassign a task.
    """
    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if not check_task_edit_access(current_user, task, task.project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Permission denied",
        )

    # Validate assignee exists if provided
    if assign_data.assignee_id:
        assignee = db.query(User).filter(User.id == assign_data.assignee_id).first()
        if not assignee:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Assignee not found",
            )

    old_assignee_id = task.assignee_id
    old_assignee_name = task.assignee.name if task.assignee else None
    task.assignee_id = assign_data.assignee_id

    # Audit log
    AuditService.log_event(
        db=db,
        event_type="task.assign",
        resource_type="task",
        action=AuditAction.UPDATE,
        user_id=current_user.id,
        resource_id=task.id,
        changes=[{"field": "assignee_id", "old_value": old_assignee_id, "new_value": assign_data.assignee_id}],
        request_metadata=get_audit_metadata(request),
    )

    # Evaluate triggers for assignee changes
    if old_assignee_id != assign_data.assignee_id:
        TriggerService.evaluate_triggers(
            db, task,
            {"assignee_id": old_assignee_id},
            {"assignee_id": assign_data.assignee_id},
            current_user
        )

    db.commit()
    db.refresh(task)

    # Invalidate workload cache for both old and new assignees
    if old_assignee_id != assign_data.assignee_id:
        if old_assignee_id:
            invalidate_user_workload_cache(old_assignee_id)
        if assign_data.assignee_id:
            invalidate_user_workload_cache(assign_data.assignee_id)

    # Publish real-time event
    try:
        await publish_task_event(
            project_id=str(task.project_id),
            event_type="task_assigned",
            task_data={
                "task_id": str(task.id),
                "project_id": str(task.project_id),
                "title": task.title,
                "old_assignee_id": str(old_assignee_id) if old_assignee_id else None,
                "old_assignee_name": old_assignee_name,
                "new_assignee_id": str(task.assignee_id) if task.assignee_id else None,
                "new_assignee_name": task.assignee.name if task.assignee else None,
                "status_id": str(task.status_id) if task.status_id else None,
                "status_name": task.status.name if task.status else None,
            },
            triggered_by=str(current_user.id)
        )
    except Exception as e:
        logger.warning(f"Failed to publish task_assigned event: {e}")

    return task


@router.get("/api/tasks/{task_id}/subtasks", response_model=TaskListResponse)
async def list_subtasks(
    task_id: str,
    include_deleted: bool = Query(False, description="Include deleted subtasks (admin only)"),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    List subtasks of a task.
    """
    task = db.query(Task).filter(Task.id == task_id).first()

    if not task:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Task not found",
        )

    if not check_task_access(current_user, task, task.project):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied",
        )

    query = db.query(Task).filter(Task.parent_task_id == task_id)

    # Filter deleted subtasks (only admin can include deleted)
    if not (include_deleted and current_user.is_system_admin):
        query = query.filter(Task.is_deleted == False)

    subtasks = query.order_by(Task.position, Task.created_at).all()

    return TaskListResponse(
        tasks=[task_to_response(t) for t in subtasks],
        total=len(subtasks),
    )