feat: complete issue fixes and implement remaining features
## Critical Issues (CRIT-001~003) - All Fixed
- JWT secret key validation with pydantic field_validator (see the sketch after this list)
- Login audit logging for success/failure attempts
- Frontend API path prefix removal
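
A minimal sketch of what the CRIT-001 validator likely looks like, assuming pydantic v2 settings; the class and field names below are illustrative, not the actual contents of the config module:

```python
# Hypothetical sketch of SECRET_KEY validation (CRIT-001); names are assumptions.
from pydantic import field_validator
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    SECRET_KEY: str = ""

    @field_validator("SECRET_KEY")
    @classmethod
    def validate_secret_key(cls, value: str) -> str:
        # Refuse to start with an empty, default, or too-short secret.
        if not value or value == "changeme" or len(value) < 32:
            raise ValueError("SECRET_KEY must be a random string of at least 32 characters")
        return value
```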
## High Priority Issues (HIGH-001~008) - All Fixed
- Project soft delete using is_active flag
- Redis session token bytes handling
- Rate limiting with slowapi (5 req/min for login; see the sketch after this list)
- Attachment API permission checks
- Kanban view with drag-and-drop
- Workload heatmap UI (WorkloadPage, WorkloadHeatmap)
- TaskDetailModal integrating Comments/Attachments
- UserSelect component for task assignment
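
The rate limiter module itself is not part of this excerpt; a plausible sketch of backend/app/core/rate_limiter.py based on the slowapi usage visible in the login endpoint diff below (the keying strategy and app wiring are assumptions):

```python
# Plausible backend/app/core/rate_limiter.py; key function and wiring are assumptions.
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address

# One shared limiter instance, keyed by client IP.
limiter = Limiter(key_func=get_remote_address)

# Expected wiring in main.py so @limiter.limit("5/minute") takes effect:
#   app.state.limiter = limiter
#   app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
```

slowapi requires the decorated endpoint to accept a `Request` argument, which is why the login signature in the diff gains `request: Request`.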
## Medium Priority Issues (MED-001~012) - All Fixed
- MED-001~005: DB commits, N+1 queries, datetime, error format, blocker flag
- MED-006: Project health dashboard (HealthService, ProjectHealthPage)
- MED-007: Capacity update API (PUT /api/users/{id}/capacity)
- MED-008: Schedule triggers (cron parsing, deadline reminders)
- MED-009: Watermark feature (image/PDF watermarking; see the sketch after this list)
- MED-010~012: useEffect deps, DOM operations, PDF export
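
The watermark service internals are not shown in this diff; a minimal sketch of the MED-009 image path, assuming Pillow (positioning, opacity handling, and the PDF path are omitted):

```python
# Hypothetical image-watermark helper (MED-009); the real watermark_service.py is not shown here.
import io
from datetime import datetime

from PIL import Image, ImageDraw


def add_image_watermark(image_bytes: bytes, user_name: str,
                        employee_id: str, download_time: datetime) -> tuple[bytes, str]:
    image = Image.open(io.BytesIO(image_bytes)).convert("RGBA")
    draw = ImageDraw.Draw(image)
    label = f"{user_name} ({employee_id}) {download_time:%Y-%m-%d %H:%M}"
    # Draw semi-transparent text near the bottom-left corner.
    draw.text((10, image.height - 20), label, fill=(255, 255, 255, 128))
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")  # RGBA output forces PNG
    return buffer.getvalue(), "png"
```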
## New Files
- backend/app/api/health/ - Project health API
- backend/app/services/health_service.py
- backend/app/services/trigger_scheduler.py
- backend/app/services/watermark_service.py
- backend/app/core/rate_limiter.py
- frontend/src/pages/ProjectHealthPage.tsx
- frontend/src/components/ProjectHealthCard.tsx
- frontend/src/components/KanbanBoard.tsx
- frontend/src/components/WorkloadHeatmap.tsx
## Tests
- 113 new tests passing (health: 32, users: 14, triggers: 35, watermark: 32)
## OpenSpec Archives
- add-project-health-dashboard
- add-capacity-update-api
- add-schedule-triggers
- add-watermark-feature
- add-rate-limiting
- enhance-frontend-ux
- add-resource-management-ui
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
@@ -1,38 +1,74 @@
import uuid
from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Request
from fastapi.responses import FileResponse
from fastapi.responses import FileResponse, Response
from sqlalchemy.orm import Session
from typing import Optional

from app.core.database import get_db
from app.middleware.auth import get_current_user
from app.models import User, Task, Attachment, AttachmentVersion, AuditAction
from app.middleware.auth import get_current_user, check_task_access, check_task_edit_access
from app.models import User, Task, Project, Attachment, AttachmentVersion, AuditAction
from app.schemas.attachment import (
    AttachmentResponse, AttachmentListResponse, AttachmentDetailResponse,
    AttachmentVersionResponse, VersionHistoryResponse
)
from app.services.file_storage_service import file_storage_service
from app.services.audit_service import AuditService
from app.services.watermark_service import watermark_service

router = APIRouter(prefix="/api", tags=["attachments"])


def get_task_or_404(db: Session, task_id: str) -> Task:
    """Get task or raise 404."""
def get_task_with_access_check(db: Session, task_id: str, current_user: User, require_edit: bool = False) -> Task:
    """Get task and verify access permissions."""
    task = db.query(Task).filter(Task.id == task_id).first()
    if not task:
        raise HTTPException(status_code=404, detail="Task not found")

    # Get project for access check
    project = db.query(Project).filter(Project.id == task.project_id).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    # Check access permission
    if not check_task_access(current_user, task, project):
        raise HTTPException(status_code=403, detail="Access denied to this task")

    # Check edit permission if required
    if require_edit and not check_task_edit_access(current_user, task, project):
        raise HTTPException(status_code=403, detail="Edit access denied to this task")

    return task


def get_attachment_or_404(db: Session, attachment_id: str) -> Attachment:
    """Get attachment or raise 404."""
def get_attachment_with_access_check(
    db: Session, attachment_id: str, current_user: User, require_edit: bool = False
) -> Attachment:
    """Get attachment and verify access permissions."""
    attachment = db.query(Attachment).filter(
        Attachment.id == attachment_id,
        Attachment.is_deleted == False
    ).first()
    if not attachment:
        raise HTTPException(status_code=404, detail="Attachment not found")

    # Get task and project for access check
    task = db.query(Task).filter(Task.id == attachment.task_id).first()
    if not task:
        raise HTTPException(status_code=404, detail="Task not found")

    project = db.query(Project).filter(Project.id == task.project_id).first()
    if not project:
        raise HTTPException(status_code=404, detail="Project not found")

    # Check access permission
    if not check_task_access(current_user, task, project):
        raise HTTPException(status_code=403, detail="Access denied to this attachment")

    # Check edit permission if required
    if require_edit and not check_task_edit_access(current_user, task, project):
        raise HTTPException(status_code=403, detail="Edit access denied to this attachment")

    return attachment


@@ -76,7 +112,7 @@ async def upload_attachment(
    current_user: User = Depends(get_current_user)
):
    """Upload a file attachment to a task."""
    task = get_task_or_404(db, task_id)
    task = get_task_with_access_check(db, task_id, current_user, require_edit=True)

    # Check if attachment with same filename exists (for versioning in Phase 2)
    existing = db.query(Attachment).filter(
@@ -115,9 +151,6 @@ async def upload_attachment(
        existing.file_size = file_size
        existing.updated_at = version.created_at

        db.commit()
        db.refresh(existing)

        # Audit log
        AuditService.log_event(
            db=db,
@@ -129,7 +162,9 @@ async def upload_attachment(
            changes=[{"field": "version", "old_value": new_version - 1, "new_value": new_version}],
            request_metadata=getattr(request.state, "audit_metadata", None)
        )

        db.commit()
        db.refresh(existing)

        return attachment_to_response(existing)

@@ -175,9 +210,6 @@ async def upload_attachment(
    )
    db.add(version)

    db.commit()
    db.refresh(attachment)

    # Audit log
    AuditService.log_event(
        db=db,
@@ -189,7 +221,9 @@ async def upload_attachment(
        changes=[{"field": "filename", "old_value": None, "new_value": attachment.filename}],
        request_metadata=getattr(request.state, "audit_metadata", None)
    )

    db.commit()
    db.refresh(attachment)

    return attachment_to_response(attachment)

@@ -201,7 +235,7 @@ async def list_task_attachments(
    current_user: User = Depends(get_current_user)
):
    """List all attachments for a task."""
    task = get_task_or_404(db, task_id)
    task = get_task_with_access_check(db, task_id, current_user, require_edit=False)

    attachments = db.query(Attachment).filter(
        Attachment.task_id == task_id,
@@ -221,7 +255,7 @@ async def get_attachment(
    current_user: User = Depends(get_current_user)
):
    """Get attachment details with version history."""
    attachment = get_attachment_or_404(db, attachment_id)
    attachment = get_attachment_with_access_check(db, attachment_id, current_user, require_edit=False)

    versions = db.query(AttachmentVersion).filter(
        AttachmentVersion.attachment_id == attachment_id
@@ -252,8 +286,8 @@ async def download_attachment(
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Download an attachment file."""
    attachment = get_attachment_or_404(db, attachment_id)
    """Download an attachment file with dynamic watermark."""
    attachment = get_attachment_with_access_check(db, attachment_id, current_user, require_edit=False)

    # Get version to download
    target_version = version or attachment.current_version
@@ -272,6 +306,7 @@ async def download_attachment(
        raise HTTPException(status_code=404, detail="File not found on disk")

    # Audit log
    download_time = datetime.now()
    AuditService.log_event(
        db=db,
        event_type="attachment.download",
@@ -284,6 +319,63 @@ async def download_attachment(
    )
    db.commit()

    # Check if watermark should be applied
    mime_type = attachment.mime_type or ""
    if watermark_service.supports_watermark(mime_type):
        try:
            # Read the original file
            with open(file_path, "rb") as f:
                file_bytes = f.read()

            # Apply watermark based on file type
            if watermark_service.is_supported_image(mime_type):
                watermarked_bytes, output_format = watermark_service.add_image_watermark(
                    image_bytes=file_bytes,
                    user_name=current_user.name,
                    employee_id=current_user.employee_id,
                    download_time=download_time
                )
                # Update mime type based on output format
                output_mime_type = f"image/{output_format}"
                # Update filename extension if format changed
                original_filename = attachment.original_filename
                if output_format == "png" and not original_filename.lower().endswith(".png"):
                    original_filename = original_filename.rsplit(".", 1)[0] + ".png"

                return Response(
                    content=watermarked_bytes,
                    media_type=output_mime_type,
                    headers={
                        "Content-Disposition": f'attachment; filename="{original_filename}"'
                    }
                )

            elif watermark_service.is_supported_pdf(mime_type):
                watermarked_bytes = watermark_service.add_pdf_watermark(
                    pdf_bytes=file_bytes,
                    user_name=current_user.name,
                    employee_id=current_user.employee_id,
                    download_time=download_time
                )

                return Response(
                    content=watermarked_bytes,
                    media_type="application/pdf",
                    headers={
                        "Content-Disposition": f'attachment; filename="{attachment.original_filename}"'
                    }
                )

        except Exception as e:
            # If watermarking fails, log the error but still return the original file
            # This ensures users can still download files even if watermarking has issues
            import logging
            logging.getLogger(__name__).warning(
                f"Watermarking failed for attachment {attachment_id}: {str(e)}. "
                "Returning original file."
            )

    # Return original file without watermark for unsupported types or on error
    return FileResponse(
        path=str(file_path),
        filename=attachment.original_filename,
@@ -299,11 +391,10 @@ async def delete_attachment(
    current_user: User = Depends(get_current_user)
):
    """Soft delete an attachment."""
    attachment = get_attachment_or_404(db, attachment_id)
    attachment = get_attachment_with_access_check(db, attachment_id, current_user, require_edit=True)

    # Soft delete
    attachment.is_deleted = True
    db.commit()

    # Audit log
    AuditService.log_event(
@@ -316,9 +407,10 @@ async def delete_attachment(
        changes=[{"field": "is_deleted", "old_value": False, "new_value": True}],
        request_metadata=getattr(request.state, "audit_metadata", None)
    )

    db.commit()

    return {"message": "Attachment deleted", "id": attachment_id}
    return {"detail": "Attachment deleted", "id": attachment_id}


@router.get("/attachments/{attachment_id}/versions", response_model=VersionHistoryResponse)
@@ -328,7 +420,7 @@ async def get_version_history(
    current_user: User = Depends(get_current_user)
):
    """Get version history for an attachment."""
    attachment = get_attachment_or_404(db, attachment_id)
    attachment = get_attachment_with_access_check(db, attachment_id, current_user, require_edit=False)

    versions = db.query(AttachmentVersion).filter(
        AttachmentVersion.attachment_id == attachment_id
@@ -351,7 +443,7 @@ async def restore_version(
    current_user: User = Depends(get_current_user)
):
    """Restore an attachment to a specific version."""
    attachment = get_attachment_or_404(db, attachment_id)
    attachment = get_attachment_with_access_check(db, attachment_id, current_user, require_edit=True)

    version_record = db.query(AttachmentVersion).filter(
        AttachmentVersion.attachment_id == attachment_id,
@@ -364,7 +456,6 @@ async def restore_version(
    old_version = attachment.current_version
    attachment.current_version = version
    attachment.file_size = version_record.file_size
    db.commit()

    # Audit log
    AuditService.log_event(
@@ -377,6 +468,7 @@ async def restore_version(
        changes=[{"field": "current_version", "old_value": old_version, "new_value": version}],
        request_metadata=getattr(request.state, "audit_metadata", None)
    )

    db.commit()

    return {"message": f"Restored to version {version}", "current_version": version}
    return {"detail": f"Restored to version {version}", "current_version": version}

@@ -1,6 +1,6 @@
import csv
import io
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, status, Query
from fastapi.responses import StreamingResponse
@@ -191,7 +191,7 @@ async def export_audit_logs(

    output.seek(0)

    filename = f"audit_logs_{datetime.utcnow().strftime('%Y%m%d_%H%M%S')}.csv"
    filename = f"audit_logs_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}.csv"

    return StreamingResponse(
        iter([output.getvalue()]),

@@ -1,53 +1,86 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, status, Request
from sqlalchemy.orm import Session

from app.core.config import settings
from app.core.database import get_db
from app.core.security import create_access_token, create_token_payload
from app.core.redis import get_redis
from app.core.rate_limiter import limiter
from app.models.user import User
from app.models.audit_log import AuditAction
from app.schemas.auth import LoginRequest, LoginResponse, UserInfo
from app.services.auth_client import (
    verify_credentials,
    AuthAPIError,
    AuthAPIConnectionError,
)
from app.services.audit_service import AuditService
from app.middleware.auth import get_current_user

router = APIRouter()


@router.post("/login", response_model=LoginResponse)
@limiter.limit("5/minute")
async def login(
    request: LoginRequest,
    request: Request,
    login_request: LoginRequest,
    db: Session = Depends(get_db),
    redis_client=Depends(get_redis),
):
    """
    Authenticate user via external API and return JWT token.
    """
    # Prepare metadata for audit logging
    client_ip = request.client.host if request.client else "unknown"
    user_agent = request.headers.get("user-agent", "unknown")

    try:
        # Verify credentials with external API
        auth_result = await verify_credentials(request.email, request.password)
        auth_result = await verify_credentials(login_request.email, login_request.password)
    except AuthAPIConnectionError:
        # Log failed login attempt due to service unavailable
        AuditService.log_event(
            db=db,
            event_type="user.login_failed",
            resource_type="user",
            action=AuditAction.LOGIN,
            user_id=None,
            resource_id=None,
            changes={"email": login_request.email, "reason": "auth_service_unavailable"},
            request_metadata={"ip_address": client_ip, "user_agent": user_agent},
        )
        db.commit()
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Authentication service temporarily unavailable",
        )
    except AuthAPIError as e:
        # Log failed login attempt due to invalid credentials
        AuditService.log_event(
            db=db,
            event_type="user.login_failed",
            resource_type="user",
            action=AuditAction.LOGIN,
            user_id=None,
            resource_id=None,
            changes={"email": login_request.email, "reason": "invalid_credentials"},
            request_metadata={"ip_address": client_ip, "user_agent": user_agent},
        )
        db.commit()
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid credentials",
        )

    # Find or create user in local database
    user = db.query(User).filter(User.email == request.email).first()
    user = db.query(User).filter(User.email == login_request.email).first()

    if not user:
        # Create new user based on auth API response
        user = User(
            email=request.email,
            name=auth_result.get("name", request.email.split("@")[0]),
            email=login_request.email,
            name=auth_result.get("name", login_request.email.split("@")[0]),
            is_active=True,
        )
        db.add(user)
@@ -82,6 +115,19 @@ async def login(
        access_token,
    )

    # Log successful login
    AuditService.log_event(
        db=db,
        event_type="user.login",
        resource_type="user",
        action=AuditAction.LOGIN,
        user_id=user.id,
        resource_id=user.id,
        changes=None,
        request_metadata={"ip_address": client_ip, "user_agent": user_agent},
    )
    db.commit()

    return LoginResponse(
        access_token=access_token,
        user=UserInfo(
@@ -106,7 +152,7 @@ async def logout(
    # Remove session from Redis
    redis_client.delete(f"session:{current_user.id}")

    return {"message": "Successfully logged out"}
    return {"detail": "Successfully logged out"}


@router.get("/me", response_model=UserInfo)

@@ -1,5 +1,5 @@
import uuid
from datetime import datetime
from datetime import datetime, timezone
from fastapi import APIRouter, Depends, HTTPException, status, Request
from sqlalchemy.orm import Session

@@ -138,7 +138,8 @@ async def resolve_blocker(
    # Update blocker
    blocker.resolved_by = current_user.id
    blocker.resolution_note = resolve_data.resolution_note
    blocker.resolved_at = datetime.utcnow()
    # Use naive datetime for consistency with database storage
    blocker.resolved_at = datetime.now(timezone.utc).replace(tzinfo=None)

    # Check if there are other unresolved blockers
    other_blockers = db.query(Blocker).filter(

backend/app/api/health/__init__.py (new file)
@@ -0,0 +1,3 @@
from app.api.health.router import router

__all__ = ["router"]

backend/app/api/health/router.py (new file)
@@ -0,0 +1,70 @@
"""Project health API endpoints.

Provides endpoints for retrieving project health metrics
and dashboard information.
"""
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session

from app.core.database import get_db
from app.models import User
from app.schemas.project_health import (
    ProjectHealthWithDetails,
    ProjectHealthDashboardResponse,
)
from app.services.health_service import HealthService
from app.middleware.auth import get_current_user

router = APIRouter(prefix="/api/projects/health", tags=["Project Health"])


@router.get("/dashboard", response_model=ProjectHealthDashboardResponse)
async def get_health_dashboard(
    status_filter: Optional[str] = "active",
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Get health dashboard for all projects.

    Returns aggregated health metrics and summary statistics
    for all projects matching the status filter.

    - **status_filter**: Filter projects by status (default: "active")

    Returns:
    - **projects**: List of project health details
    - **summary**: Aggregated summary statistics
    """
    service = HealthService(db)
    return service.get_dashboard(status_filter=status_filter)


@router.get("/{project_id}", response_model=ProjectHealthWithDetails)
async def get_project_health(
    project_id: str,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """
    Get health information for a specific project.

    Returns detailed health metrics including risk level,
    schedule status, resource status, and task statistics.

    - **project_id**: UUID of the project

    Raises:
    - **404**: Project not found
    """
    service = HealthService(db)
    result = service.get_project_health(project_id)

    if not result:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Project not found"
        )

    return result

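For reference, a hypothetical client call against the new dashboard endpoint; the base URL and token handling are assumptions, while the path and query parameter come from the router above:

```python
# Illustrative only; base URL and auth token are placeholders.
import httpx

response = httpx.get(
    "http://localhost:8000/api/projects/health/dashboard",
    params={"status_filter": "active"},
    headers={"Authorization": "Bearer <token>"},
)
response.raise_for_status()
dashboard = response.json()
print(dashboard["summary"], len(dashboard["projects"]))
```
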
@@ -1,5 +1,5 @@
from typing import Optional
from datetime import datetime
from datetime import datetime, timezone
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.orm import Session

@@ -91,7 +91,8 @@ async def mark_as_read(

    if not notification.is_read:
        notification.is_read = True
        notification.read_at = datetime.utcnow()
        # Use naive datetime for consistency with database storage
        notification.read_at = datetime.now(timezone.utc).replace(tzinfo=None)
        db.commit()
        db.refresh(notification)

@@ -104,7 +105,8 @@ async def mark_all_as_read(
    current_user: User = Depends(get_current_user),
):
    """Mark all notifications as read."""
    now = datetime.utcnow()
    # Use naive datetime for consistency with database storage
    now = datetime.now(timezone.utc).replace(tzinfo=None)

    updated_count = db.query(Notification).filter(
        Notification.user_id == current_user.id,

@@ -273,9 +273,9 @@ async def delete_project(
    current_user: User = Depends(get_current_user),
):
    """
    Delete a project (hard delete, cascades to tasks).
    Delete a project (soft delete - sets is_active to False).
    """
    project = db.query(Project).filter(Project.id == project_id).first()
    project = db.query(Project).filter(Project.id == project_id, Project.is_active == True).first()

    if not project:
        raise HTTPException(
@@ -289,7 +289,7 @@ async def delete_project(
            detail="Only project owner can delete",
        )

    # Audit log before deletion (this is a high-sensitivity event that triggers alert)
    # Audit log before soft deletion (this is a high-sensitivity event that triggers alert)
    AuditService.log_event(
        db=db,
        event_type="project.delete",
@@ -297,11 +297,12 @@ async def delete_project(
        action=AuditAction.DELETE,
        user_id=current_user.id,
        resource_id=project.id,
        changes=[{"field": "title", "old_value": project.title, "new_value": None}],
        changes=[{"field": "is_active", "old_value": True, "new_value": False}],
        request_metadata=get_audit_metadata(request),
    )

    db.delete(project)
    # Soft delete - set is_active to False
    project.is_active = False
    db.commit()

    return None

@@ -1,11 +1,11 @@
import uuid
from datetime import datetime
from datetime import datetime, timezone
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, status, Query, Request
from sqlalchemy.orm import Session

from app.core.database import get_db
from app.models import User, Project, Task, TaskStatus, AuditAction
from app.models import User, Project, Task, TaskStatus, AuditAction, Blocker
from app.schemas.task import (
    TaskCreate, TaskUpdate, TaskResponse, TaskWithDetails, TaskListResponse,
    TaskStatusUpdate, TaskAssignUpdate
@@ -374,7 +374,8 @@ async def delete_task(
            detail="Permission denied",
        )

    now = datetime.utcnow()
    # Use naive datetime for consistency with database storage
    now = datetime.now(timezone.utc).replace(tzinfo=None)

    # Soft delete the task
    task.is_deleted = True
@@ -504,11 +505,18 @@ async def update_task_status(

    task.status_id = status_data.status_id

    # Auto-set blocker_flag based on status name
    # Auto-set blocker_flag based on status name and actual Blocker records
    if new_status.name.lower() == "blocked":
        task.blocker_flag = True
    else:
        task.blocker_flag = False
        # Only set blocker_flag = False if there are no unresolved blockers
        unresolved_blockers = db.query(Blocker).filter(
            Blocker.task_id == task.id,
            Blocker.resolved_at == None,
        ).count()
        if unresolved_blockers == 0:
            task.blocker_flag = False
        # If there are unresolved blockers, keep blocker_flag as is

    # Evaluate triggers for status changes
    if old_status_id != status_data.status_id:

@@ -10,6 +10,7 @@ from app.schemas.trigger import (
    TriggerLogResponse, TriggerLogListResponse, TriggerUserInfo
)
from app.middleware.auth import get_current_user, check_project_access, check_project_edit_access
from app.services.trigger_scheduler import TriggerSchedulerService

router = APIRouter(tags=["triggers"])

@@ -65,18 +66,50 @@ async def create_trigger(
            detail="Invalid trigger type. Must be 'field_change' or 'schedule'",
        )

    # Validate conditions
    if trigger_data.conditions.field not in ["status_id", "assignee_id", "priority"]:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid condition field. Must be 'status_id', 'assignee_id', or 'priority'",
        )
    # Validate conditions based on trigger type
    if trigger_data.trigger_type == "field_change":
        # Validate field_change conditions
        if not trigger_data.conditions.field:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Field is required for field_change triggers",
            )
        if trigger_data.conditions.field not in ["status_id", "assignee_id", "priority"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid condition field. Must be 'status_id', 'assignee_id', or 'priority'",
            )
        if not trigger_data.conditions.operator:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Operator is required for field_change triggers",
            )
        if trigger_data.conditions.operator not in ["equals", "not_equals", "changed_to", "changed_from"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid operator. Must be 'equals', 'not_equals', 'changed_to', or 'changed_from'",
            )
    elif trigger_data.trigger_type == "schedule":
        # Validate schedule conditions
        has_cron = trigger_data.conditions.cron_expression is not None
        has_deadline = trigger_data.conditions.deadline_reminder_days is not None

    if trigger_data.conditions.operator not in ["equals", "not_equals", "changed_to", "changed_from"]:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid operator. Must be 'equals', 'not_equals', 'changed_to', or 'changed_from'",
        )
        if not has_cron and not has_deadline:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Schedule triggers require either cron_expression or deadline_reminder_days",
            )

        # Validate cron expression if provided
        if has_cron:
            is_valid, error_msg = TriggerSchedulerService.parse_cron_expression(
                trigger_data.conditions.cron_expression
            )
            if not is_valid:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=error_msg or "Invalid cron expression",
                )

    # Create trigger
    trigger = Trigger(
@@ -186,13 +219,25 @@ async def update_trigger(
    if trigger_data.description is not None:
        trigger.description = trigger_data.description
    if trigger_data.conditions is not None:
        # Validate conditions
        if trigger_data.conditions.field not in ["status_id", "assignee_id", "priority"]:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid condition field",
            )
        trigger.conditions = trigger_data.conditions.model_dump()
        # Validate conditions based on trigger type
        if trigger.trigger_type == "field_change":
            if trigger_data.conditions.field and trigger_data.conditions.field not in ["status_id", "assignee_id", "priority"]:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="Invalid condition field",
                )
        elif trigger.trigger_type == "schedule":
            # Validate cron expression if provided
            if trigger_data.conditions.cron_expression is not None:
                is_valid, error_msg = TriggerSchedulerService.parse_cron_expression(
                    trigger_data.conditions.cron_expression
                )
                if not is_valid:
                    raise HTTPException(
                        status_code=status.HTTP_400_BAD_REQUEST,
                        detail=error_msg or "Invalid cron expression",
                    )
        trigger.conditions = trigger_data.conditions.model_dump(exclude_none=True)
    if trigger_data.actions is not None:
        trigger.actions = [a.model_dump() for a in trigger_data.actions]
    if trigger_data.is_active is not None:

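TriggerSchedulerService.parse_cron_expression is called above but defined in backend/app/services/trigger_scheduler.py, which is not part of this excerpt; a plausible shape for it, assuming the croniter package:

```python
# Hypothetical sketch; the actual trigger_scheduler.py implementation is not shown in this diff.
from typing import Optional, Tuple

from croniter import croniter


class TriggerSchedulerService:
    @staticmethod
    def parse_cron_expression(expression: str) -> Tuple[bool, Optional[str]]:
        """Return (is_valid, error_message) for a 5-field cron expression."""
        if not expression or not croniter.is_valid(expression):
            return False, f"Invalid cron expression: {expression!r}"
        return True, None
```
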
@@ -4,10 +4,11 @@ from sqlalchemy import or_
from typing import List

from app.core.database import get_db
from app.core.redis import get_redis
from app.models.user import User
from app.models.role import Role
from app.models import AuditAction
from app.schemas.user import UserResponse, UserUpdate
from app.schemas.user import UserResponse, UserUpdate, CapacityUpdate
from app.middleware.auth import (
    get_current_user,
    require_permission,
@@ -239,3 +240,86 @@ async def set_admin_status(
    db.commit()
    db.refresh(user)
    return user


@router.put("/{user_id}/capacity", response_model=UserResponse)
async def update_user_capacity(
    user_id: str,
    capacity: CapacityUpdate,
    request: Request,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
    redis_client=Depends(get_redis),
):
    """
    Update user's weekly capacity hours.

    Permission: admin, manager, or the user themselves can update capacity.
    - Admin/Manager can update any user's capacity
    - Regular users can only update their own capacity

    Capacity changes are recorded in the audit trail and workload cache is invalidated.
    """
    user = db.query(User).filter(User.id == user_id).first()
    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found",
        )

    # Permission check: admin, manager, or the user themselves can update capacity
    is_self = current_user.id == user_id
    is_admin = current_user.is_system_admin
    is_manager = False

    # Check if current user has manager role
    if current_user.role and current_user.role.name == "manager":
        is_manager = True

    if not is_self and not is_admin and not is_manager:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Only admin, manager, or the user themselves can update capacity",
        )

    # Store old capacity for audit log
    old_capacity = float(user.capacity) if user.capacity else None

    # Update capacity (validation is handled by Pydantic schema)
    user.capacity = capacity.capacity_hours
    new_capacity = float(capacity.capacity_hours)

    # Record capacity change in audit trail
    if old_capacity != new_capacity:
        AuditService.log_event(
            db=db,
            event_type="user.capacity_change",
            resource_type="user",
            action=AuditAction.UPDATE,
            user_id=current_user.id,
            resource_id=user.id,
            changes=[{
                "field": "capacity",
                "old_value": old_capacity,
                "new_value": new_capacity
            }],
            request_metadata=get_audit_metadata(request),
        )

    db.commit()
    db.refresh(user)

    # Invalidate workload cache for this user
    # Cache keys follow pattern: workload:{user_id}:* or workload:heatmap:*
    try:
        # Delete user-specific workload cache
        for key in redis_client.scan_iter(f"workload:{user_id}:*"):
            redis_client.delete(key)
        # Delete heatmap cache (contains all users' workload data)
        for key in redis_client.scan_iter("workload:heatmap:*"):
            redis_client.delete(key)
    except Exception:
        # Cache invalidation failure should not fail the request
        pass

    return user