diff --git a/backend/app/api/reports/__init__.py b/backend/app/api/reports/__init__.py new file mode 100644 index 0000000..b263c20 --- /dev/null +++ b/backend/app/api/reports/__init__.py @@ -0,0 +1,3 @@ +from app.api.reports.router import router + +__all__ = ["router"] diff --git a/backend/app/api/reports/router.py b/backend/app/api/reports/router.py new file mode 100644 index 0000000..358954f --- /dev/null +++ b/backend/app/api/reports/router.py @@ -0,0 +1,146 @@ +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.orm import Session +from typing import Optional + +from app.core.database import get_db +from app.models import User, ReportHistory, ScheduledReport +from app.schemas.report import ( + WeeklyReportContent, ReportHistoryListResponse, ReportHistoryItem, + GenerateReportResponse, ReportSummary +) +from app.middleware.auth import get_current_user +from app.services.report_service import ReportService + +router = APIRouter(tags=["reports"]) + + +@router.get("/api/reports/weekly/preview", response_model=WeeklyReportContent) +async def preview_weekly_report( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Preview the weekly report for the current user. + Shows what would be included in the next weekly report. + """ + content = ReportService.get_weekly_stats(db, current_user.id) + + return WeeklyReportContent( + week_start=content["week_start"], + week_end=content["week_end"], + generated_at=content["generated_at"], + projects=content["projects"], + summary=ReportSummary(**content["summary"]), + ) + + +@router.post("/api/reports/weekly/generate", response_model=GenerateReportResponse) +async def generate_weekly_report( + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Manually trigger weekly report generation for the current user. + """ + # Generate report + report_history = ReportService.generate_weekly_report(db, current_user.id) + + if not report_history: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to generate report", + ) + + # Send notification + ReportService.send_report_notification(db, current_user.id, report_history.content) + db.commit() + + summary = report_history.content.get("summary", {}) + + return GenerateReportResponse( + message="Weekly report generated successfully", + report_id=report_history.id, + summary=ReportSummary( + completed_count=summary.get("completed_count", 0), + in_progress_count=summary.get("in_progress_count", 0), + overdue_count=summary.get("overdue_count", 0), + total_tasks=summary.get("total_tasks", 0), + ), + ) + + +@router.get("/api/reports/history", response_model=ReportHistoryListResponse) +async def list_report_history( + limit: int = 10, + offset: int = 0, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + List report history for the current user. 
+ """ + # Get scheduled report for this user + scheduled_report = db.query(ScheduledReport).filter( + ScheduledReport.recipient_id == current_user.id, + ).first() + + if not scheduled_report: + return ReportHistoryListResponse(reports=[], total=0) + + # Get history + total = db.query(ReportHistory).filter( + ReportHistory.report_id == scheduled_report.id, + ).count() + + history = db.query(ReportHistory).filter( + ReportHistory.report_id == scheduled_report.id, + ).order_by(ReportHistory.generated_at.desc()).offset(offset).limit(limit).all() + + return ReportHistoryListResponse( + reports=[ + ReportHistoryItem( + id=h.id, + report_id=h.report_id, + generated_at=h.generated_at, + content=h.content, + status=h.status, + error_message=h.error_message, + ) for h in history + ], + total=total, + ) + + +@router.get("/api/reports/history/{report_id}") +async def get_report_detail( + report_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """ + Get detailed content of a specific report. + """ + report = db.query(ReportHistory).filter(ReportHistory.id == report_id).first() + + if not report: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Report not found", + ) + + # Check ownership + scheduled_report = report.report + if scheduled_report.recipient_id != current_user.id and not current_user.is_system_admin: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied", + ) + + return ReportHistoryItem( + id=report.id, + report_id=report.report_id, + generated_at=report.generated_at, + content=report.content, + status=report.status, + error_message=report.error_message, + ) diff --git a/backend/app/api/tasks/router.py b/backend/app/api/tasks/router.py index 2a8de4d..6830564 100644 --- a/backend/app/api/tasks/router.py +++ b/backend/app/api/tasks/router.py @@ -14,6 +14,7 @@ from app.middleware.auth import ( ) from app.middleware.audit import get_audit_metadata from app.services.audit_service import AuditService +from app.services.trigger_service import TriggerService router = APIRouter(tags=["tasks"]) @@ -271,7 +272,7 @@ async def update_task( detail="Permission denied", ) - # Capture old values for audit + # Capture old values for audit and triggers old_values = { "title": task.title, "description": task.description, @@ -289,7 +290,7 @@ async def update_task( else: setattr(task, field, value) - # Capture new values for audit + # Capture new values for audit and triggers new_values = { "title": task.title, "description": task.description, @@ -313,6 +314,10 @@ async def update_task( request_metadata=get_audit_metadata(request), ) + # Evaluate triggers for priority changes + if "priority" in update_data: + TriggerService.evaluate_triggers(db, task, old_values, new_values, current_user) + db.commit() db.refresh(task) @@ -397,6 +402,9 @@ async def update_task_status( detail="Status not found in this project", ) + # Capture old status for triggers + old_status_id = task.status_id + task.status_id = status_data.status_id # Auto-set blocker_flag based on status name @@ -405,6 +413,15 @@ async def update_task_status( else: task.blocker_flag = False + # Evaluate triggers for status changes + if old_status_id != status_data.status_id: + TriggerService.evaluate_triggers( + db, task, + {"status_id": old_status_id}, + {"status_id": status_data.status_id}, + current_user + ) + db.commit() db.refresh(task) @@ -460,6 +477,15 @@ async def assign_task( request_metadata=get_audit_metadata(request), ) + # Evaluate 
triggers for assignee changes + if old_assignee_id != assign_data.assignee_id: + TriggerService.evaluate_triggers( + db, task, + {"assignee_id": old_assignee_id}, + {"assignee_id": assign_data.assignee_id}, + current_user + ) + db.commit() db.refresh(task) diff --git a/backend/app/api/triggers/__init__.py b/backend/app/api/triggers/__init__.py new file mode 100644 index 0000000..c035005 --- /dev/null +++ b/backend/app/api/triggers/__init__.py @@ -0,0 +1,3 @@ +from app.api.triggers.router import router + +__all__ = ["router"] diff --git a/backend/app/api/triggers/router.py b/backend/app/api/triggers/router.py new file mode 100644 index 0000000..97bdd86 --- /dev/null +++ b/backend/app/api/triggers/router.py @@ -0,0 +1,276 @@ +import uuid +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.orm import Session +from typing import Optional + +from app.core.database import get_db +from app.models import User, Project, Trigger, TriggerLog +from app.schemas.trigger import ( + TriggerCreate, TriggerUpdate, TriggerResponse, TriggerListResponse, + TriggerLogResponse, TriggerLogListResponse, TriggerUserInfo +) +from app.middleware.auth import get_current_user, check_project_access, check_project_edit_access + +router = APIRouter(tags=["triggers"]) + + +def trigger_to_response(trigger: Trigger) -> TriggerResponse: + """Convert Trigger model to TriggerResponse.""" + return TriggerResponse( + id=trigger.id, + project_id=trigger.project_id, + name=trigger.name, + description=trigger.description, + trigger_type=trigger.trigger_type, + conditions=trigger.conditions, + actions=trigger.actions if isinstance(trigger.actions, list) else [trigger.actions], + is_active=trigger.is_active, + created_by=trigger.created_by, + created_at=trigger.created_at, + updated_at=trigger.updated_at, + creator=TriggerUserInfo( + id=trigger.creator.id, + name=trigger.creator.name, + email=trigger.creator.email, + ) if trigger.creator else None, + ) + + +@router.post("/api/projects/{project_id}/triggers", response_model=TriggerResponse, status_code=status.HTTP_201_CREATED) +async def create_trigger( + project_id: str, + trigger_data: TriggerCreate, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """Create a new trigger for a project.""" + project = db.query(Project).filter(Project.id == project_id).first() + + if not project: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Project not found", + ) + + if not check_project_edit_access(current_user, project): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Permission denied - only project owner can manage triggers", + ) + + # Validate trigger type + if trigger_data.trigger_type not in ["field_change", "schedule"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid trigger type. Must be 'field_change' or 'schedule'", + ) + + # Validate conditions + if trigger_data.conditions.field not in ["status_id", "assignee_id", "priority"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid condition field. Must be 'status_id', 'assignee_id', or 'priority'", + ) + + if trigger_data.conditions.operator not in ["equals", "not_equals", "changed_to", "changed_from"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid operator. 
Must be 'equals', 'not_equals', 'changed_to', or 'changed_from'", + ) + + # Create trigger + trigger = Trigger( + id=str(uuid.uuid4()), + project_id=project_id, + name=trigger_data.name, + description=trigger_data.description, + trigger_type=trigger_data.trigger_type, + conditions=trigger_data.conditions.model_dump(), + actions=[a.model_dump() for a in trigger_data.actions], + is_active=trigger_data.is_active, + created_by=current_user.id, + ) + db.add(trigger) + db.commit() + db.refresh(trigger) + + return trigger_to_response(trigger) + + +@router.get("/api/projects/{project_id}/triggers", response_model=TriggerListResponse) +async def list_triggers( + project_id: str, + is_active: Optional[bool] = None, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """List all triggers for a project.""" + project = db.query(Project).filter(Project.id == project_id).first() + + if not project: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Project not found", + ) + + if not check_project_access(current_user, project): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied", + ) + + query = db.query(Trigger).filter(Trigger.project_id == project_id) + + if is_active is not None: + query = query.filter(Trigger.is_active == is_active) + + triggers = query.order_by(Trigger.created_at.desc()).all() + + return TriggerListResponse( + triggers=[trigger_to_response(t) for t in triggers], + total=len(triggers), + ) + + +@router.get("/api/triggers/{trigger_id}", response_model=TriggerResponse) +async def get_trigger( + trigger_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """Get a specific trigger by ID.""" + trigger = db.query(Trigger).filter(Trigger.id == trigger_id).first() + + if not trigger: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Trigger not found", + ) + + project = trigger.project + if not check_project_access(current_user, project): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied", + ) + + return trigger_to_response(trigger) + + +@router.put("/api/triggers/{trigger_id}", response_model=TriggerResponse) +async def update_trigger( + trigger_id: str, + trigger_data: TriggerUpdate, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """Update a trigger.""" + trigger = db.query(Trigger).filter(Trigger.id == trigger_id).first() + + if not trigger: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Trigger not found", + ) + + project = trigger.project + if not check_project_edit_access(current_user, project): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Permission denied", + ) + + # Update fields if provided + if trigger_data.name is not None: + trigger.name = trigger_data.name + if trigger_data.description is not None: + trigger.description = trigger_data.description + if trigger_data.conditions is not None: + # Validate conditions + if trigger_data.conditions.field not in ["status_id", "assignee_id", "priority"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid condition field", + ) + trigger.conditions = trigger_data.conditions.model_dump() + if trigger_data.actions is not None: + trigger.actions = [a.model_dump() for a in trigger_data.actions] + if trigger_data.is_active is not None: + trigger.is_active = trigger_data.is_active + + db.commit() + db.refresh(trigger) 
+ + return trigger_to_response(trigger) + + +@router.delete("/api/triggers/{trigger_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_trigger( + trigger_id: str, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """Delete a trigger.""" + trigger = db.query(Trigger).filter(Trigger.id == trigger_id).first() + + if not trigger: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Trigger not found", + ) + + project = trigger.project + if not check_project_edit_access(current_user, project): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Permission denied", + ) + + db.delete(trigger) + db.commit() + + +@router.get("/api/triggers/{trigger_id}/logs", response_model=TriggerLogListResponse) +async def list_trigger_logs( + trigger_id: str, + limit: int = 50, + offset: int = 0, + db: Session = Depends(get_db), + current_user: User = Depends(get_current_user), +): + """Get execution logs for a trigger.""" + trigger = db.query(Trigger).filter(Trigger.id == trigger_id).first() + + if not trigger: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Trigger not found", + ) + + project = trigger.project + if not check_project_access(current_user, project): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Access denied", + ) + + total = db.query(TriggerLog).filter(TriggerLog.trigger_id == trigger_id).count() + + logs = db.query(TriggerLog).filter( + TriggerLog.trigger_id == trigger_id, + ).order_by(TriggerLog.executed_at.desc()).offset(offset).limit(limit).all() + + return TriggerLogListResponse( + logs=[ + TriggerLogResponse( + id=log.id, + trigger_id=log.trigger_id, + task_id=log.task_id, + executed_at=log.executed_at, + status=log.status, + details=log.details, + error_message=log.error_message, + ) for log in logs + ], + total=total, + ) diff --git a/backend/app/core/scheduler.py b/backend/app/core/scheduler.py new file mode 100644 index 0000000..d282ae6 --- /dev/null +++ b/backend/app/core/scheduler.py @@ -0,0 +1,53 @@ +import logging +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from apscheduler.triggers.cron import CronTrigger + +from app.core.database import SessionLocal +from app.services.report_service import ReportService + +logger = logging.getLogger(__name__) + +scheduler = AsyncIOScheduler() + + +async def weekly_report_job(): + """Job function to generate weekly reports.""" + logger.info("Starting weekly report generation...") + + db = SessionLocal() + try: + generated_for = await ReportService.generate_all_weekly_reports(db) + logger.info(f"Weekly reports generated for {len(generated_for)} users") + except Exception as e: + logger.error(f"Error generating weekly reports: {e}") + finally: + db.close() + + +def init_scheduler(): + """Initialize the scheduler with jobs.""" + # Weekly report - Every Friday at 16:00 + scheduler.add_job( + weekly_report_job, + CronTrigger(day_of_week='fri', hour=16, minute=0), + id='weekly_report', + name='Generate Weekly Reports', + replace_existing=True, + ) + + logger.info("Scheduler initialized with weekly report job (Friday 16:00)") + + +def start_scheduler(): + """Start the scheduler.""" + if not scheduler.running: + init_scheduler() + scheduler.start() + logger.info("Scheduler started") + + +def shutdown_scheduler(): + """Shutdown the scheduler gracefully.""" + if scheduler.running: + scheduler.shutdown(wait=False) + logger.info("Scheduler shutdown") diff --git a/backend/app/main.py 
b/backend/app/main.py index 9b71af9..a2a5d29 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,7 +1,19 @@ +from contextlib import asynccontextmanager from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from app.middleware.audit import AuditMiddleware +from app.core.scheduler import start_scheduler, shutdown_scheduler + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Manage application lifespan events.""" + # Startup + start_scheduler() + yield + # Shutdown + shutdown_scheduler() from app.api.auth import router as auth_router from app.api.users import router as users_router from app.api.departments import router as departments_router @@ -15,12 +27,15 @@ from app.api.blockers import router as blockers_router from app.api.websocket import router as websocket_router from app.api.audit import router as audit_router from app.api.attachments import router as attachments_router +from app.api.triggers import router as triggers_router +from app.api.reports import router as reports_router from app.core.config import settings app = FastAPI( title="Project Control API", description="Cross-departmental project management system API", version="0.1.0", + lifespan=lifespan, ) # CORS middleware @@ -49,6 +64,8 @@ app.include_router(blockers_router) app.include_router(websocket_router) app.include_router(audit_router) app.include_router(attachments_router) +app.include_router(triggers_router) +app.include_router(reports_router) @app.get("/health") diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 51e19c6..c9790cb 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -14,10 +14,16 @@ from app.models.audit_log import AuditLog, AuditAction, SensitivityLevel, EVENT_ from app.models.audit_alert import AuditAlert from app.models.attachment import Attachment from app.models.attachment_version import AttachmentVersion +from app.models.trigger import Trigger, TriggerType +from app.models.trigger_log import TriggerLog, TriggerLogStatus +from app.models.scheduled_report import ScheduledReport, ReportType +from app.models.report_history import ReportHistory, ReportHistoryStatus __all__ = [ "User", "Role", "Department", "Space", "Project", "TaskStatus", "Task", "WorkloadSnapshot", "Comment", "Mention", "Notification", "Blocker", "AuditLog", "AuditAlert", "AuditAction", "SensitivityLevel", "EVENT_SENSITIVITY", "ALERT_EVENTS", - "Attachment", "AttachmentVersion" + "Attachment", "AttachmentVersion", + "Trigger", "TriggerType", "TriggerLog", "TriggerLogStatus", + "ScheduledReport", "ReportType", "ReportHistory", "ReportHistoryStatus" ] diff --git a/backend/app/models/project.py b/backend/app/models/project.py index ab7e7b7..3a8d146 100644 --- a/backend/app/models/project.py +++ b/backend/app/models/project.py @@ -38,3 +38,4 @@ class Project(Base): department = relationship("Department", back_populates="projects") task_statuses = relationship("TaskStatus", back_populates="project", cascade="all, delete-orphan") tasks = relationship("Task", back_populates="project", cascade="all, delete-orphan") + triggers = relationship("Trigger", back_populates="project", cascade="all, delete-orphan") diff --git a/backend/app/models/report_history.py b/backend/app/models/report_history.py new file mode 100644 index 0000000..c966fd5 --- /dev/null +++ b/backend/app/models/report_history.py @@ -0,0 +1,28 @@ +import uuid +import enum +from sqlalchemy import Column, String, Text, DateTime, ForeignKey, Enum, JSON +from 
sqlalchemy.sql import func +from sqlalchemy.orm import relationship +from app.core.database import Base + + +class ReportHistoryStatus(str, enum.Enum): + SENT = "sent" + FAILED = "failed" + + +class ReportHistory(Base): + __tablename__ = "pjctrl_report_history" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + report_id = Column(String(36), ForeignKey("pjctrl_scheduled_reports.id", ondelete="CASCADE"), nullable=False) + generated_at = Column(DateTime, server_default=func.now(), nullable=False) + content = Column(JSON, nullable=False) + status = Column( + Enum("sent", "failed", name="report_history_status_enum"), + nullable=False + ) + error_message = Column(Text, nullable=True) + + # Relationships + report = relationship("ScheduledReport", back_populates="history") diff --git a/backend/app/models/scheduled_report.py b/backend/app/models/scheduled_report.py new file mode 100644 index 0000000..172cfc7 --- /dev/null +++ b/backend/app/models/scheduled_report.py @@ -0,0 +1,28 @@ +import uuid +import enum +from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey, Enum +from sqlalchemy.sql import func +from sqlalchemy.orm import relationship +from app.core.database import Base + + +class ReportType(str, enum.Enum): + WEEKLY = "weekly" + + +class ScheduledReport(Base): + __tablename__ = "pjctrl_scheduled_reports" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + report_type = Column( + Enum("weekly", name="report_type_enum"), + nullable=False + ) + recipient_id = Column(String(36), ForeignKey("pjctrl_users.id", ondelete="CASCADE"), nullable=False) + is_active = Column(Boolean, default=True, nullable=False) + last_sent_at = Column(DateTime, nullable=True) + created_at = Column(DateTime, server_default=func.now(), nullable=False) + + # Relationships + recipient = relationship("User", back_populates="scheduled_reports") + history = relationship("ReportHistory", back_populates="report", cascade="all, delete-orphan") diff --git a/backend/app/models/task.py b/backend/app/models/task.py index bbd7b2a..108f9dd 100644 --- a/backend/app/models/task.py +++ b/backend/app/models/task.py @@ -48,3 +48,4 @@ class Task(Base): comments = relationship("Comment", back_populates="task", cascade="all, delete-orphan") blockers = relationship("Blocker", back_populates="task", cascade="all, delete-orphan") attachments = relationship("Attachment", back_populates="task", cascade="all, delete-orphan") + trigger_logs = relationship("TriggerLog", back_populates="task") diff --git a/backend/app/models/trigger.py b/backend/app/models/trigger.py new file mode 100644 index 0000000..f04ba8b --- /dev/null +++ b/backend/app/models/trigger.py @@ -0,0 +1,35 @@ +import uuid +import enum +from sqlalchemy import Column, String, Text, Boolean, DateTime, ForeignKey, Enum, JSON +from sqlalchemy.sql import func +from sqlalchemy.orm import relationship +from app.core.database import Base + + +class TriggerType(str, enum.Enum): + FIELD_CHANGE = "field_change" + SCHEDULE = "schedule" + + +class Trigger(Base): + __tablename__ = "pjctrl_triggers" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + project_id = Column(String(36), ForeignKey("pjctrl_projects.id", ondelete="CASCADE"), nullable=False) + name = Column(String(200), nullable=False) + description = Column(Text, nullable=True) + trigger_type = Column( + Enum("field_change", "schedule", name="trigger_type_enum"), + nullable=False + ) + conditions = Column(JSON, nullable=False) + 
actions = Column(JSON, nullable=False) + is_active = Column(Boolean, default=True, nullable=False) + created_by = Column(String(36), ForeignKey("pjctrl_users.id", ondelete="SET NULL"), nullable=True) + created_at = Column(DateTime, server_default=func.now(), nullable=False) + updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False) + + # Relationships + project = relationship("Project", back_populates="triggers") + creator = relationship("User", back_populates="created_triggers") + logs = relationship("TriggerLog", back_populates="trigger", cascade="all, delete-orphan") diff --git a/backend/app/models/trigger_log.py b/backend/app/models/trigger_log.py new file mode 100644 index 0000000..8df27b6 --- /dev/null +++ b/backend/app/models/trigger_log.py @@ -0,0 +1,30 @@ +import uuid +import enum +from sqlalchemy import Column, String, Text, DateTime, ForeignKey, Enum, JSON +from sqlalchemy.sql import func +from sqlalchemy.orm import relationship +from app.core.database import Base + + +class TriggerLogStatus(str, enum.Enum): + SUCCESS = "success" + FAILED = "failed" + + +class TriggerLog(Base): + __tablename__ = "pjctrl_trigger_logs" + + id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4())) + trigger_id = Column(String(36), ForeignKey("pjctrl_triggers.id", ondelete="CASCADE"), nullable=False) + task_id = Column(String(36), ForeignKey("pjctrl_tasks.id", ondelete="SET NULL"), nullable=True) + executed_at = Column(DateTime, server_default=func.now(), nullable=False) + status = Column( + Enum("success", "failed", name="trigger_log_status_enum"), + nullable=False + ) + details = Column(JSON, nullable=True) + error_message = Column(Text, nullable=True) + + # Relationships + trigger = relationship("Trigger", back_populates="logs") + task = relationship("Task", back_populates="trigger_logs") diff --git a/backend/app/models/user.py b/backend/app/models/user.py index 29ebeaf..1052f9f 100644 --- a/backend/app/models/user.py +++ b/backend/app/models/user.py @@ -36,3 +36,7 @@ class User(Base): notifications = relationship("Notification", back_populates="user", cascade="all, delete-orphan") reported_blockers = relationship("Blocker", foreign_keys="Blocker.reported_by", back_populates="reporter") resolved_blockers = relationship("Blocker", foreign_keys="Blocker.resolved_by", back_populates="resolver") + + # Automation relationships + created_triggers = relationship("Trigger", back_populates="creator") + scheduled_reports = relationship("ScheduledReport", back_populates="recipient", cascade="all, delete-orphan") diff --git a/backend/app/schemas/report.py b/backend/app/schemas/report.py new file mode 100644 index 0000000..d93f015 --- /dev/null +++ b/backend/app/schemas/report.py @@ -0,0 +1,50 @@ +from datetime import datetime +from typing import Optional, List, Dict, Any +from pydantic import BaseModel + + +class ProjectSummary(BaseModel): + project_id: str + project_title: str + completed_count: int + in_progress_count: int + overdue_count: int + total_tasks: int + + +class ReportSummary(BaseModel): + completed_count: int + in_progress_count: int + overdue_count: int + total_tasks: int + + +class WeeklyReportContent(BaseModel): + week_start: str + week_end: str + generated_at: str + projects: List[Dict[str, Any]] + summary: ReportSummary + + +class ReportHistoryItem(BaseModel): + id: str + report_id: str + generated_at: datetime + content: Dict[str, Any] + status: str + error_message: Optional[str] = None + + class Config: + from_attributes = True + + 
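+# Illustrative example of the JSON stored in ReportHistoryItem.content (the
+# shape produced by ReportService.get_weekly_stats); the values are made up:
+#
+# {
+#     "week_start": "2024-12-23T00:00:00",
+#     "week_end": "2024-12-30T00:00:00",
+#     "generated_at": "2024-12-27T16:00:00",
+#     "projects": [
+#         {"project_id": "...", "project_title": "...", "completed_count": 2,
+#          "in_progress_count": 3, "overdue_count": 1, "total_tasks": 8,
+#          "completed_tasks": [...], "overdue_tasks": [...]}
+#     ],
+#     "summary": {"completed_count": 2, "in_progress_count": 3,
+#                 "overdue_count": 1, "total_tasks": 8}
+# }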
+class ReportHistoryListResponse(BaseModel): + reports: List[ReportHistoryItem] + total: int + + +class GenerateReportResponse(BaseModel): + message: str + report_id: str + summary: ReportSummary diff --git a/backend/app/schemas/trigger.py b/backend/app/schemas/trigger.py new file mode 100644 index 0000000..ae62184 --- /dev/null +++ b/backend/app/schemas/trigger.py @@ -0,0 +1,82 @@ +from datetime import datetime +from typing import Optional, List, Dict, Any +from pydantic import BaseModel, Field + + +class TriggerCondition(BaseModel): + field: str = Field(..., description="Field to check: status_id, assignee_id, priority") + operator: str = Field(..., description="Operator: equals, not_equals, changed_to, changed_from") + value: str = Field(..., description="Value to compare against") + + +class TriggerAction(BaseModel): + type: str = Field(default="notify", description="Action type: notify") + target: str = Field(default="assignee", description="Target: assignee, creator, project_owner, user:") + template: Optional[str] = Field(None, description="Message template with variables") + + +class TriggerCreate(BaseModel): + name: str = Field(..., min_length=1, max_length=200) + description: Optional[str] = Field(None, max_length=2000) + trigger_type: str = Field(default="field_change") + conditions: TriggerCondition + actions: List[TriggerAction] + is_active: bool = Field(default=True) + + +class TriggerUpdate(BaseModel): + name: Optional[str] = Field(None, min_length=1, max_length=200) + description: Optional[str] = Field(None, max_length=2000) + conditions: Optional[TriggerCondition] = None + actions: Optional[List[TriggerAction]] = None + is_active: Optional[bool] = None + + +class TriggerUserInfo(BaseModel): + id: str + name: str + email: str + + class Config: + from_attributes = True + + +class TriggerResponse(BaseModel): + id: str + project_id: str + name: str + description: Optional[str] + trigger_type: str + conditions: Dict[str, Any] + actions: List[Dict[str, Any]] + is_active: bool + created_by: Optional[str] + created_at: datetime + updated_at: datetime + creator: Optional[TriggerUserInfo] = None + + class Config: + from_attributes = True + + +class TriggerListResponse(BaseModel): + triggers: List[TriggerResponse] + total: int + + +class TriggerLogResponse(BaseModel): + id: str + trigger_id: str + task_id: Optional[str] + executed_at: datetime + status: str + details: Optional[Dict[str, Any]] + error_message: Optional[str] + + class Config: + from_attributes = True + + +class TriggerLogListResponse(BaseModel): + logs: List[TriggerLogResponse] + total: int diff --git a/backend/app/services/report_service.py b/backend/app/services/report_service.py new file mode 100644 index 0000000..68277bd --- /dev/null +++ b/backend/app/services/report_service.py @@ -0,0 +1,228 @@ +import uuid +from datetime import datetime, timedelta +from typing import Dict, Any, List, Optional +from sqlalchemy.orm import Session +from sqlalchemy import func + +from app.models import ( + User, Task, Project, ScheduledReport, ReportHistory +) +from app.services.notification_service import NotificationService + + +class ReportService: + """Service for generating and managing scheduled reports.""" + + @staticmethod + def get_week_start(date: Optional[datetime] = None) -> datetime: + """Get the start of the week (Monday) for a given date.""" + if date is None: + date = datetime.utcnow() + # Get Monday of the current week + days_since_monday = date.weekday() + week_start = date - timedelta(days=days_since_monday) + 
        return week_start.replace(hour=0, minute=0, second=0, microsecond=0)
+
+    @staticmethod
+    def get_weekly_stats(db: Session, user_id: str, week_start: Optional[datetime] = None) -> Dict[str, Any]:
+        """
+        Get weekly task statistics for a user's projects.
+
+        Returns stats for all projects where the user is the owner.
+        """
+        if week_start is None:
+            week_start = ReportService.get_week_start()
+
+        week_end = week_start + timedelta(days=7)
+
+        # Get projects owned by the user
+        projects = db.query(Project).filter(Project.owner_id == user_id).all()
+
+        if not projects:
+            return {
+                "week_start": week_start.isoformat(),
+                "week_end": week_end.isoformat(),
+                "generated_at": datetime.utcnow().isoformat(),
+                "projects": [],
+                "summary": {
+                    "completed_count": 0,
+                    "in_progress_count": 0,
+                    "overdue_count": 0,
+                    "total_tasks": 0,
+                }
+            }
+
+        project_ids = [p.id for p in projects]
+
+        # Get all tasks for these projects
+        all_tasks = db.query(Task).filter(Task.project_id.in_(project_ids)).all()
+
+        # Categorize tasks
+        completed_tasks = []
+        in_progress_tasks = []
+        overdue_tasks = []
+
+        now = datetime.utcnow()
+
+        for task in all_tasks:
+            status_name = task.status.name.lower() if task.status else ""
+
+            # Check if completed (updated this week)
+            if status_name in ["done", "completed", "完成"]:
+                if task.updated_at and task.updated_at >= week_start:
+                    completed_tasks.append(task)
+            # Check if in progress
+            elif status_name in ["in progress", "進行中", "doing"]:
+                in_progress_tasks.append(task)
+
+            # Check if overdue
+            if task.due_date and task.due_date < now and status_name not in ["done", "completed", "完成"]:
+                overdue_tasks.append(task)
+
+        # Build project details
+        project_details = []
+        for project in projects:
+            project_tasks = [t for t in all_tasks if t.project_id == project.id]
+            project_completed = [t for t in completed_tasks if t.project_id == project.id]
+            project_in_progress = [t for t in in_progress_tasks if t.project_id == project.id]
+            project_overdue = [t for t in overdue_tasks if t.project_id == project.id]
+
+            project_details.append({
+                "project_id": project.id,
+                "project_title": project.title,
+                "completed_count": len(project_completed),
+                "in_progress_count": len(project_in_progress),
+                "overdue_count": len(project_overdue),
+                "total_tasks": len(project_tasks),
+                "completed_tasks": [{"id": t.id, "title": t.title} for t in project_completed[:5]],
+                "overdue_tasks": [{"id": t.id, "title": t.title, "due_date": t.due_date.isoformat() if t.due_date else None} for t in project_overdue[:5]],
+            })
+
+        return {
+            "week_start": week_start.isoformat(),
+            "week_end": week_end.isoformat(),
+            "generated_at": datetime.utcnow().isoformat(),
+            "projects": project_details,
+            "summary": {
+                "completed_count": len(completed_tasks),
+                "in_progress_count": len(in_progress_tasks),
+                "overdue_count": len(overdue_tasks),
+                "total_tasks": len(all_tasks),
+            }
+        }
+
+    @staticmethod
+    def generate_weekly_report(db: Session, user_id: str) -> Optional[ReportHistory]:
+        """
+        Generate a weekly report for a user and save to history.
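+        Creates the user's weekly ScheduledReport subscription if it does not
+        exist yet and updates its last_sent_at timestamp.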
+ """ + # Get or create scheduled report for this user + scheduled_report = db.query(ScheduledReport).filter( + ScheduledReport.recipient_id == user_id, + ScheduledReport.report_type == "weekly", + ).first() + + if not scheduled_report: + scheduled_report = ScheduledReport( + id=str(uuid.uuid4()), + report_type="weekly", + recipient_id=user_id, + is_active=True, + ) + db.add(scheduled_report) + db.flush() + + # Generate report content + content = ReportService.get_weekly_stats(db, user_id) + + # Save to history + report_history = ReportHistory( + id=str(uuid.uuid4()), + report_id=scheduled_report.id, + content=content, + status="sent", + ) + db.add(report_history) + + # Update last_sent_at + scheduled_report.last_sent_at = datetime.utcnow() + + db.commit() + + return report_history + + @staticmethod + def send_report_notification( + db: Session, + user_id: str, + report_content: Dict[str, Any], + ) -> None: + """Send a notification with the weekly report summary.""" + summary = report_content.get("summary", {}) + completed = summary.get("completed_count", 0) + in_progress = summary.get("in_progress_count", 0) + overdue = summary.get("overdue_count", 0) + + message = f"本週完成 {completed} 項任務,進行中 {in_progress} 項" + if overdue > 0: + message += f",逾期 {overdue} 項需關注" + + NotificationService.create_notification( + db=db, + user_id=user_id, + notification_type="status_change", + reference_type="report", + reference_id="weekly", + title="週報:專案進度彙整", + message=message, + ) + + @staticmethod + async def generate_all_weekly_reports(db: Session) -> List[str]: + """ + Generate weekly reports for all active subscriptions. + Called by the scheduler on Friday 16:00. + """ + generated_for = [] + + # Get all active scheduled reports + active_reports = db.query(ScheduledReport).filter( + ScheduledReport.is_active == True, + ScheduledReport.report_type == "weekly", + ).all() + + for scheduled_report in active_reports: + try: + # Generate report + content = ReportService.get_weekly_stats(db, scheduled_report.recipient_id) + + # Save history + history = ReportHistory( + id=str(uuid.uuid4()), + report_id=scheduled_report.id, + content=content, + status="sent", + ) + db.add(history) + + # Update last_sent_at + scheduled_report.last_sent_at = datetime.utcnow() + + # Send notification + ReportService.send_report_notification(db, scheduled_report.recipient_id, content) + + generated_for.append(scheduled_report.recipient_id) + + except Exception as e: + # Log failure + history = ReportHistory( + id=str(uuid.uuid4()), + report_id=scheduled_report.id, + content={}, + status="failed", + error_message=str(e), + ) + db.add(history) + + db.commit() + + return generated_for diff --git a/backend/app/services/trigger_service.py b/backend/app/services/trigger_service.py new file mode 100644 index 0000000..88de663 --- /dev/null +++ b/backend/app/services/trigger_service.py @@ -0,0 +1,200 @@ +import uuid +from typing import List, Dict, Any, Optional +from sqlalchemy.orm import Session + +from app.models import Trigger, TriggerLog, Task, User, Project +from app.services.notification_service import NotificationService + + +class TriggerService: + """Service for evaluating and executing triggers.""" + + SUPPORTED_FIELDS = ["status_id", "assignee_id", "priority"] + SUPPORTED_OPERATORS = ["equals", "not_equals", "changed_to", "changed_from"] + + @staticmethod + def evaluate_triggers( + db: Session, + task: Task, + old_values: Dict[str, Any], + new_values: Dict[str, Any], + current_user: User, + ) -> List[TriggerLog]: + """Evaluate all 
active triggers for a project when task values change.""" + logs = [] + + # Get active field_change triggers for the project + triggers = db.query(Trigger).filter( + Trigger.project_id == task.project_id, + Trigger.is_active == True, + Trigger.trigger_type == "field_change", + ).all() + + for trigger in triggers: + if TriggerService._check_conditions(trigger.conditions, old_values, new_values): + log = TriggerService._execute_actions(db, trigger, task, current_user, old_values, new_values) + logs.append(log) + + return logs + + @staticmethod + def _check_conditions( + conditions: Dict[str, Any], + old_values: Dict[str, Any], + new_values: Dict[str, Any], + ) -> bool: + """Check if trigger conditions are met.""" + field = conditions.get("field") + operator = conditions.get("operator") + value = conditions.get("value") + + if field not in TriggerService.SUPPORTED_FIELDS: + return False + + old_value = old_values.get(field) + new_value = new_values.get(field) + + if operator == "equals": + return new_value == value + elif operator == "not_equals": + return new_value != value + elif operator == "changed_to": + return old_value != value and new_value == value + elif operator == "changed_from": + return old_value == value and new_value != value + + return False + + @staticmethod + def _execute_actions( + db: Session, + trigger: Trigger, + task: Task, + current_user: User, + old_values: Dict[str, Any], + new_values: Dict[str, Any], + ) -> TriggerLog: + """Execute trigger actions and log the result.""" + actions = trigger.actions if isinstance(trigger.actions, list) else [trigger.actions] + executed_actions = [] + error_message = None + + try: + for action in actions: + action_type = action.get("type") + if action_type == "notify": + TriggerService._execute_notify_action(db, action, task, current_user, old_values, new_values) + executed_actions.append({"type": action_type, "status": "success"}) + + status = "success" + except Exception as e: + status = "failed" + error_message = str(e) + executed_actions.append({"type": "error", "message": str(e)}) + + log = TriggerLog( + id=str(uuid.uuid4()), + trigger_id=trigger.id, + task_id=task.id, + status=status, + details={ + "trigger_name": trigger.name, + "old_values": old_values, + "new_values": new_values, + "actions_executed": executed_actions, + }, + error_message=error_message, + ) + db.add(log) + + return log + + @staticmethod + def _execute_notify_action( + db: Session, + action: Dict[str, Any], + task: Task, + current_user: User, + old_values: Dict[str, Any], + new_values: Dict[str, Any], + ) -> None: + """Execute a notify action.""" + target = action.get("target", "assignee") + template = action.get("template", "任務 {task_title} 已觸發自動化規則") + + # Resolve target user + target_user_id = TriggerService._resolve_target(task, target) + if not target_user_id: + return + + # Don't notify the user who triggered the action + if target_user_id == current_user.id: + return + + # Format message with variables + message = TriggerService._format_template(template, task, old_values, new_values) + + NotificationService.create_notification( + db=db, + user_id=target_user_id, + notification_type="status_change", + reference_type="task", + reference_id=task.id, + title=f"自動化通知: {task.title}", + message=message, + ) + + @staticmethod + def _resolve_target(task: Task, target: str) -> Optional[str]: + """Resolve notification target to user ID.""" + if target == "assignee": + return task.assignee_id + elif target == "creator": + return task.created_by + elif target == 
"project_owner": + return task.project.owner_id if task.project else None + elif target.startswith("user:"): + return target.split(":", 1)[1] + return None + + @staticmethod + def _format_template( + template: str, + task: Task, + old_values: Dict[str, Any], + new_values: Dict[str, Any], + ) -> str: + """Format message template with task variables.""" + replacements = { + "{task_title}": task.title, + "{task_id}": task.id, + "{old_value}": str(old_values.get("status_id", old_values.get("assignee_id", old_values.get("priority", "")))), + "{new_value}": str(new_values.get("status_id", new_values.get("assignee_id", new_values.get("priority", "")))), + } + + result = template + for key, value in replacements.items(): + result = result.replace(key, value) + + return result + + @staticmethod + def log_execution( + db: Session, + trigger: Trigger, + task: Optional[Task], + status: str, + details: Optional[Dict[str, Any]] = None, + error_message: Optional[str] = None, + ) -> TriggerLog: + """Log a trigger execution.""" + log = TriggerLog( + id=str(uuid.uuid4()), + trigger_id=trigger.id, + task_id=task.id if task else None, + status=status, + details=details, + error_message=error_message, + ) + db.add(log) + return log diff --git a/backend/migrations/versions/007_automation_tables.py b/backend/migrations/versions/007_automation_tables.py new file mode 100644 index 0000000..e3cb6d6 --- /dev/null +++ b/backend/migrations/versions/007_automation_tables.py @@ -0,0 +1,96 @@ +"""Create automation tables + +Revision ID: 007 +Revises: 006 +Create Date: 2024-12-29 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = '007' +down_revision = '006' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Create triggers table + op.create_table( + 'pjctrl_triggers', + sa.Column('id', sa.String(36), primary_key=True), + sa.Column('project_id', sa.String(36), sa.ForeignKey('pjctrl_projects.id', ondelete='CASCADE'), nullable=False), + sa.Column('name', sa.String(200), nullable=False), + sa.Column('description', sa.Text, nullable=True), + sa.Column('trigger_type', sa.Enum('field_change', 'schedule', name='trigger_type_enum'), nullable=False), + sa.Column('conditions', sa.JSON, nullable=False), + sa.Column('actions', sa.JSON, nullable=False), + sa.Column('is_active', sa.Boolean, server_default='1', nullable=False), + sa.Column('created_by', sa.String(36), sa.ForeignKey('pjctrl_users.id', ondelete='SET NULL'), nullable=True), + sa.Column('created_at', sa.DateTime, server_default=sa.func.now(), nullable=False), + sa.Column('updated_at', sa.DateTime, server_default=sa.func.now(), nullable=False), + ) + + # Create index for triggers + op.create_index('idx_trigger_project', 'pjctrl_triggers', ['project_id', 'is_active']) + + # Create trigger_logs table + op.create_table( + 'pjctrl_trigger_logs', + sa.Column('id', sa.String(36), primary_key=True), + sa.Column('trigger_id', sa.String(36), sa.ForeignKey('pjctrl_triggers.id', ondelete='CASCADE'), nullable=False), + sa.Column('task_id', sa.String(36), sa.ForeignKey('pjctrl_tasks.id', ondelete='SET NULL'), nullable=True), + sa.Column('executed_at', sa.DateTime, server_default=sa.func.now(), nullable=False), + sa.Column('status', sa.Enum('success', 'failed', name='trigger_log_status_enum'), nullable=False), + sa.Column('details', sa.JSON, nullable=True), + sa.Column('error_message', sa.Text, nullable=True), + ) + + # Create indexes for trigger_logs + op.create_index('idx_trigger_log_trigger', 
'pjctrl_trigger_logs', ['trigger_id', 'executed_at']) + op.create_index('idx_trigger_log_task', 'pjctrl_trigger_logs', ['task_id']) + + # Create scheduled_reports table + op.create_table( + 'pjctrl_scheduled_reports', + sa.Column('id', sa.String(36), primary_key=True), + sa.Column('report_type', sa.Enum('weekly', name='report_type_enum'), nullable=False), + sa.Column('recipient_id', sa.String(36), sa.ForeignKey('pjctrl_users.id', ondelete='CASCADE'), nullable=False), + sa.Column('is_active', sa.Boolean, server_default='1', nullable=False), + sa.Column('last_sent_at', sa.DateTime, nullable=True), + sa.Column('created_at', sa.DateTime, server_default=sa.func.now(), nullable=False), + ) + + # Create index for scheduled_reports + op.create_index('idx_scheduled_report_recipient', 'pjctrl_scheduled_reports', ['recipient_id', 'is_active']) + + # Create report_history table + op.create_table( + 'pjctrl_report_history', + sa.Column('id', sa.String(36), primary_key=True), + sa.Column('report_id', sa.String(36), sa.ForeignKey('pjctrl_scheduled_reports.id', ondelete='CASCADE'), nullable=False), + sa.Column('generated_at', sa.DateTime, server_default=sa.func.now(), nullable=False), + sa.Column('content', sa.JSON, nullable=False), + sa.Column('status', sa.Enum('sent', 'failed', name='report_history_status_enum'), nullable=False), + sa.Column('error_message', sa.Text, nullable=True), + ) + + # Create index for report_history + op.create_index('idx_report_history', 'pjctrl_report_history', ['report_id', 'generated_at']) + + +def downgrade() -> None: + op.drop_index('idx_report_history', table_name='pjctrl_report_history') + op.drop_table('pjctrl_report_history') + op.drop_index('idx_scheduled_report_recipient', table_name='pjctrl_scheduled_reports') + op.drop_table('pjctrl_scheduled_reports') + op.drop_index('idx_trigger_log_task', table_name='pjctrl_trigger_logs') + op.drop_index('idx_trigger_log_trigger', table_name='pjctrl_trigger_logs') + op.drop_table('pjctrl_trigger_logs') + op.drop_index('idx_trigger_project', table_name='pjctrl_triggers') + op.drop_table('pjctrl_triggers') + op.execute("DROP TYPE IF EXISTS trigger_type_enum") + op.execute("DROP TYPE IF EXISTS trigger_log_status_enum") + op.execute("DROP TYPE IF EXISTS report_type_enum") + op.execute("DROP TYPE IF EXISTS report_history_status_enum") diff --git a/backend/tests/test_reports.py b/backend/tests/test_reports.py new file mode 100644 index 0000000..2264336 --- /dev/null +++ b/backend/tests/test_reports.py @@ -0,0 +1,260 @@ +import pytest +import uuid +from datetime import datetime, timedelta +from app.models import User, Space, Project, Task, TaskStatus, ScheduledReport, ReportHistory +from app.services.report_service import ReportService + + +@pytest.fixture +def test_user(db): + """Create a test user.""" + user = User( + id=str(uuid.uuid4()), + email="reportuser@example.com", + name="Report User", + role_id="00000000-0000-0000-0000-000000000003", + is_active=True, + is_system_admin=False, + ) + db.add(user) + db.commit() + return user + + +@pytest.fixture +def test_user_token(client, mock_redis, test_user): + """Get a token for test user.""" + from app.core.security import create_access_token, create_token_payload + + token_data = create_token_payload( + user_id=test_user.id, + email=test_user.email, + role="engineer", + department_id=None, + is_system_admin=False, + ) + token = create_access_token(token_data) + mock_redis.setex(f"session:{test_user.id}", 900, token) + return token + + +@pytest.fixture +def test_space(db, test_user): + 
"""Create a test space.""" + space = Space( + id=str(uuid.uuid4()), + name="Report Test Space", + description="Test space for reports", + owner_id=test_user.id, + ) + db.add(space) + db.commit() + return space + + +@pytest.fixture +def test_project(db, test_space, test_user): + """Create a test project.""" + project = Project( + id=str(uuid.uuid4()), + space_id=test_space.id, + title="Report Test Project", + description="Test project for reports", + owner_id=test_user.id, + ) + db.add(project) + db.commit() + return project + + +@pytest.fixture +def test_statuses(db, test_project): + """Create test task statuses.""" + todo = TaskStatus( + id=str(uuid.uuid4()), + project_id=test_project.id, + name="To Do", + color="#808080", + position=0, + ) + in_progress = TaskStatus( + id=str(uuid.uuid4()), + project_id=test_project.id, + name="In Progress", + color="#0000FF", + position=1, + ) + done = TaskStatus( + id=str(uuid.uuid4()), + project_id=test_project.id, + name="Done", + color="#00FF00", + position=2, + ) + db.add_all([todo, in_progress, done]) + db.commit() + return {"todo": todo, "in_progress": in_progress, "done": done} + + +@pytest.fixture +def test_tasks(db, test_project, test_user, test_statuses): + """Create test tasks with various statuses.""" + tasks = [] + + # Completed task (updated this week) + completed_task = Task( + id=str(uuid.uuid4()), + project_id=test_project.id, + title="Completed Task", + status_id=test_statuses["done"].id, + created_by=test_user.id, + ) + completed_task.updated_at = datetime.utcnow() + tasks.append(completed_task) + + # In progress task + in_progress_task = Task( + id=str(uuid.uuid4()), + project_id=test_project.id, + title="In Progress Task", + status_id=test_statuses["in_progress"].id, + created_by=test_user.id, + ) + tasks.append(in_progress_task) + + # Overdue task + overdue_task = Task( + id=str(uuid.uuid4()), + project_id=test_project.id, + title="Overdue Task", + status_id=test_statuses["todo"].id, + due_date=datetime.utcnow() - timedelta(days=3), + created_by=test_user.id, + ) + tasks.append(overdue_task) + + db.add_all(tasks) + db.commit() + return tasks + + +class TestReportService: + """Tests for ReportService.""" + + def test_get_week_start(self): + """Test week start calculation.""" + # Test with a known Wednesday + wednesday = datetime(2024, 12, 25, 15, 30, 0) # Wednesday + week_start = ReportService.get_week_start(wednesday) + + assert week_start.weekday() == 0 # Monday + assert week_start.hour == 0 + assert week_start.minute == 0 + + def test_get_weekly_stats_empty(self, db, test_user): + """Test weekly stats with no projects.""" + stats = ReportService.get_weekly_stats(db, test_user.id) + + assert stats["summary"]["completed_count"] == 0 + assert stats["summary"]["in_progress_count"] == 0 + assert stats["summary"]["total_tasks"] == 0 + assert len(stats["projects"]) == 0 + + def test_get_weekly_stats_with_tasks(self, db, test_user, test_project, test_tasks, test_statuses): + """Test weekly stats with tasks.""" + stats = ReportService.get_weekly_stats(db, test_user.id) + + assert stats["summary"]["completed_count"] == 1 + assert stats["summary"]["in_progress_count"] == 1 + assert stats["summary"]["overdue_count"] == 1 + assert stats["summary"]["total_tasks"] == 3 + assert len(stats["projects"]) == 1 + assert stats["projects"][0]["project_title"] == "Report Test Project" + + def test_generate_weekly_report(self, db, test_user, test_project, test_tasks, test_statuses): + """Test generating a weekly report.""" + report = 
ReportService.generate_weekly_report(db, test_user.id) + + assert report is not None + assert report.status == "sent" + assert "summary" in report.content + + # Check scheduled report was created + scheduled = db.query(ScheduledReport).filter( + ScheduledReport.recipient_id == test_user.id + ).first() + assert scheduled is not None + assert scheduled.last_sent_at is not None + + +class TestReportAPI: + """Tests for Report API endpoints.""" + + def test_preview_weekly_report(self, client, test_user_token, test_project, test_tasks, test_statuses): + """Test previewing weekly report.""" + response = client.get( + "/api/reports/weekly/preview", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert "summary" in data + assert "projects" in data + assert data["summary"]["total_tasks"] == 3 + + def test_generate_weekly_report_api(self, client, test_user_token, test_project, test_tasks, test_statuses): + """Test generating weekly report via API.""" + response = client.post( + "/api/reports/weekly/generate", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["message"] == "Weekly report generated successfully" + assert "report_id" in data + assert "summary" in data + + def test_list_report_history_empty(self, client, test_user_token): + """Test listing report history when empty.""" + response = client.get( + "/api/reports/history", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["total"] == 0 + assert len(data["reports"]) == 0 + + def test_list_report_history_with_reports(self, client, test_user_token, test_project, test_tasks, test_statuses, db, test_user): + """Test listing report history with existing reports.""" + # Generate a report first + ReportService.generate_weekly_report(db, test_user.id) + + response = client.get( + "/api/reports/history", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["total"] >= 1 + assert len(data["reports"]) >= 1 + assert data["reports"][0]["status"] == "sent" + + def test_get_report_detail(self, client, test_user_token, test_project, test_tasks, test_statuses, db, test_user): + """Test getting specific report detail.""" + # Generate a report first + report = ReportService.generate_weekly_report(db, test_user.id) + + response = client.get( + f"/api/reports/history/{report.id}", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["id"] == report.id + assert "content" in data diff --git a/backend/tests/test_triggers.py b/backend/tests/test_triggers.py new file mode 100644 index 0000000..dafe37f --- /dev/null +++ b/backend/tests/test_triggers.py @@ -0,0 +1,377 @@ +import pytest +import uuid +from app.models import User, Space, Project, Task, TaskStatus, Trigger, TriggerLog, Notification +from app.services.trigger_service import TriggerService + + +@pytest.fixture +def test_user(db): + """Create a test user.""" + user = User( + id=str(uuid.uuid4()), + email="testuser@example.com", + name="Test User", + role_id="00000000-0000-0000-0000-000000000003", + is_active=True, + is_system_admin=False, + ) + db.add(user) + db.commit() + return user + + +@pytest.fixture +def test_user_token(client, mock_redis, test_user): + """Get a token for test 
user.""" + from app.core.security import create_access_token, create_token_payload + + token_data = create_token_payload( + user_id=test_user.id, + email=test_user.email, + role="engineer", + department_id=None, + is_system_admin=False, + ) + token = create_access_token(token_data) + mock_redis.setex(f"session:{test_user.id}", 900, token) + return token + + +@pytest.fixture +def test_space(db, test_user): + """Create a test space.""" + space = Space( + id=str(uuid.uuid4()), + name="Test Space", + description="Test space for triggers", + owner_id=test_user.id, + ) + db.add(space) + db.commit() + return space + + +@pytest.fixture +def test_project(db, test_space, test_user): + """Create a test project.""" + project = Project( + id=str(uuid.uuid4()), + space_id=test_space.id, + title="Test Project", + description="Test project for triggers", + owner_id=test_user.id, + ) + db.add(project) + db.commit() + return project + + +@pytest.fixture +def test_status(db, test_project): + """Create test task statuses.""" + status1 = TaskStatus( + id=str(uuid.uuid4()), + project_id=test_project.id, + name="To Do", + color="#808080", + position=0, + ) + status2 = TaskStatus( + id=str(uuid.uuid4()), + project_id=test_project.id, + name="In Progress", + color="#0000FF", + position=1, + ) + db.add(status1) + db.add(status2) + db.commit() + return status1, status2 + + +@pytest.fixture +def test_task(db, test_project, test_user, test_status): + """Create a test task.""" + task = Task( + id=str(uuid.uuid4()), + project_id=test_project.id, + title="Test Task", + description="Test task for triggers", + status_id=test_status[0].id, + created_by=test_user.id, + assignee_id=test_user.id, + ) + db.add(task) + db.commit() + return task + + +@pytest.fixture +def test_trigger(db, test_project, test_user, test_status): + """Create a test trigger.""" + trigger = Trigger( + id=str(uuid.uuid4()), + project_id=test_project.id, + name="Status Change Trigger", + description="Notify when status changes to In Progress", + trigger_type="field_change", + conditions={ + "field": "status_id", + "operator": "changed_to", + "value": test_status[1].id, + }, + actions=[{ + "type": "notify", + "target": "assignee", + "template": "Task {task_title} status changed", + }], + is_active=True, + created_by=test_user.id, + ) + db.add(trigger) + db.commit() + return trigger + + +class TestTriggerService: + """Tests for TriggerService.""" + + def test_check_conditions_changed_to(self, db, test_status): + """Test changed_to condition.""" + conditions = { + "field": "status_id", + "operator": "changed_to", + "value": test_status[1].id, + } + old_values = {"status_id": test_status[0].id} + new_values = {"status_id": test_status[1].id} + + result = TriggerService._check_conditions(conditions, old_values, new_values) + assert result is True + + def test_check_conditions_changed_to_no_match(self, db, test_status): + """Test changed_to condition when value doesn't match.""" + conditions = { + "field": "status_id", + "operator": "changed_to", + "value": test_status[1].id, + } + old_values = {"status_id": test_status[1].id} + new_values = {"status_id": test_status[0].id} + + result = TriggerService._check_conditions(conditions, old_values, new_values) + assert result is False + + def test_check_conditions_equals(self, db, test_status): + """Test equals condition.""" + conditions = { + "field": "status_id", + "operator": "equals", + "value": test_status[1].id, + } + old_values = {"status_id": test_status[0].id} + new_values = {"status_id": test_status[1].id} 
+ + result = TriggerService._check_conditions(conditions, old_values, new_values) + assert result is True + + def test_check_conditions_not_equals(self, db, test_status): + """Test not_equals condition.""" + conditions = { + "field": "status_id", + "operator": "not_equals", + "value": test_status[0].id, + } + old_values = {"status_id": test_status[0].id} + new_values = {"status_id": test_status[1].id} + + result = TriggerService._check_conditions(conditions, old_values, new_values) + assert result is True + + def test_evaluate_triggers_creates_notification(self, db, test_task, test_trigger, test_user, test_status): + """Test that evaluate_triggers creates notification when conditions match.""" + # Create another user to receive notification + other_user = User( + id=str(uuid.uuid4()), + email="other@example.com", + name="Other User", + role_id="00000000-0000-0000-0000-000000000003", + is_active=True, + ) + db.add(other_user) + test_task.assignee_id = other_user.id + db.commit() + + old_values = {"status_id": test_status[0].id} + new_values = {"status_id": test_status[1].id} + + logs = TriggerService.evaluate_triggers(db, test_task, old_values, new_values, test_user) + db.commit() + + assert len(logs) == 1 + assert logs[0].status == "success" + + # Check notification was created + notifications = db.query(Notification).filter( + Notification.user_id == other_user.id + ).all() + assert len(notifications) == 1 + + def test_evaluate_triggers_inactive_trigger_not_executed(self, db, test_task, test_trigger, test_user, test_status): + """Test that inactive triggers are not executed.""" + test_trigger.is_active = False + db.commit() + + old_values = {"status_id": test_status[0].id} + new_values = {"status_id": test_status[1].id} + + logs = TriggerService.evaluate_triggers(db, test_task, old_values, new_values, test_user) + + assert len(logs) == 0 + + +class TestTriggerAPI: + """Tests for Trigger API endpoints.""" + + def test_create_trigger(self, client, test_user_token, test_project, test_status): + """Test creating a trigger.""" + response = client.post( + f"/api/projects/{test_project.id}/triggers", + headers={"Authorization": f"Bearer {test_user_token}"}, + json={ + "name": "New Trigger", + "description": "Test trigger", + "trigger_type": "field_change", + "conditions": { + "field": "status_id", + "operator": "changed_to", + "value": test_status[1].id, + }, + "actions": [{ + "type": "notify", + "target": "assignee", + }], + }, + ) + + assert response.status_code == 201 + data = response.json() + assert data["name"] == "New Trigger" + assert data["is_active"] is True + + def test_list_triggers(self, client, test_user_token, test_project, test_trigger): + """Test listing triggers.""" + response = client.get( + f"/api/projects/{test_project.id}/triggers", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["total"] >= 1 + assert len(data["triggers"]) >= 1 + + def test_get_trigger(self, client, test_user_token, test_trigger): + """Test getting a specific trigger.""" + response = client.get( + f"/api/triggers/{test_trigger.id}", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["id"] == test_trigger.id + assert data["name"] == test_trigger.name + + def test_update_trigger(self, client, test_user_token, test_trigger): + """Test updating a trigger.""" + response = client.put( + f"/api/triggers/{test_trigger.id}", + 
headers={"Authorization": f"Bearer {test_user_token}"}, + json={ + "name": "Updated Trigger", + "is_active": False, + }, + ) + + assert response.status_code == 200 + data = response.json() + assert data["name"] == "Updated Trigger" + assert data["is_active"] is False + + def test_delete_trigger(self, client, test_user_token, test_trigger): + """Test deleting a trigger.""" + response = client.delete( + f"/api/triggers/{test_trigger.id}", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 204 + + # Verify deletion + response = client.get( + f"/api/triggers/{test_trigger.id}", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + assert response.status_code == 404 + + def test_get_trigger_logs(self, client, test_user_token, test_trigger, db): + """Test getting trigger logs.""" + # Create a log entry + log = TriggerLog( + id=str(uuid.uuid4()), + trigger_id=test_trigger.id, + status="success", + details={"test": True}, + ) + db.add(log) + db.commit() + + response = client.get( + f"/api/triggers/{test_trigger.id}/logs", + headers={"Authorization": f"Bearer {test_user_token}"}, + ) + + assert response.status_code == 200 + data = response.json() + assert data["total"] >= 1 + + def test_create_trigger_invalid_field(self, client, test_user_token, test_project): + """Test creating a trigger with invalid field.""" + response = client.post( + f"/api/projects/{test_project.id}/triggers", + headers={"Authorization": f"Bearer {test_user_token}"}, + json={ + "name": "Invalid Trigger", + "trigger_type": "field_change", + "conditions": { + "field": "invalid_field", + "operator": "equals", + "value": "test", + }, + "actions": [{"type": "notify", "target": "assignee"}], + }, + ) + + assert response.status_code == 400 + assert "Invalid condition field" in response.json()["detail"] + + def test_create_trigger_invalid_operator(self, client, test_user_token, test_project): + """Test creating a trigger with invalid operator.""" + response = client.post( + f"/api/projects/{test_project.id}/triggers", + headers={"Authorization": f"Bearer {test_user_token}"}, + json={ + "name": "Invalid Trigger", + "trigger_type": "field_change", + "conditions": { + "field": "status_id", + "operator": "invalid_op", + "value": "test", + }, + "actions": [{"type": "notify", "target": "assignee"}], + }, + ) + + assert response.status_code == 400 + assert "Invalid operator" in response.json()["detail"] diff --git a/frontend/src/components/ReportHistory.tsx b/frontend/src/components/ReportHistory.tsx new file mode 100644 index 0000000..66d2fac --- /dev/null +++ b/frontend/src/components/ReportHistory.tsx @@ -0,0 +1,106 @@ +import { useState, useEffect } from 'react' +import { reportsApi, ReportHistoryItem } from '../services/reports' + +export function ReportHistory() { + const [reports, setReports] = useState([]) + const [loading, setLoading] = useState(true) + const [error, setError] = useState(null) + const [total, setTotal] = useState(0) + + const fetchHistory = async () => { + try { + setLoading(true) + const data = await reportsApi.listReportHistory(10, 0) + setReports(data.reports) + setTotal(data.total) + setError(null) + } catch { + setError('Failed to load report history') + } finally { + setLoading(false) + } + } + + useEffect(() => { + fetchHistory() + }, []) + + const formatDate = (dateStr: string) => { + return new Date(dateStr).toLocaleString('zh-TW', { + year: 'numeric', + month: 'short', + day: 'numeric', + hour: '2-digit', + minute: '2-digit', + }) + } + + if 
+    return <div>Loading history...</div>
+  }
+
+  if (error) {
+    return (
+      <div>
+        {error}
+      </div>
+    )
+  }
+
+  if (reports.length === 0) {
+    return (
+      <div>
+        No report history found. Reports are generated every Friday at 16:00.
+      </div>
+    )
+  }
+
+  return (
+    <div>
+      <div>
+        <h3>Report History</h3>
+        <span>{total} reports</span>
+      </div>
+      <div>
+        {reports.map(report => {
+          const summary = (report.content as Record<string, Record<string, number>>).summary || {}
+          return (
+            <div key={report.id}>
+              <div>
+                <div>
+                  <div>{formatDate(report.generated_at)}</div>
+                  {report.status === 'sent' && summary && (
+                    <div>
+                      Completed: {summary.completed_count || 0} |
+                      In Progress: {summary.in_progress_count || 0} |
+                      Overdue: {summary.overdue_count || 0}
+                    </div>
+                  )}
+                  {report.status === 'failed' && report.error_message && (
+                    <div>{report.error_message}</div>
+                  )}
+                </div>
+                <span>{report.status}</span>
+              </div>
+            </div>
+          )
+        })}
+      </div>
+    </div>
+  )
+}
diff --git a/frontend/src/components/TriggerForm.tsx b/frontend/src/components/TriggerForm.tsx
new file mode 100644
index 0000000..ce786f1
--- /dev/null
+++ b/frontend/src/components/TriggerForm.tsx
@@ -0,0 +1,198 @@
+import { useState, useEffect } from 'react'
+import { triggersApi, Trigger, TriggerCreate, TriggerCondition, TriggerAction } from '../services/triggers'
+
+interface TriggerFormProps {
+  projectId: string
+  trigger?: Trigger | null
+  onSave: () => void
+  onCancel: () => void
+}
+
+export function TriggerForm({ projectId, trigger, onSave, onCancel }: TriggerFormProps) {
+  const [name, setName] = useState('')
+  const [description, setDescription] = useState('')
+  const [field, setField] = useState('status_id')
+  const [operator, setOperator] = useState('changed_to')
+  const [value, setValue] = useState('')
+  const [target, setTarget] = useState('assignee')
+  const [template, setTemplate] = useState('')
+  const [isActive, setIsActive] = useState(true)
+  const [loading, setLoading] = useState(false)
+  const [error, setError] = useState<string | null>(null)
+
+  useEffect(() => {
+    if (trigger) {
+      setName(trigger.name)
+      setDescription(trigger.description || '')
+      setField(trigger.conditions.field)
+      setOperator(trigger.conditions.operator)
+      setValue(trigger.conditions.value)
+      if (trigger.actions.length > 0) {
+        setTarget(trigger.actions[0].target)
+        setTemplate(trigger.actions[0].template || '')
+      }
+      setIsActive(trigger.is_active)
+    }
+  }, [trigger])
+
+  const handleSubmit = async (e: React.FormEvent) => {
+    e.preventDefault()
+    setError(null)
+    setLoading(true)
+
+    const conditions: TriggerCondition = { field, operator, value }
+    const actions: TriggerAction[] = [{ type: 'notify', target, template: template || undefined }]
+
+    try {
+      if (trigger) {
+        await triggersApi.updateTrigger(trigger.id, {
+          name,
+          description: description || undefined,
+          conditions,
+          actions,
+          is_active: isActive,
+        })
+      } else {
+        const data: TriggerCreate = {
+          name,
+          description: description || undefined,
+          trigger_type: 'field_change',
+          conditions,
+          actions,
+          is_active: isActive,
+        }
+        await triggersApi.createTrigger(projectId, data)
+      }
+      onSave()
+    } catch {
+      setError('Failed to save trigger')
+    } finally {
+      setLoading(false)
+    }
+  }
+
+  return (
+    <form onSubmit={handleSubmit}>
+      <div>
+        <label>Name</label>
+        <input
+          type="text"
+          value={name}
+          onChange={(e) => setName(e.target.value)}
+          className="mt-1 block w-full rounded-md border border-gray-300 px-3 py-2 focus:border-blue-500 focus:outline-none"
+          required
+        />
+      </div>
+