feat: implement 8 OpenSpec proposals for security, reliability, and UX improvements
## Security Enhancements (P0)
- Add input validation with max_length and numeric range constraints
- Implement WebSocket token authentication via first message
- Add path traversal prevention in file storage service

## Permission Enhancements (P0)
- Add project member management for cross-department access
- Implement is_department_manager flag for workload visibility

## Cycle Detection (P0)
- Add DFS-based cycle detection for task dependencies
- Add formula field circular reference detection
- Display user-friendly cycle path visualization

## Concurrency & Reliability (P1)
- Implement optimistic locking with version field (409 Conflict on mismatch)
- Add trigger retry mechanism with exponential backoff (1s, 2s, 4s)
- Implement cascade restore for soft-deleted tasks

## Rate Limiting (P1)
- Add tiered rate limits: standard (60/min), sensitive (20/min), heavy (5/min)
- Apply rate limits to tasks, reports, attachments, and comments

## Frontend Improvements (P1)
- Add responsive sidebar with hamburger menu for mobile
- Improve touch-friendly UI with proper tap target sizes
- Complete i18n translations for all components

## Backend Reliability (P2)
- Configure database connection pool (size=10, overflow=20)
- Add Redis fallback mechanism with message queue
- Add blocker check before task deletion

## API Enhancements (P3)
- Add standardized response wrapper utility
- Add /health/ready and /health/live endpoints
- Implement project templates with status/field copying

## Tests Added
- test_input_validation.py - Schema and path traversal tests
- test_concurrency_reliability.py - Optimistic locking and retry tests
- test_backend_reliability.py - Connection pool and Redis tests
- test_api_enhancements.py - Health check and template tests

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
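The optimistic locking called out under Concurrency & Reliability (P1) is implemented outside the file diffed below. A minimal sketch of the 409-on-version-mismatch pattern it describes, assuming SQLAlchemy and FastAPI; the helper name and the `version` column handling are illustrative, not the commit's actual code:

```python
# Illustrative sketch only, not this commit's implementation. Assumes the project's
# existing SQLAlchemy `Task` model has an integer `version` column, and that the
# caller runs inside a FastAPI request handler.
from fastapi import HTTPException
from sqlalchemy.orm import Session


def update_task_optimistic(db: Session, task_id: str, expected_version: int, changes: dict) -> None:
    """Apply changes only if the row still carries the version the client last saw."""
    updated = (
        db.query(Task)
        .filter(Task.id == task_id, Task.version == expected_version)
        .update({**changes, "version": Task.version + 1}, synchronize_session=False)
    )
    if updated == 0:
        # Another writer bumped the version first: report a conflict instead of
        # silently overwriting their change.
        db.rollback()
        raise HTTPException(status_code=409, detail="Task was modified by another user")
    db.commit()
```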
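Likewise, the trigger retry with exponential backoff (1s, 2s, 4s) lives elsewhere in the codebase; a small sketch of that pattern, where `fire_trigger` is a placeholder for whatever side effect the trigger system executes:

```python
# Sketch of the backoff schedule described in the commit message (1s, 2s, 4s);
# `fire_trigger` is a placeholder callable, not an API from this repository.
import time
from typing import Callable


def run_with_backoff(fire_trigger: Callable[[], None], max_attempts: int = 4) -> None:
    """Retry a failing trigger, sleeping 1s, 2s, then 4s between attempts."""
    for attempt in range(max_attempts):
        try:
            fire_trigger()
            return
        except Exception:
            if attempt == max_attempts - 1:
                raise  # Retries exhausted: surface the failure to the caller.
            time.sleep(2 ** attempt)  # 1s, 2s, 4s for attempts 0, 1, 2
```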
@@ -6,6 +6,7 @@ Handles task dependency validation including:
- Date constraint validation based on dependency types
- Self-reference prevention
- Cross-project dependency prevention
- Bulk dependency operations with cycle detection
"""
from typing import List, Optional, Set, Tuple, Dict, Any
from collections import defaultdict
@@ -25,6 +26,27 @@ class DependencyValidationError(Exception):
        super().__init__(message)


class CycleDetectionResult:
    """Result of cycle detection with detailed path information."""

    def __init__(
        self,
        has_cycle: bool,
        cycle_path: Optional[List[str]] = None,
        cycle_task_titles: Optional[List[str]] = None
    ):
        self.has_cycle = has_cycle
        self.cycle_path = cycle_path or []
        self.cycle_task_titles = cycle_task_titles or []

    def get_cycle_description(self) -> str:
        """Get a human-readable description of the cycle."""
        if not self.has_cycle or not self.cycle_task_titles:
            return ""
        # Format: Task A -> Task B -> Task C -> Task A
        return " -> ".join(self.cycle_task_titles)


class DependencyService:
    """Service for managing task dependencies with validation."""

@@ -53,9 +75,36 @@ class DependencyService:
        Returns:
            List of task IDs forming the cycle if circular, None otherwise
        """
        # If adding predecessor -> successor, check if successor can reach predecessor
        # This would mean predecessor depends (transitively) on successor, creating a cycle
        result = DependencyService.detect_circular_dependency_detailed(
            db, predecessor_id, successor_id, project_id
        )
        return result.cycle_path if result.has_cycle else None

    @staticmethod
    def detect_circular_dependency_detailed(
        db: Session,
        predecessor_id: str,
        successor_id: str,
        project_id: str,
        additional_edges: Optional[List[Tuple[str, str]]] = None
    ) -> CycleDetectionResult:
        """
        Detect if adding a dependency would create a circular reference.

        Uses DFS to traverse from the successor to check if we can reach
        the predecessor through existing dependencies.

        Args:
            db: Database session
            predecessor_id: The task that must complete first
            successor_id: The task that depends on the predecessor
            project_id: Project ID to scope the query
            additional_edges: Optional list of additional (predecessor_id, successor_id)
                edges to consider (for bulk operations)

        Returns:
            CycleDetectionResult with detailed cycle information
        """
        # Build adjacency list for the project's dependencies
        dependencies = db.query(TaskDependency).join(
            Task, TaskDependency.successor_id == Task.id
@@ -71,6 +120,20 @@ class DependencyService:
        # Simulate adding the new edge
        graph[successor_id].append(predecessor_id)

        # Add any additional edges for bulk operations
        if additional_edges:
            for pred_id, succ_id in additional_edges:
                graph[succ_id].append(pred_id)

        # Build task title map for readable error messages
        task_ids_in_graph = set()
        for succ_id, pred_ids in graph.items():
            task_ids_in_graph.add(succ_id)
            task_ids_in_graph.update(pred_ids)

        tasks = db.query(Task).filter(Task.id.in_(task_ids_in_graph)).all()
        task_title_map: Dict[str, str] = {t.id: t.title for t in tasks}

        # DFS to find if there's a path from predecessor back to successor
        # (which would complete a cycle)
        visited: Set[str] = set()
@@ -101,7 +164,18 @@ class DependencyService:
            return None

        # Start DFS from the successor to check if we can reach back to it
        return dfs(successor_id)
        cycle_path = dfs(successor_id)

        if cycle_path:
            # Build task titles for the cycle
            cycle_titles = [task_title_map.get(task_id, task_id) for task_id in cycle_path]
            return CycleDetectionResult(
                has_cycle=True,
                cycle_path=cycle_path,
                cycle_task_titles=cycle_titles
            )

        return CycleDetectionResult(has_cycle=False)

    @staticmethod
    def validate_dependency(
@@ -183,15 +257,19 @@ class DependencyService:
            )

        # Check circular dependency
        cycle = DependencyService.detect_circular_dependency(
        cycle_result = DependencyService.detect_circular_dependency_detailed(
            db, predecessor_id, successor_id, predecessor.project_id
        )

        if cycle:
        if cycle_result.has_cycle:
            raise DependencyValidationError(
                error_type="circular",
                message="Adding this dependency would create a circular reference",
                details={"cycle": cycle}
                message=f"Adding this dependency would create a circular reference: {cycle_result.get_cycle_description()}",
                details={
                    "cycle": cycle_result.cycle_path,
                    "cycle_description": cycle_result.get_cycle_description(),
                    "cycle_task_titles": cycle_result.cycle_task_titles
                }
            )

    @staticmethod
@@ -422,3 +500,202 @@ class DependencyService:
                queue.append(dep.successor_id)

        return successors

    @staticmethod
    def validate_bulk_dependencies(
        db: Session,
        dependencies: List[Tuple[str, str]],
        project_id: str
    ) -> List[Dict[str, Any]]:
        """
        Validate a batch of dependencies for cycle detection.

        This method validates multiple dependencies together to detect cycles
        that would only appear when all dependencies are added together.

        Args:
            db: Database session
            dependencies: List of (predecessor_id, successor_id) tuples
            project_id: Project ID to scope the query

        Returns:
            List of validation errors (empty if all valid)
        """
        errors: List[Dict[str, Any]] = []

        if not dependencies:
            return errors

        # First, validate each dependency individually for basic checks
        for predecessor_id, successor_id in dependencies:
            # Check self-reference
            if predecessor_id == successor_id:
                errors.append({
                    "error_type": "self_reference",
                    "predecessor_id": predecessor_id,
                    "successor_id": successor_id,
                    "message": "A task cannot depend on itself"
                })
                continue

            # Get tasks to validate project membership
            predecessor = db.query(Task).filter(Task.id == predecessor_id).first()
            successor = db.query(Task).filter(Task.id == successor_id).first()

            if not predecessor:
                errors.append({
                    "error_type": "not_found",
                    "predecessor_id": predecessor_id,
                    "successor_id": successor_id,
                    "message": f"Predecessor task not found: {predecessor_id}"
                })
                continue

            if not successor:
                errors.append({
                    "error_type": "not_found",
                    "predecessor_id": predecessor_id,
                    "successor_id": successor_id,
                    "message": f"Successor task not found: {successor_id}"
                })
                continue

            if predecessor.project_id != project_id or successor.project_id != project_id:
                errors.append({
                    "error_type": "cross_project",
                    "predecessor_id": predecessor_id,
                    "successor_id": successor_id,
                    "message": "All tasks must be in the same project"
                })
                continue

            # Check for duplicates within the batch
            existing = db.query(TaskDependency).filter(
                TaskDependency.predecessor_id == predecessor_id,
                TaskDependency.successor_id == successor_id
            ).first()

            if existing:
                errors.append({
                    "error_type": "duplicate",
                    "predecessor_id": predecessor_id,
                    "successor_id": successor_id,
                    "message": "This dependency already exists"
                })

        # If there are basic validation errors, return them first
        if errors:
            return errors

        # Now check for cycles considering all dependencies together
        # Build the graph incrementally and check for cycles
        accumulated_edges: List[Tuple[str, str]] = []

        for predecessor_id, successor_id in dependencies:
            # Check if adding this edge (plus all previously accumulated edges)
            # would create a cycle
            cycle_result = DependencyService.detect_circular_dependency_detailed(
                db,
                predecessor_id,
                successor_id,
                project_id,
                additional_edges=accumulated_edges
            )

            if cycle_result.has_cycle:
                errors.append({
                    "error_type": "circular",
                    "predecessor_id": predecessor_id,
                    "successor_id": successor_id,
                    "message": f"Adding this dependency would create a circular reference: {cycle_result.get_cycle_description()}",
                    "cycle": cycle_result.cycle_path,
                    "cycle_description": cycle_result.get_cycle_description(),
                    "cycle_task_titles": cycle_result.cycle_task_titles
                })
            else:
                # Add this edge to accumulated edges for subsequent checks
                accumulated_edges.append((predecessor_id, successor_id))

        return errors

    @staticmethod
    def detect_cycles_in_graph(
        db: Session,
        project_id: str
    ) -> List[CycleDetectionResult]:
        """
        Detect all cycles in the existing dependency graph for a project.

        This is useful for auditing or cleanup operations.

        Args:
            db: Database session
            project_id: Project ID to check

        Returns:
            List of CycleDetectionResult for each cycle found
        """
        cycles: List[CycleDetectionResult] = []

        # Get all dependencies for the project
        dependencies = db.query(TaskDependency).join(
            Task, TaskDependency.successor_id == Task.id
        ).filter(Task.project_id == project_id).all()

        if not dependencies:
            return cycles

        # Build the graph
        graph: Dict[str, List[str]] = defaultdict(list)
        for dep in dependencies:
            graph[dep.successor_id].append(dep.predecessor_id)

        # Get task titles
        task_ids = set()
        for succ_id, pred_ids in graph.items():
            task_ids.add(succ_id)
            task_ids.update(pred_ids)

        tasks = db.query(Task).filter(Task.id.in_(task_ids)).all()
        task_title_map: Dict[str, str] = {t.id: t.title for t in tasks}

        # Find all cycles using DFS
        visited: Set[str] = set()
        found_cycles: Set[Tuple[str, ...]] = set()

        def find_cycles_dfs(node: str, path: List[str], in_path: Set[str]):
            """DFS to find all cycles."""
            if node in in_path:
                # Found a cycle
                cycle_start = path.index(node)
                cycle = tuple(sorted(path[cycle_start:]))  # Normalize for dedup
                if cycle not in found_cycles:
                    found_cycles.add(cycle)
                    actual_cycle = path[cycle_start:] + [node]
                    cycle_titles = [task_title_map.get(tid, tid) for tid in actual_cycle]
                    cycles.append(CycleDetectionResult(
                        has_cycle=True,
                        cycle_path=actual_cycle,
                        cycle_task_titles=cycle_titles
                    ))
                return

            if node in visited:
                return

            visited.add(node)
            in_path.add(node)
            path.append(node)

            for neighbor in graph.get(node, []):
                find_cycles_dfs(neighbor, path.copy(), in_path.copy())

            path.pop()
            in_path.remove(node)

        # Start DFS from all nodes
        for start_node in graph.keys():
            if start_node not in visited:
                find_cycles_dfs(start_node, [], set())

        return cycles
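A usage sketch of the new entry points added above, for reviewers; `db` is an open SQLAlchemy `Session` and the task/project IDs are placeholders:

```python
# Placeholder IDs; db is an open SQLAlchemy Session for the project's database.
result = DependencyService.detect_circular_dependency_detailed(
    db, predecessor_id="task-a", successor_id="task-b", project_id="proj-1"
)
if result.has_cycle:
    print(result.get_cycle_description())  # e.g. "Task A -> Task B -> Task A"

# Bulk validation collects error dicts instead of raising:
errors = DependencyService.validate_bulk_dependencies(
    db, dependencies=[("task-a", "task-b"), ("task-b", "task-c")], project_id="proj-1"
)
for error in errors:
    print(error["error_type"], error["message"])
```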