feat: Initial commit - Task Reporter incident response system

Complete implementation of the production-line incident response system (生產線異常即時反應系統), including:

Backend (FastAPI):
- User authentication with AD integration and session management
- Chat room management (create, list, update, members, roles)
- Real-time messaging via WebSocket (typing indicators, reactions)
- File storage with MinIO (upload, download, image preview)

Frontend (React + Vite):
- Authentication flow with token management
- Room list with filtering, search, and pagination
- Real-time chat interface with WebSocket
- File upload with drag-and-drop and image preview
- Member management and room settings
- Breadcrumb navigation
- 53 unit tests (Vitest)

Specifications:
- authentication: AD auth, sessions, JWT tokens
- chat-room: rooms, members, templates
- realtime-messaging: WebSocket, messages, reactions
- file-storage: MinIO integration, file management
- frontend-core: React SPA structure

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: egg
Date: 2025-12-01 17:42:52 +08:00
Commit: c8966477b9
135 changed files with 23269 additions and 0 deletions

app/modules/file_storage/__init__.py
@@ -0,0 +1,5 @@
"""File storage module for MinIO integration"""
from app.modules.file_storage.models import RoomFile
from app.modules.file_storage.router import router
__all__ = ["RoomFile", "router"]

app/modules/file_storage/models.py
@@ -0,0 +1,44 @@
"""Database models for file storage"""
from sqlalchemy import Column, String, BigInteger, DateTime, Index, ForeignKey
from sqlalchemy.orm import relationship
from datetime import datetime
from app.core.database import Base
class RoomFile(Base):
"""File uploaded to an incident room"""
__tablename__ = "room_files"
# Primary key
file_id = Column(String(36), primary_key=True)
# Foreign key to incident room
room_id = Column(String(36), ForeignKey("incident_rooms.room_id"), nullable=False)
# File metadata
uploader_id = Column(String(255), nullable=False)
filename = Column(String(255), nullable=False)
file_type = Column(String(20), nullable=False) # 'image', 'document', 'log'
mime_type = Column(String(100), nullable=False)
file_size = Column(BigInteger, nullable=False) # bytes
# MinIO storage information
minio_bucket = Column(String(100), nullable=False)
minio_object_path = Column(String(500), nullable=False)
# Timestamps
uploaded_at = Column(DateTime, default=datetime.utcnow, nullable=False)
deleted_at = Column(DateTime, nullable=True) # soft delete
# Relationships
room = relationship("IncidentRoom", back_populates="files")
# Indexes
__table_args__ = (
Index("ix_room_files", "room_id", "uploaded_at"),
Index("ix_file_uploader", "uploader_id"),
)
def __repr__(self):
return f"<RoomFile(file_id={self.file_id}, filename={self.filename}, room_id={self.room_id})>"

app/modules/file_storage/router.py
@@ -0,0 +1,228 @@
"""API routes for file storage operations
FastAPI router with file upload, download, listing, and delete endpoints
"""
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, status, BackgroundTasks
from sqlalchemy.orm import Session
from typing import Optional
from datetime import datetime
import asyncio
import logging
from app.core.database import get_db
from app.core.config import get_settings
from app.modules.auth import get_current_user
from app.modules.chat_room.dependencies import get_current_room
from app.modules.chat_room.models import MemberRole
from app.modules.chat_room.services.membership_service import membership_service
from app.modules.file_storage.schemas import FileUploadResponse, FileMetadata, FileListResponse, FileType
from app.modules.file_storage.services.file_service import FileService
from app.modules.file_storage.services import minio_service
from app.modules.realtime.websocket_manager import manager as websocket_manager
from app.modules.realtime.schemas import FileUploadedBroadcast, FileDeletedBroadcast, FileUploadAck
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/rooms", tags=["Files"])
@router.post("/{room_id}/files", response_model=FileUploadResponse, status_code=status.HTTP_201_CREATED)
async def upload_file(
room_id: str,
background_tasks: BackgroundTasks,
file: UploadFile = File(...),
description: Optional[str] = Form(None),
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user),
_room = Depends(get_current_room) # Validates room exists and user has access
):
"""Upload a file to an incident room
Requires OWNER or EDITOR role in the room.
Supported file types:
- Images: jpg, jpeg, png, gif (max 10MB)
- Documents: pdf (max 20MB)
- Logs: txt, log, csv (max 5MB)
"""
user_email = current_user["username"]
# Check write permission (OWNER or EDITOR)
member = FileService.check_room_membership(db, room_id, user_email)
if not FileService.check_write_permission(member):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Only OWNER or EDITOR can upload files"
)
# Upload file
result = FileService.upload_file(db, room_id, user_email, file, description)
# Broadcast file upload event to room members via WebSocket
async def broadcast_file_upload():
try:
broadcast = FileUploadedBroadcast(
file_id=result.file_id,
room_id=room_id,
uploader_id=user_email,
filename=result.filename,
file_type=result.file_type.value,
file_size=result.file_size,
mime_type=result.mime_type,
download_url=result.download_url,
uploaded_at=result.uploaded_at
)
await websocket_manager.broadcast_to_room(room_id, broadcast.to_dict())
logger.info(f"Broadcasted file upload event: {result.file_id} to room {room_id}")
# Send acknowledgment to uploader
ack = FileUploadAck(
file_id=result.file_id,
status="success",
download_url=result.download_url
)
await websocket_manager.send_personal(user_email, ack.to_dict())
except Exception as e:
logger.error(f"Failed to broadcast file upload: {e}")
    # Schedule the broadcast to run after the response; BackgroundTasks awaits async callables directly
    background_tasks.add_task(broadcast_file_upload)
return result
@router.get("/{room_id}/files", response_model=FileListResponse)
async def list_files(
room_id: str,
file_type: Optional[FileType] = Query(None, description="Filter by file type"),
limit: int = Query(50, ge=1, le=100, description="Number of files to return"),
offset: int = Query(0, ge=0, description="Number of files to skip"),
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user),
_room = Depends(get_current_room) # Validates room exists and user has access
):
"""List files in an incident room with pagination
All room members can list files.
"""
# Convert enum to string value if provided
file_type_str = file_type.value if file_type else None
return FileService.get_files(db, room_id, limit, offset, file_type_str)
@router.get("/{room_id}/files/{file_id}", response_model=FileMetadata)
async def get_file(
room_id: str,
file_id: str,
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user),
_room = Depends(get_current_room) # Validates room exists and user has access
):
"""Get file metadata and presigned download URL
All room members can access file metadata and download files.
Presigned URL expires in 1 hour.
"""
settings = get_settings()
# Get file metadata
file_record = FileService.get_file(db, file_id)
if not file_record:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File not found"
)
# Verify file belongs to requested room
if file_record.room_id != room_id:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File not found in this room"
)
# Generate presigned download URL
download_url = minio_service.generate_presigned_url(
bucket=settings.MINIO_BUCKET,
object_path=file_record.minio_object_path,
expiry_seconds=3600
)
# Build response with download URL
return FileMetadata(
file_id=file_record.file_id,
room_id=file_record.room_id,
filename=file_record.filename,
file_type=file_record.file_type,
mime_type=file_record.mime_type,
file_size=file_record.file_size,
minio_bucket=file_record.minio_bucket,
minio_object_path=file_record.minio_object_path,
uploaded_at=file_record.uploaded_at,
uploader_id=file_record.uploader_id,
deleted_at=file_record.deleted_at,
download_url=download_url
)
@router.delete("/{room_id}/files/{file_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_file(
room_id: str,
file_id: str,
background_tasks: BackgroundTasks,
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user),
_room = Depends(get_current_room) # Validates room exists and user has access
):
"""Soft delete a file
Only the file uploader or room OWNER can delete files.
"""
user_email = current_user["username"]
# Get file to check ownership
file_record = FileService.get_file(db, file_id)
if not file_record:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File not found"
)
# Verify file belongs to requested room
if file_record.room_id != room_id:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="File not found in this room"
)
# Check if user is room owner
role = membership_service.get_user_role_in_room(db, room_id, user_email)
is_room_owner = role == MemberRole.OWNER
# Check if admin
is_admin = membership_service.is_system_admin(user_email)
# Delete file (service will verify permissions)
deleted_file = FileService.delete_file(db, file_id, user_email, is_room_owner or is_admin)
# Broadcast file deletion event to room members via WebSocket
if deleted_file:
async def broadcast_file_delete():
try:
broadcast = FileDeletedBroadcast(
file_id=file_id,
room_id=room_id,
deleted_by=user_email,
deleted_at=deleted_file.deleted_at
)
await websocket_manager.broadcast_to_room(room_id, broadcast.to_dict())
logger.info(f"Broadcasted file deletion event: {file_id} from room {room_id}")
except Exception as e:
logger.error(f"Failed to broadcast file deletion: {e}")
        # Schedule the broadcast to run after the response; BackgroundTasks awaits async callables directly
        background_tasks.add_task(broadcast_file_delete)
return None
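For reference, a hedged client-side sketch of the upload and list endpoints above; it assumes the service runs at localhost:8000 and that the auth module accepts a bearer token, neither of which is shown in this commit.

```python
# Sketch: exercise the file endpoints with httpx (host and token are placeholders).
import httpx

BASE = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer <session-token>"}  # exact scheme depends on app.modules.auth


def upload_and_list(room_id: str) -> None:
    with httpx.Client(base_url=BASE, headers=HEADERS) as client:
        with open("line3_defect.png", "rb") as fh:
            resp = client.post(
                f"/api/rooms/{room_id}/files",
                files={"file": ("line3_defect.png", fh, "image/png")},
                data={"description": "Defect photo from line 3"},
            )
        resp.raise_for_status()
        print(resp.json()["download_url"])  # presigned URL from FileUploadResponse

        listing = client.get(f"/api/rooms/{room_id}/files", params={"file_type": "image", "limit": 10})
        print(listing.json()["total"])
```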

app/modules/file_storage/schemas.py
@@ -0,0 +1,74 @@
"""Pydantic schemas for file storage operations"""
from pydantic import BaseModel, Field, field_validator
from typing import Optional, List
from datetime import datetime
from enum import Enum
class FileType(str, Enum):
"""File type enumeration"""
IMAGE = "image"
DOCUMENT = "document"
LOG = "log"
class FileUploadResponse(BaseModel):
"""Response after successful file upload"""
file_id: str
filename: str
file_type: FileType
file_size: int
mime_type: str
download_url: str # Presigned URL
uploaded_at: datetime
uploader_id: str
class Config:
from_attributes = True
class FileMetadata(BaseModel):
"""File metadata response"""
file_id: str
room_id: str
filename: str
file_type: FileType
mime_type: str
file_size: int
minio_bucket: str
minio_object_path: str
uploaded_at: datetime
uploader_id: str
deleted_at: Optional[datetime] = None
download_url: Optional[str] = None # Presigned URL (only when requested)
class Config:
from_attributes = True
@field_validator("file_size")
@classmethod
def validate_file_size(cls, v):
"""Validate file size is positive"""
if v <= 0:
raise ValueError("File size must be positive")
return v
class FileListResponse(BaseModel):
"""Paginated file list response"""
files: List[FileMetadata]
total: int
limit: int
offset: int
has_more: bool
class Config:
from_attributes = True
class FileUploadParams(BaseModel):
"""Parameters for file upload (optional description)"""
description: Optional[str] = Field(None, max_length=500)
class Config:
from_attributes = True
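A small sketch of how these schemas behave, including the `file_size` validator; all values below are made up for illustration.

```python
# Sketch: FileMetadata validation (Pydantic v2 style, matching field_validator above).
from datetime import datetime, timezone

from pydantic import ValidationError

from app.modules.file_storage.schemas import FileMetadata

fields = dict(
    file_id="11111111-2222-3333-4444-555555555555",
    room_id="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    filename="shift_report.pdf",
    file_type="document",
    mime_type="application/pdf",
    file_size=1_048_576,
    minio_bucket="task-reporter-files",      # placeholder bucket name
    minio_object_path="room-aaaa/documents/1111.pdf",
    uploaded_at=datetime.now(timezone.utc),
    uploader_id="operator@example.com",
)
print(FileMetadata(**fields).file_type)       # FileType.DOCUMENT

try:
    FileMetadata(**{**fields, "file_size": 0})  # validator rejects non-positive sizes
except ValidationError as exc:
    print(exc.errors()[0]["msg"])
```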

app/modules/file_storage/services/__init__.py
@@ -0,0 +1 @@
"""File storage services"""

app/modules/file_storage/services/file_service.py
@@ -0,0 +1,251 @@
"""File storage service layer"""
from sqlalchemy.orm import Session
from fastapi import UploadFile, HTTPException
from app.modules.file_storage.models import RoomFile
from app.modules.file_storage.schemas import FileUploadResponse, FileMetadata, FileListResponse
from app.modules.file_storage.validators import validate_upload_file
from app.modules.file_storage.services import minio_service
from app.modules.chat_room.models import RoomMember, MemberRole
from app.modules.realtime.models import Message, MessageType
from app.modules.realtime.services.message_service import MessageService
from app.core.config import get_settings
from datetime import datetime
from typing import Optional, Dict, Any
import uuid
import logging
logger = logging.getLogger(__name__)
class FileService:
"""Service for file operations"""
@staticmethod
def upload_file(
db: Session,
room_id: str,
uploader_id: str,
file: UploadFile,
description: Optional[str] = None
) -> FileUploadResponse:
"""
Upload file to MinIO and store metadata in database
Args:
db: Database session
room_id: Room ID
uploader_id: User ID uploading the file
file: FastAPI UploadFile object
description: Optional file description
Returns:
FileUploadResponse with file metadata and download URL
Raises:
HTTPException if upload fails
"""
settings = get_settings()
# Validate file
file_type, mime_type, file_size = validate_upload_file(file)
# Generate file ID and object path
file_id = str(uuid.uuid4())
file_extension = file.filename.split(".")[-1] if "." in file.filename else ""
object_path = f"room-{room_id}/{file_type}s/{file_id}.{file_extension}"
# Upload to MinIO
success = minio_service.upload_file(
bucket=settings.MINIO_BUCKET,
object_path=object_path,
file_data=file.file,
file_size=file_size,
content_type=mime_type
)
if not success:
raise HTTPException(
status_code=503,
detail="File storage service temporarily unavailable"
)
# Create database record
try:
room_file = RoomFile(
file_id=file_id,
room_id=room_id,
uploader_id=uploader_id,
filename=file.filename,
file_type=file_type,
mime_type=mime_type,
file_size=file_size,
minio_bucket=settings.MINIO_BUCKET,
minio_object_path=object_path,
uploaded_at=datetime.utcnow()
)
db.add(room_file)
db.commit()
db.refresh(room_file)
# Generate presigned download URL
download_url = minio_service.generate_presigned_url(
bucket=settings.MINIO_BUCKET,
object_path=object_path,
expiry_seconds=3600
)
return FileUploadResponse(
file_id=file_id,
filename=file.filename,
file_type=file_type,
file_size=file_size,
mime_type=mime_type,
download_url=download_url,
uploaded_at=room_file.uploaded_at,
uploader_id=uploader_id
)
except Exception as e:
# Rollback database and cleanup MinIO
db.rollback()
minio_service.delete_file(settings.MINIO_BUCKET, object_path)
logger.error(f"Failed to create file record: {e}")
raise HTTPException(status_code=500, detail="Failed to save file metadata")
@staticmethod
def get_file(db: Session, file_id: str) -> Optional[RoomFile]:
"""Get file metadata by ID"""
return db.query(RoomFile).filter(
RoomFile.file_id == file_id,
RoomFile.deleted_at.is_(None)
).first()
@staticmethod
def get_files(
db: Session,
room_id: str,
limit: int = 50,
offset: int = 0,
file_type: Optional[str] = None
) -> FileListResponse:
"""Get paginated list of files in a room"""
query = db.query(RoomFile).filter(
RoomFile.room_id == room_id,
RoomFile.deleted_at.is_(None)
)
if file_type:
query = query.filter(RoomFile.file_type == file_type)
total = query.count()
files = query.order_by(RoomFile.uploaded_at.desc()).offset(offset).limit(limit).all()
        file_metadata_list = [
            FileMetadata.model_validate(f) for f in files  # Pydantic v2 replacement for from_orm
        ]
return FileListResponse(
files=file_metadata_list,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(files)) < total
)
@staticmethod
def delete_file(
db: Session,
file_id: str,
user_id: str,
is_room_owner: bool = False
) -> Optional[RoomFile]:
"""Soft delete file"""
file = db.query(RoomFile).filter(RoomFile.file_id == file_id).first()
if not file:
return None
# Check permissions
if not is_room_owner and file.uploader_id != user_id:
raise HTTPException(
status_code=403,
detail="Only file uploader or room owner can delete files"
)
# Soft delete
file.deleted_at = datetime.utcnow()
db.commit()
db.refresh(file)
return file
@staticmethod
def check_room_membership(db: Session, room_id: str, user_id: str) -> Optional[RoomMember]:
"""Check if user is member of room"""
return db.query(RoomMember).filter(
RoomMember.room_id == room_id,
RoomMember.user_id == user_id,
RoomMember.removed_at.is_(None)
).first()
@staticmethod
def check_write_permission(member: Optional[RoomMember]) -> bool:
"""Check if member has write permission"""
if not member:
return False
return member.role in [MemberRole.OWNER, MemberRole.EDITOR]
@staticmethod
def create_file_reference_message(
db: Session,
room_id: str,
sender_id: str,
file_id: str,
filename: str,
file_type: str,
file_url: str,
description: Optional[str] = None
) -> Message:
"""
Create a message referencing an uploaded file in the room chat.
Args:
db: Database session
room_id: Room ID
sender_id: User ID who uploaded the file
file_id: File ID in room_files table
filename: Original filename
file_type: Type of file (image, document, log)
file_url: Presigned download URL
description: Optional description for the file
Returns:
Created Message object with file reference
"""
# Determine message type based on file type
if file_type == "image":
msg_type = MessageType.IMAGE_REF
content = description or f"[Image] {filename}"
else:
msg_type = MessageType.FILE_REF
content = description or f"[File] {filename}"
# Create metadata with file info
metadata: Dict[str, Any] = {
"file_id": file_id,
"file_url": file_url,
"filename": filename,
"file_type": file_type
}
# Use MessageService to create the message
return MessageService.create_message(
db=db,
room_id=room_id,
sender_id=sender_id,
content=content,
message_type=msg_type,
metadata=metadata
)
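As a hedged illustration of the permission helpers above (the only part of this service that runs without a database or MinIO), a small pytest-style sketch using a stand-in member object:

```python
# Sketch: FileService write-permission rules.
from types import SimpleNamespace

from app.modules.chat_room.models import MemberRole
from app.modules.file_storage.services.file_service import FileService


def test_write_permission_rules():
    assert FileService.check_write_permission(SimpleNamespace(role=MemberRole.OWNER))
    assert FileService.check_write_permission(SimpleNamespace(role=MemberRole.EDITOR))
    assert not FileService.check_write_permission(None)  # non-members cannot upload
```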

app/modules/file_storage/services/minio_service.py
@@ -0,0 +1,160 @@
"""MinIO service layer for file operations"""
from minio.error import S3Error
from app.core.minio_client import get_minio_client
from app.core.config import get_settings
from datetime import timedelta
from typing import BinaryIO
import logging
import time
logger = logging.getLogger(__name__)
def upload_file(
bucket: str,
object_path: str,
file_data: BinaryIO,
file_size: int,
content_type: str,
max_retries: int = 3
) -> bool:
"""
Upload file to MinIO with retry logic
Args:
bucket: Bucket name
object_path: Object path in bucket
file_data: File data stream
file_size: File size in bytes
content_type: MIME type
max_retries: Maximum retry attempts
Returns:
True if upload successful, False otherwise
"""
client = get_minio_client()
for attempt in range(max_retries):
try:
# Reset file pointer to beginning
file_data.seek(0)
client.put_object(
bucket,
object_path,
file_data,
length=file_size,
content_type=content_type
)
logger.info(f"File uploaded successfully: {bucket}/{object_path}")
return True
except S3Error as e:
logger.error(f"MinIO upload error (attempt {attempt + 1}/{max_retries}): {e}")
if attempt < max_retries - 1:
# Exponential backoff: 1s, 2s, 4s
sleep_time = 2 ** attempt
logger.info(f"Retrying upload after {sleep_time}s...")
time.sleep(sleep_time)
else:
logger.error(f"Failed to upload file after {max_retries} attempts")
return False
except Exception as e:
logger.error(f"Unexpected error uploading file: {e}")
return False
return False
def generate_presigned_url(
bucket: str,
object_path: str,
expiry_seconds: int = 3600
) -> str:
"""
Generate presigned download URL with expiry
Args:
bucket: Bucket name
object_path: Object path in bucket
expiry_seconds: URL expiry time in seconds (default 1 hour)
Returns:
Presigned URL string
Raises:
Exception if URL generation fails
"""
client = get_minio_client()
try:
url = client.presigned_get_object(
bucket,
object_path,
expires=timedelta(seconds=expiry_seconds)
)
return url
except S3Error as e:
logger.error(f"Failed to generate presigned URL for {bucket}/{object_path}: {e}")
raise
except Exception as e:
logger.error(f"Unexpected error generating presigned URL: {e}")
raise
def delete_file(bucket: str, object_path: str) -> bool:
"""
Delete file from MinIO (for cleanup, not exposed to users)
Args:
bucket: Bucket name
object_path: Object path in bucket
Returns:
True if deleted successfully, False otherwise
"""
client = get_minio_client()
try:
client.remove_object(bucket, object_path)
logger.info(f"File deleted: {bucket}/{object_path}")
return True
except S3Error as e:
logger.error(f"Failed to delete file {bucket}/{object_path}: {e}")
return False
except Exception as e:
logger.error(f"Unexpected error deleting file: {e}")
return False
def check_file_exists(bucket: str, object_path: str) -> bool:
"""
Check if file exists in MinIO
Args:
bucket: Bucket name
object_path: Object path in bucket
Returns:
True if file exists, False otherwise
"""
client = get_minio_client()
try:
client.stat_object(bucket, object_path)
return True
except S3Error:
return False
except Exception as e:
logger.error(f"Error checking file existence: {e}")
return False
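A hedged usage sketch of the helpers above against a development MinIO instance; the bucket name is a placeholder, and `get_minio_client` is assumed to be configured in `app.core.minio_client`, which is not shown here.

```python
# Sketch: round-trip an in-memory object through the MinIO helpers.
import io

from app.modules.file_storage.services import minio_service

payload = b"2025-12-01 17:42:01 line-3 sensor timeout\n"
bucket = "task-reporter-files"              # placeholder bucket name
path = "room-demo/logs/example.log"

if minio_service.upload_file(bucket, path, io.BytesIO(payload), len(payload), "text/plain"):
    url = minio_service.generate_presigned_url(bucket, path, expiry_seconds=600)
    print("download within 10 minutes:", url)
    print("exists:", minio_service.check_file_exists(bucket, path))
    minio_service.delete_file(bucket, path)  # cleanup
```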

app/modules/file_storage/validators.py
@@ -0,0 +1,158 @@
"""File validation utilities"""
import magic
from fastapi import UploadFile, HTTPException
from typing import Set
import logging
logger = logging.getLogger(__name__)
# MIME type whitelists
IMAGE_TYPES: Set[str] = {
"image/jpeg",
"image/png",
"image/gif"
}
DOCUMENT_TYPES: Set[str] = {
"application/pdf"
}
LOG_TYPES: Set[str] = {
"text/plain",
"text/csv"
}
# File size limits (bytes)
IMAGE_MAX_SIZE = 10 * 1024 * 1024 # 10MB
DOCUMENT_MAX_SIZE = 20 * 1024 * 1024 # 20MB
LOG_MAX_SIZE = 5 * 1024 * 1024 # 5MB
def detect_mime_type(file_data: bytes) -> str:
"""
Detect MIME type from file content using python-magic
Args:
file_data: First chunk of file data
Returns:
MIME type string
"""
try:
mime = magic.Magic(mime=True)
return mime.from_buffer(file_data)
except Exception as e:
logger.error(f"Failed to detect MIME type: {e}")
return "application/octet-stream"
def validate_file_type(file: UploadFile, allowed_types: Set[str]) -> str:
"""
Validate file MIME type using actual file content
Args:
file: FastAPI UploadFile object
allowed_types: Set of allowed MIME types
Returns:
Detected MIME type
Raises:
HTTPException if file type is not allowed
"""
# Read first 2048 bytes to detect MIME type
file.file.seek(0)
header = file.file.read(2048)
file.file.seek(0)
# Detect actual MIME type from content
detected_mime = detect_mime_type(header)
if detected_mime not in allowed_types:
raise HTTPException(
status_code=400,
detail=f"File type not allowed: {detected_mime}. Allowed types: {', '.join(allowed_types)}"
)
return detected_mime
def validate_file_size(file: UploadFile, max_size: int):
"""
Validate file size
Args:
file: FastAPI UploadFile object
max_size: Maximum allowed size in bytes
    Returns:
        File size in bytes
    Raises:
        HTTPException if file exceeds max size
"""
# Seek to end to get file size
file.file.seek(0, 2) # 2 = SEEK_END
file_size = file.file.tell()
file.file.seek(0) # Reset to beginning
if file_size > max_size:
max_mb = max_size / (1024 * 1024)
actual_mb = file_size / (1024 * 1024)
raise HTTPException(
status_code=413,
detail=f"File size exceeds limit: {actual_mb:.2f}MB > {max_mb:.2f}MB"
)
return file_size
def get_file_type_and_limits(mime_type: str) -> tuple[str, int]:
"""
Determine file type category and size limit from MIME type
Args:
mime_type: MIME type string
Returns:
Tuple of (file_type, max_size)
Raises:
HTTPException if MIME type not recognized
"""
if mime_type in IMAGE_TYPES:
return ("image", IMAGE_MAX_SIZE)
elif mime_type in DOCUMENT_TYPES:
return ("document", DOCUMENT_MAX_SIZE)
elif mime_type in LOG_TYPES:
return ("log", LOG_MAX_SIZE)
else:
raise HTTPException(
status_code=400,
detail=f"Unsupported file type: {mime_type}"
)
def validate_upload_file(file: UploadFile) -> tuple[str, str, int]:
"""
Validate uploaded file (type and size)
Args:
file: FastAPI UploadFile object
Returns:
Tuple of (file_type, mime_type, file_size)
Raises:
HTTPException if validation fails
"""
# Combine all allowed types
all_allowed_types = IMAGE_TYPES | DOCUMENT_TYPES | LOG_TYPES
# Validate MIME type
mime_type = validate_file_type(file, all_allowed_types)
# Get file type category and max size
file_type, max_size = get_file_type_and_limits(mime_type)
# Validate file size
file_size = validate_file_size(file, max_size)
return (file_type, mime_type, file_size)
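To make the size and type policy above concrete, a small sketch that exercises only the pure helpers (no uploaded file object or libmagic call required):

```python
# Sketch: map MIME types to their category and size cap.
from app.modules.file_storage.validators import (
    IMAGE_MAX_SIZE,
    get_file_type_and_limits,
)

for mime in ("image/png", "application/pdf", "text/csv"):
    category, cap = get_file_type_and_limits(mime)
    print(f"{mime:16s} -> {category:8s} (max {cap // (1024 * 1024)}MB)")

assert get_file_type_and_limits("image/jpeg") == ("image", IMAGE_MAX_SIZE)
# Unsupported types, e.g. "application/zip", raise HTTPException(status_code=400).
```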