feat: Initial commit - Task Reporter incident response system

Complete implementation of the production line incident response system (生產線異常即時反應系統), including:

Backend (FastAPI):
- User authentication with AD integration and session management
- Chat room management (create, list, update, members, roles)
- Real-time messaging via WebSocket (typing indicators, reactions)
- File storage with MinIO (upload, download, image preview)

Frontend (React + Vite):
- Authentication flow with token management
- Room list with filtering, search, and pagination
- Real-time chat interface with WebSocket
- File upload with drag-and-drop and image preview
- Member management and room settings
- Breadcrumb navigation
- 53 unit tests (Vitest)

Specifications:
- authentication: AD auth, sessions, JWT tokens
- chat-room: rooms, members, templates
- realtime-messaging: WebSocket, messages, reactions
- file-storage: MinIO integration, file management
- frontend-core: React SPA structure

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Committed by egg on 2025-12-01 17:42:52 +08:00
Commit: c8966477b9
135 changed files with 23269 additions and 0 deletions

@@ -0,0 +1 @@
"""File storage services"""

@@ -0,0 +1,251 @@
"""File storage service layer"""
from sqlalchemy.orm import Session
from fastapi import UploadFile, HTTPException
from app.modules.file_storage.models import RoomFile
from app.modules.file_storage.schemas import FileUploadResponse, FileMetadata, FileListResponse
from app.modules.file_storage.validators import validate_upload_file
from app.modules.file_storage.services import minio_service
from app.modules.chat_room.models import RoomMember, MemberRole
from app.modules.realtime.models import Message, MessageType
from app.modules.realtime.services.message_service import MessageService
from app.core.config import get_settings
from datetime import datetime
from typing import Optional, Dict, Any
import uuid
import logging
logger = logging.getLogger(__name__)
class FileService:
"""Service for file operations"""
@staticmethod
def upload_file(
db: Session,
room_id: str,
uploader_id: str,
file: UploadFile,
description: Optional[str] = None
) -> FileUploadResponse:
"""
Upload file to MinIO and store metadata in database
Args:
db: Database session
room_id: Room ID
uploader_id: User ID uploading the file
file: FastAPI UploadFile object
description: Optional file description
Returns:
FileUploadResponse with file metadata and download URL
Raises:
HTTPException if upload fails
"""
settings = get_settings()
# Validate file
file_type, mime_type, file_size = validate_upload_file(file)
# Generate file ID and object path
file_id = str(uuid.uuid4())
file_extension = file.filename.split(".")[-1] if "." in file.filename else ""
object_path = f"room-{room_id}/{file_type}s/{file_id}.{file_extension}"
# Upload to MinIO
success = minio_service.upload_file(
bucket=settings.MINIO_BUCKET,
object_path=object_path,
file_data=file.file,
file_size=file_size,
content_type=mime_type
)
if not success:
raise HTTPException(
status_code=503,
detail="File storage service temporarily unavailable"
)
# Create database record
try:
room_file = RoomFile(
file_id=file_id,
room_id=room_id,
uploader_id=uploader_id,
filename=file.filename,
file_type=file_type,
mime_type=mime_type,
file_size=file_size,
minio_bucket=settings.MINIO_BUCKET,
minio_object_path=object_path,
uploaded_at=datetime.utcnow()
)
db.add(room_file)
db.commit()
db.refresh(room_file)
# Generate presigned download URL
download_url = minio_service.generate_presigned_url(
bucket=settings.MINIO_BUCKET,
object_path=object_path,
expiry_seconds=3600
)
return FileUploadResponse(
file_id=file_id,
filename=file.filename,
file_type=file_type,
file_size=file_size,
mime_type=mime_type,
download_url=download_url,
uploaded_at=room_file.uploaded_at,
uploader_id=uploader_id
)
except Exception as e:
# Rollback database and cleanup MinIO
db.rollback()
minio_service.delete_file(settings.MINIO_BUCKET, object_path)
logger.error(f"Failed to create file record: {e}")
raise HTTPException(status_code=500, detail="Failed to save file metadata")
@staticmethod
def get_file(db: Session, file_id: str) -> Optional[RoomFile]:
"""Get file metadata by ID"""
return db.query(RoomFile).filter(
RoomFile.file_id == file_id,
RoomFile.deleted_at.is_(None)
).first()
@staticmethod
def get_files(
db: Session,
room_id: str,
limit: int = 50,
offset: int = 0,
file_type: Optional[str] = None
) -> FileListResponse:
"""Get paginated list of files in a room"""
query = db.query(RoomFile).filter(
RoomFile.room_id == room_id,
RoomFile.deleted_at.is_(None)
)
if file_type:
query = query.filter(RoomFile.file_type == file_type)
total = query.count()
files = query.order_by(RoomFile.uploaded_at.desc()).offset(offset).limit(limit).all()
file_metadata_list = [
FileMetadata.from_orm(f) for f in files
]
return FileListResponse(
files=file_metadata_list,
total=total,
limit=limit,
offset=offset,
has_more=(offset + len(files)) < total
)
@staticmethod
def delete_file(
db: Session,
file_id: str,
user_id: str,
is_room_owner: bool = False
) -> Optional[RoomFile]:
"""Soft delete file"""
file = db.query(RoomFile).filter(RoomFile.file_id == file_id).first()
if not file:
return None
# Check permissions
if not is_room_owner and file.uploader_id != user_id:
raise HTTPException(
status_code=403,
detail="Only file uploader or room owner can delete files"
)
# Soft delete
file.deleted_at = datetime.utcnow()
db.commit()
db.refresh(file)
return file
@staticmethod
def check_room_membership(db: Session, room_id: str, user_id: str) -> Optional[RoomMember]:
"""Check if user is member of room"""
return db.query(RoomMember).filter(
RoomMember.room_id == room_id,
RoomMember.user_id == user_id,
RoomMember.removed_at.is_(None)
).first()
@staticmethod
def check_write_permission(member: Optional[RoomMember]) -> bool:
"""Check if member has write permission"""
if not member:
return False
return member.role in [MemberRole.OWNER, MemberRole.EDITOR]
@staticmethod
def create_file_reference_message(
db: Session,
room_id: str,
sender_id: str,
file_id: str,
filename: str,
file_type: str,
file_url: str,
description: Optional[str] = None
) -> Message:
"""
Create a message referencing an uploaded file in the room chat.
Args:
db: Database session
room_id: Room ID
sender_id: User ID who uploaded the file
file_id: File ID in room_files table
filename: Original filename
file_type: Type of file (image, document, log)
file_url: Presigned download URL
description: Optional description for the file
Returns:
Created Message object with file reference
"""
# Determine message type based on file type
if file_type == "image":
msg_type = MessageType.IMAGE_REF
content = description or f"[Image] {filename}"
else:
msg_type = MessageType.FILE_REF
content = description or f"[File] {filename}"
# Create metadata with file info
metadata: Dict[str, Any] = {
"file_id": file_id,
"file_url": file_url,
"filename": filename,
"file_type": file_type
}
# Use MessageService to create the message
return MessageService.create_message(
db=db,
room_id=room_id,
sender_id=sender_id,
content=content,
message_type=msg_type,
metadata=metadata
)
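The API routers that call this service are not part of the excerpt above. Purely as an illustration, the sketch below shows one way a FastAPI upload endpoint might chain the permission checks, the upload, and the file-reference message. The import path, the `get_db`/`get_current_user` dependencies, and the `current_user.user_id` attribute are assumptions for the sketch, not code from this commit.

```python
# Hypothetical router sketch: not part of this commit's excerpt.
from typing import Optional

from fastapi import APIRouter, Depends, File, HTTPException, UploadFile
from sqlalchemy.orm import Session

from app.modules.file_storage.services.file_service import FileService  # assumed module path

router = APIRouter()


def get_db():  # placeholder: the real dependency would yield a SQLAlchemy session
    ...


def get_current_user():  # placeholder: the real dependency would return the authenticated user
    ...


@router.post("/rooms/{room_id}/files")
def upload_room_file(
    room_id: str,
    file: UploadFile = File(...),
    description: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user=Depends(get_current_user),
):
    # Enforce room membership and write permission before touching storage
    member = FileService.check_room_membership(db, room_id, current_user.user_id)
    if not FileService.check_write_permission(member):
        raise HTTPException(status_code=403, detail="No write permission in this room")

    # Upload to MinIO and persist the metadata row
    response = FileService.upload_file(db, room_id, current_user.user_id, file, description)

    # Announce the file in the room chat as a reference message
    FileService.create_file_reference_message(
        db=db,
        room_id=room_id,
        sender_id=current_user.user_id,
        file_id=response.file_id,
        filename=response.filename,
        file_type=response.file_type,
        file_url=response.download_url,
        description=description,
    )
    return response
```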

@@ -0,0 +1,160 @@
"""MinIO service layer for file operations"""
from minio.error import S3Error
from app.core.minio_client import get_minio_client
from app.core.config import get_settings
from datetime import timedelta
from typing import BinaryIO
import logging
import time
logger = logging.getLogger(__name__)
def upload_file(
bucket: str,
object_path: str,
file_data: BinaryIO,
file_size: int,
content_type: str,
max_retries: int = 3
) -> bool:
"""
Upload file to MinIO with retry logic
Args:
bucket: Bucket name
object_path: Object path in bucket
file_data: File data stream
file_size: File size in bytes
content_type: MIME type
max_retries: Maximum retry attempts
Returns:
True if upload successful, False otherwise
"""
client = get_minio_client()
for attempt in range(max_retries):
try:
# Reset file pointer to beginning
file_data.seek(0)
client.put_object(
bucket,
object_path,
file_data,
length=file_size,
content_type=content_type
)
logger.info(f"File uploaded successfully: {bucket}/{object_path}")
return True
except S3Error as e:
logger.error(f"MinIO upload error (attempt {attempt + 1}/{max_retries}): {e}")
if attempt < max_retries - 1:
# Exponential backoff: 1s, 2s, 4s
sleep_time = 2 ** attempt
logger.info(f"Retrying upload after {sleep_time}s...")
time.sleep(sleep_time)
else:
logger.error(f"Failed to upload file after {max_retries} attempts")
return False
except Exception as e:
logger.error(f"Unexpected error uploading file: {e}")
return False
return False
def generate_presigned_url(
bucket: str,
object_path: str,
expiry_seconds: int = 3600
) -> str:
"""
Generate presigned download URL with expiry
Args:
bucket: Bucket name
object_path: Object path in bucket
expiry_seconds: URL expiry time in seconds (default 1 hour)
Returns:
Presigned URL string
Raises:
Exception if URL generation fails
"""
client = get_minio_client()
try:
url = client.presigned_get_object(
bucket,
object_path,
expires=timedelta(seconds=expiry_seconds)
)
return url
except S3Error as e:
logger.error(f"Failed to generate presigned URL for {bucket}/{object_path}: {e}")
raise
except Exception as e:
logger.error(f"Unexpected error generating presigned URL: {e}")
raise
def delete_file(bucket: str, object_path: str) -> bool:
"""
Delete file from MinIO (for cleanup, not exposed to users)
Args:
bucket: Bucket name
object_path: Object path in bucket
Returns:
True if deleted successfully, False otherwise
"""
client = get_minio_client()
try:
client.remove_object(bucket, object_path)
logger.info(f"File deleted: {bucket}/{object_path}")
return True
except S3Error as e:
logger.error(f"Failed to delete file {bucket}/{object_path}: {e}")
return False
except Exception as e:
logger.error(f"Unexpected error deleting file: {e}")
return False
def check_file_exists(bucket: str, object_path: str) -> bool:
"""
Check if file exists in MinIO
Args:
bucket: Bucket name
object_path: Object path in bucket
Returns:
True if file exists, False otherwise
"""
client = get_minio_client()
try:
client.stat_object(bucket, object_path)
return True
except S3Error:
return False
except Exception as e:
logger.error(f"Error checking file existence: {e}")
return False
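`get_minio_client` is imported from `app.core.minio_client`, which does not appear in this excerpt. A minimal sketch of what that factory might look like, assuming the settings expose endpoint and credential fields (only `MINIO_BUCKET` is confirmed by the code above):

```python
# Sketch of a possible app/core/minio_client.py; not part of this commit's excerpt.
from functools import lru_cache

from minio import Minio

from app.core.config import get_settings


@lru_cache()
def get_minio_client() -> Minio:
    """Return a shared MinIO client built from application settings."""
    settings = get_settings()
    return Minio(
        settings.MINIO_ENDPOINT,                # assumed setting name, e.g. "minio:9000"
        access_key=settings.MINIO_ACCESS_KEY,   # assumed setting name
        secret_key=settings.MINIO_SECRET_KEY,   # assumed setting name
        secure=getattr(settings, "MINIO_SECURE", False),  # assumed; default off for in-cluster HTTP
    )
```

Caching the client with `lru_cache` would mirror the `get_settings()` pattern the services above already rely on, so repeated calls reuse one configured client instead of rebuilding it per request.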