feat: Improve file display, timezone handling, and LOT management

Changes:
- Fix datetime serialization with UTC 'Z' suffix for correct timezone display
- Add PDF upload support with extension fallback for MIME detection
- Fix LOT add/remove by creating new list for SQLAlchemy JSON change detection
- Add file message components (FileMessage, ImageLightbox, UploadPreview)
- Add multi-file upload support with progress tracking
- Link uploaded files to chat messages via message_id
- Include file attachments in AI report generation
- Update specs for file-storage, realtime-messaging, and ai-report-generation

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: egg
Date: 2025-12-08 12:39:15 +08:00
Parent: 599802b818
Commit: 44822a561a
36 changed files with 2252 additions and 156 deletions

View File

@@ -3,12 +3,37 @@
生產線異常即時反應系統 (Task Reporter)
"""
import os
import json
from pathlib import Path
from datetime import datetime
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from fastapi.responses import FileResponse, JSONResponse
from app.core.config import get_settings
class UTCDateTimeEncoder(json.JSONEncoder):
"""Custom JSON encoder that formats datetime with 'Z' suffix for UTC"""
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat() + 'Z'
return super().default(obj)
class UTCJSONResponse(JSONResponse):
"""JSONResponse that uses UTCDateTimeEncoder"""
def render(self, content) -> bytes:
return json.dumps(
content,
ensure_ascii=False,
allow_nan=False,
indent=None,
separators=(",", ":"),
cls=UTCDateTimeEncoder,
).encode("utf-8")
from app.modules.auth import router as auth_router
from app.modules.auth.users_router import router as users_router
from app.modules.auth.middleware import auth_middleware
@@ -26,12 +51,13 @@ settings = get_settings()
# Database tables are managed by Alembic migrations
# Run: alembic upgrade head
# Initialize FastAPI app
# Initialize FastAPI app with custom JSON response for UTC datetime
app = FastAPI(
title="Task Reporter API",
description="Production Line Incident Response System - 生產線異常即時反應系統",
version="1.0.0",
debug=settings.DEBUG,
default_response_class=UTCJSONResponse,
)
# CORS middleware - origins configured via CORS_ORIGINS environment variable
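
A note on why the 'Z' suffix matters: datetime.utcnow() produces naive datetimes, and their isoformat() string carries no offset, so JavaScript's Date parser treats it as local time. A minimal sketch of the behavior the encoder fixes (values illustrative):

from datetime import datetime, timezone

naive = datetime(2025, 12, 8, 4, 39, 15)             # what datetime.utcnow() returns
assert naive.isoformat() == "2025-12-08T04:39:15"     # no offset -> browsers assume local time
utc_str = naive.isoformat() + "Z"                     # what UTCDateTimeEncoder emits
parsed = datetime.fromisoformat(utc_str.replace("Z", "+00:00"))
assert parsed.tzinfo == timezone.utc                  # unambiguous UTC after the fix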

View File

@@ -79,6 +79,7 @@ class IncidentRoom(Base):
# Relationships
members = relationship("RoomMember", back_populates="room", cascade="all, delete-orphan")
files = relationship("RoomFile", back_populates="room", cascade="all, delete-orphan")
reports = relationship("GeneratedReport", back_populates="room", cascade="all, delete-orphan")
# Indexes for common queries
__table_args__ = (

View File

@@ -193,7 +193,7 @@ async def permanent_delete_room(
"type": "system",
"event": "room_deleted",
"room_id": room_id,
"timestamp": datetime.utcnow().isoformat()
"timestamp": datetime.utcnow().isoformat() + "Z"
})
success, error = room_service.permanent_delete_room(db, room_id)
@@ -246,7 +246,7 @@ async def join_room(
detail={
"message": "Already a member of this room",
"current_role": existing.role.value,
"added_at": existing.added_at.isoformat()
"added_at": existing.added_at.isoformat() + "Z"
}
)
@@ -505,12 +505,12 @@ async def add_lot(
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Room not found")
# Get current lots or initialize empty list
current_lots = room.lots or []
current_lots = list(room.lots or []) # Create a new list to ensure change detection
# Prevent duplicates
if request.lot not in current_lots:
current_lots.append(request.lot)
room.lots = current_lots
room.lots = current_lots # Assigning the new list triggers SQLAlchemy change detection
room.last_updated_at = datetime.utcnow()
db.commit()
db.refresh(room)
@@ -532,11 +532,11 @@ async def remove_lot(
if not room:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Room not found")
current_lots = room.lots or []
current_lots = list(room.lots or []) # Create a new list to ensure change detection
if lot in current_lots:
current_lots.remove(lot)
room.lots = current_lots
room.lots = current_lots # Assigning the new list triggers SQLAlchemy change detection
room.last_updated_at = datetime.utcnow()
db.commit()
db.refresh(room)
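
Background on this fix: SQLAlchemy's plain JSON column type does not track in-place mutations, so appending to the list returned by room.lots never marks the attribute dirty and the UPDATE is silently skipped. A hedged sketch of the two usual remedies, assuming a JSON-typed lots column as in this router (helper names are illustrative):

from sqlalchemy.orm.attributes import flag_modified

def add_lot_by_copy(room, lot: str) -> None:
    lots = list(room.lots or [])       # copy gives the attribute a new object identity
    if lot not in lots:
        lots.append(lot)
        room.lots = lots               # re-assignment is what triggers change detection

def add_lot_by_flag(room, lot: str) -> None:
    room.lots = room.lots or []
    if lot not in room.lots:
        room.lots.append(lot)          # in-place append alone is invisible to the session
        flag_modified(room, "lots")    # explicitly mark the JSON attribute as changed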

View File

@@ -2,7 +2,7 @@
Request and response models for API endpoints
"""
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, ConfigDict, field_serializer
from typing import Optional, List
from datetime import datetime
from enum import Enum
@@ -98,8 +98,14 @@ class MemberResponse(BaseModel):
added_at: datetime
removed_at: Optional[datetime] = None
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_serializer("added_at", "removed_at")
def serialize_datetime(self, dt: Optional[datetime]) -> Optional[str]:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
if dt is None:
return None
return dt.isoformat() + "Z"
class RoomResponse(BaseModel):
@@ -127,8 +133,17 @@ class RoomResponse(BaseModel):
is_member: bool = False
is_admin_view: bool = False
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_serializer(
"created_at", "resolved_at", "archived_at",
"last_activity_at", "last_updated_at", "ownership_transferred_at"
)
def serialize_datetime(self, dt: Optional[datetime]) -> Optional[str]:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
if dt is None:
return None
return dt.isoformat() + "Z"
class RoomListResponse(BaseModel):
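
The field_serializer only affects serialization, not what is stored or validated. A small sketch (illustrative model, not the real MemberResponse) of what API consumers now receive:

from datetime import datetime
from typing import Optional
from pydantic import BaseModel, ConfigDict, field_serializer

class MemberOut(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    added_at: datetime
    removed_at: Optional[datetime] = None

    @field_serializer("added_at", "removed_at")
    def serialize_datetime(self, dt: Optional[datetime]) -> Optional[str]:
        return None if dt is None else dt.isoformat() + "Z"

m = MemberOut(added_at=datetime(2025, 12, 8, 4, 39, 15))
assert m.model_dump() == {"added_at": "2025-12-08T04:39:15Z", "removed_at": None}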

View File

@@ -467,7 +467,11 @@ class RoomService:
f"Failed to delete report file: {report.docx_storage_path}"
)
# Step 3: Delete room from database (CASCADE handles related tables)
# Step 3: Delete reports from database (before room delete due to relationship handling)
for report in reports:
db.delete(report)
# Step 4: Delete room from database (CASCADE handles other related tables)
db.delete(room)
db.commit()

View File

@@ -16,6 +16,9 @@ class RoomFile(Base):
# Foreign key to incident room (CASCADE delete when room is permanently deleted)
room_id = Column(String(36), ForeignKey("tr_incident_rooms.room_id", ondelete="CASCADE"), nullable=False)
# Foreign key to associated message (nullable for legacy files)
message_id = Column(String(36), ForeignKey("tr_messages.message_id", ondelete="SET NULL"), nullable=True)
# File metadata
uploader_id = Column(String(255), nullable=False)
filename = Column(String(255), nullable=False)
@@ -33,11 +36,13 @@ class RoomFile(Base):
# Relationships
room = relationship("IncidentRoom", back_populates="files")
message = relationship("Message", backref="file_attachment", uselist=False)
# Indexes
__table_args__ = (
Index("ix_tr_room_files_room_uploaded", "room_id", "uploaded_at"),
Index("ix_tr_room_files_uploader", "uploader_id"),
Index("ix_tr_room_files_message", "message_id"),
)
def __repr__(self):

View File

@@ -19,7 +19,8 @@ from app.modules.file_storage.schemas import FileUploadResponse, FileMetadata, F
from app.modules.file_storage.services.file_service import FileService
from app.modules.file_storage.services import minio_service
from app.modules.realtime.websocket_manager import manager as websocket_manager
from app.modules.realtime.schemas import FileUploadedBroadcast, FileDeletedBroadcast, FileUploadAck
from app.modules.realtime.schemas import FileUploadedBroadcast, FileDeletedBroadcast, FileUploadAck, MessageDeletedBroadcast, MessageBroadcast, MessageTypeEnum
from app.modules.realtime.services.message_service import MessageService
logger = logging.getLogger(__name__)
@@ -58,11 +59,52 @@ async def upload_file(
# Upload file
result = FileService.upload_file(db, room_id, user_email, file, description)
# Fetch the message and display name for broadcasting (before background task)
message_obj = MessageService.get_message(db, result.message_id) if result.message_id else None
display_name = MessageService.get_display_name(db, user_email)
# Prepare message broadcast data (needed before db session closes)
message_data = None
if message_obj:
message_data = {
"message_id": message_obj.message_id,
"room_id": message_obj.room_id,
"sender_id": message_obj.sender_id,
"sender_display_name": display_name,
"content": message_obj.content,
"message_type": message_obj.message_type.value,
"metadata": message_obj.message_metadata,
"created_at": message_obj.created_at,
"sequence_number": message_obj.sequence_number,
}
# Broadcast file upload event to room members via WebSocket
async def broadcast_file_upload():
try:
# First, broadcast the message event so it appears in chat
if message_data:
logger.info(f"Broadcasting message for file upload. message_data: {message_data}")
msg_broadcast = MessageBroadcast(
type="message",
message_id=message_data["message_id"],
room_id=message_data["room_id"],
sender_id=message_data["sender_id"],
sender_display_name=message_data["sender_display_name"],
content=message_data["content"],
message_type=MessageTypeEnum(message_data["message_type"]),
metadata=message_data["metadata"],
created_at=message_data["created_at"],
sequence_number=message_data["sequence_number"],
)
broadcast_dict = msg_broadcast.model_dump(mode='json')
logger.info(f"Message broadcast dict: {broadcast_dict}")
await websocket_manager.broadcast_to_room(room_id, broadcast_dict)
logger.info(f"Broadcasted file message: {message_data['message_id']} to room {room_id}")
# Then broadcast file uploaded event (for file drawer updates)
broadcast = FileUploadedBroadcast(
file_id=result.file_id,
message_id=result.message_id,
room_id=room_id,
uploader_id=user_email,
filename=result.filename,
@@ -70,10 +112,11 @@ async def upload_file(
file_size=result.file_size,
mime_type=result.mime_type,
download_url=result.download_url,
thumbnail_url=result.thumbnail_url,
uploaded_at=result.uploaded_at
)
await websocket_manager.broadcast_to_room(room_id, broadcast.to_dict())
logger.info(f"Broadcasted file upload event: {result.file_id} to room {room_id}")
logger.info(f"Broadcasted file upload event: {result.file_id} (message: {result.message_id}) to room {room_id}")
# Send acknowledgment to uploader
ack = FileUploadAck(
@@ -86,7 +129,7 @@ async def upload_file(
logger.error(f"Failed to broadcast file upload: {e}")
# Run broadcast in background
background_tasks.add_task(asyncio.create_task, broadcast_file_upload())
background_tasks.add_task(broadcast_file_upload)
return result
@@ -149,9 +192,13 @@ async def get_file(
expiry_seconds=3600
)
# For images, the download URL also serves as thumbnail (CSS resized on frontend)
thumbnail_url = download_url if file_record.file_type == "image" else None
# Build response with download URL
return FileMetadata(
file_id=file_record.file_id,
message_id=file_record.message_id,
room_id=file_record.room_id,
filename=file_record.filename,
file_type=file_record.file_type,
@@ -162,7 +209,8 @@ async def get_file(
uploaded_at=file_record.uploaded_at,
uploader_id=file_record.uploader_id,
deleted_at=file_record.deleted_at,
download_url=download_url
download_url=download_url,
thumbnail_url=thumbnail_url
)
@@ -204,25 +252,38 @@ async def delete_file(
# Check if admin
is_admin = membership_service.is_system_admin(user_email)
# Delete file (service will verify permissions)
deleted_file = FileService.delete_file(db, file_id, user_email, is_room_owner or is_admin)
# Delete file (service will verify permissions and cascade to message)
deleted_file, deleted_message_id = FileService.delete_file(db, file_id, user_email, is_room_owner or is_admin)
# Broadcast file deletion event to room members via WebSocket
# Broadcast file and message deletion events to room members via WebSocket
if deleted_file:
async def broadcast_file_delete():
try:
broadcast = FileDeletedBroadcast(
# Broadcast file deleted event
file_broadcast = FileDeletedBroadcast(
file_id=file_id,
message_id=deleted_message_id,
room_id=room_id,
deleted_by=user_email,
deleted_at=deleted_file.deleted_at
)
await websocket_manager.broadcast_to_room(room_id, broadcast.to_dict())
await websocket_manager.broadcast_to_room(room_id, file_broadcast.to_dict())
logger.info(f"Broadcasted file deletion event: {file_id} from room {room_id}")
# Also broadcast message deleted event if there was an associated message
if deleted_message_id:
msg_broadcast = MessageDeletedBroadcast(
message_id=deleted_message_id,
room_id=room_id,
deleted_by=user_email,
deleted_at=deleted_file.deleted_at
)
await websocket_manager.broadcast_to_room(room_id, msg_broadcast.to_dict())
logger.info(f"Broadcasted message deletion event: {deleted_message_id} from room {room_id}")
except Exception as e:
logger.error(f"Failed to broadcast file deletion: {e}")
logger.error(f"Failed to broadcast file/message deletion: {e}")
# Run broadcast in background
background_tasks.add_task(asyncio.create_task, broadcast_file_delete())
background_tasks.add_task(broadcast_file_delete)
return None
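
The add_task change is behavioral, not cosmetic: Starlette's BackgroundTasks awaits an async callable itself after the response is sent, whereas the old add_task(asyncio.create_task, broadcast_file_upload()) built the coroutine eagerly and then fired it off without awaiting, so completion and errors were detached from the background task. A minimal sketch of the corrected pattern (illustrative endpoint, not the real router):

import asyncio
from fastapi import BackgroundTasks, FastAPI

app = FastAPI()

async def broadcast_file_upload() -> None:
    await asyncio.sleep(0)   # stand-in for the WebSocket broadcast work

@app.post("/rooms/{room_id}/files")
async def upload_file(room_id: str, background_tasks: BackgroundTasks):
    # Pass the coroutine function itself; it is awaited after the response goes out.
    background_tasks.add_task(broadcast_file_upload)
    return {"room_id": room_id}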

View File

@@ -1,5 +1,5 @@
"""Pydantic schemas for file storage operations"""
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field, field_validator, field_serializer, ConfigDict
from typing import Optional, List
from datetime import datetime
from enum import Enum
@@ -15,21 +15,28 @@ class FileType(str, Enum):
class FileUploadResponse(BaseModel):
"""Response after successful file upload"""
file_id: str
message_id: Optional[str] = None # Associated chat message ID
filename: str
file_type: FileType
file_size: int
mime_type: str
download_url: str # Presigned URL
thumbnail_url: Optional[str] = None # Thumbnail URL for images
uploaded_at: datetime
uploader_id: str
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_serializer("uploaded_at")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
class FileMetadata(BaseModel):
"""File metadata response"""
file_id: str
message_id: Optional[str] = None # Associated chat message ID
room_id: str
filename: str
file_type: FileType
@@ -41,9 +48,9 @@ class FileMetadata(BaseModel):
uploader_id: str
deleted_at: Optional[datetime] = None
download_url: Optional[str] = None # Presigned URL (only when requested)
thumbnail_url: Optional[str] = None # Thumbnail URL for images
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_validator("file_size")
@classmethod
@@ -53,6 +60,13 @@ class FileMetadata(BaseModel):
raise ValueError("File size must be positive")
return v
@field_serializer("uploaded_at", "deleted_at")
def serialize_datetime(self, dt: Optional[datetime]) -> Optional[str]:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
if dt is None:
return None
return dt.isoformat() + "Z"
class FileListResponse(BaseModel):
"""Paginated file list response"""
@@ -62,13 +76,11 @@ class FileListResponse(BaseModel):
offset: int
has_more: bool
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
class FileUploadParams(BaseModel):
"""Parameters for file upload (optional description)"""
description: Optional[str] = Field(None, max_length=500)
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)

View File

@@ -69,11 +69,38 @@ class FileService:
detail="File storage service temporarily unavailable"
)
# Create database record
# Create database record and associated message
try:
# Generate presigned download URL
download_url = minio_service.generate_presigned_url(
bucket=settings.MINIO_BUCKET,
object_path=object_path,
expiry_seconds=3600
)
# For images, the download URL also serves as thumbnail (CSS resized on frontend)
thumbnail_url = download_url if file_type == "image" else None
# Create the associated chat message first
message = FileService.create_file_reference_message(
db=db,
room_id=room_id,
sender_id=uploader_id,
file_id=file_id,
filename=file.filename,
file_type=file_type,
mime_type=mime_type,
file_size=file_size,
file_url=download_url,
thumbnail_url=thumbnail_url,
description=description
)
# Create file record with message_id reference
room_file = RoomFile(
file_id=file_id,
room_id=room_id,
message_id=message.message_id,
uploader_id=uploader_id,
filename=file.filename,
file_type=file_type,
@@ -88,20 +115,15 @@ class FileService:
db.commit()
db.refresh(room_file)
# Generate presigned download URL
download_url = minio_service.generate_presigned_url(
bucket=settings.MINIO_BUCKET,
object_path=object_path,
expiry_seconds=3600
)
return FileUploadResponse(
file_id=file_id,
message_id=message.message_id,
filename=file.filename,
file_type=file_type,
file_size=file_size,
mime_type=mime_type,
download_url=download_url,
thumbnail_url=thumbnail_url,
uploaded_at=room_file.uploaded_at,
uploader_id=uploader_id
)
@@ -160,12 +182,17 @@ class FileService:
file_id: str,
user_id: str,
is_room_owner: bool = False
) -> Optional[RoomFile]:
"""Soft delete file"""
) -> tuple[Optional[RoomFile], Optional[str]]:
"""
Soft delete file and its associated message.
Returns:
Tuple of (deleted_file, deleted_message_id) or (None, None) if not found
"""
file = db.query(RoomFile).filter(RoomFile.file_id == file_id).first()
if not file:
return None
return None, None
# Check permissions
if not is_room_owner and file.uploader_id != user_id:
@@ -174,12 +201,21 @@ class FileService:
detail="Only file uploader or room owner can delete files"
)
# Soft delete
deleted_message_id = None
# Soft delete the associated message if it exists
if file.message_id:
message = db.query(Message).filter(Message.message_id == file.message_id).first()
if message and message.deleted_at is None:
message.deleted_at = datetime.utcnow()
deleted_message_id = message.message_id
# Soft delete the file
file.deleted_at = datetime.utcnow()
db.commit()
db.refresh(file)
return file
return file, deleted_message_id
@staticmethod
def check_room_membership(db: Session, room_id: str, user_id: str) -> Optional[RoomMember]:
@@ -205,7 +241,10 @@ class FileService:
file_id: str,
filename: str,
file_type: str,
mime_type: str,
file_size: int,
file_url: str,
thumbnail_url: Optional[str] = None,
description: Optional[str] = None
) -> Message:
"""
@@ -218,7 +257,10 @@ class FileService:
file_id: File ID in room_files table
filename: Original filename
file_type: Type of file (image, document, log)
mime_type: MIME type of the file
file_size: File size in bytes
file_url: Presigned download URL
thumbnail_url: Presigned thumbnail URL for images
description: Optional description for the file
Returns:
@@ -237,9 +279,15 @@ class FileService:
"file_id": file_id,
"file_url": file_url,
"filename": filename,
"file_type": file_type
"file_type": file_type,
"mime_type": mime_type,
"file_size": file_size
}
# Add thumbnail URL for images
if thumbnail_url:
metadata["thumbnail_url"] = thumbnail_url
# Use MessageService to create the message
return MessageService.create_message(
db=db,
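
For reference, the message_metadata written by create_file_reference_message now carries enough for the new FileMessage frontend component to render a preview without an extra fetch. A hedged sketch with illustrative values (keys as in the diff above):

file_message_metadata = {
    "file_id": "placeholder-uuid",                     # RoomFile.file_id
    "file_url": "https://minio.example/presigned",     # illustrative presigned URL
    "filename": "line3_error.pdf",
    "file_type": "document",                           # image, document, or log
    "mime_type": "application/pdf",
    "file_size": 482133,                               # bytes
}
if file_message_metadata["file_type"] == "image":
    # images reuse the download URL as thumbnail_url (CSS-resized on the frontend)
    file_message_metadata["thumbnail_url"] = file_message_metadata["file_url"]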

View File

@@ -1,7 +1,8 @@
"""File validation utilities"""
import magic
import os
from fastapi import UploadFile, HTTPException
from typing import Set
from typing import Set, Dict
import logging
from app.core.config import get_settings
@@ -17,7 +18,15 @@ IMAGE_TYPES: Set[str] = {
}
DOCUMENT_TYPES: Set[str] = {
"application/pdf"
"application/pdf",
"application/x-pdf", # Some systems detect PDF as x-pdf
}
# Extensions that can be accepted even if MIME detection fails
EXTENSION_FALLBACK: Dict[str, str] = {
".pdf": "application/pdf",
".doc": "application/msword",
".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
}
LOG_TYPES: Set[str] = {
@@ -67,6 +76,17 @@ def validate_file_type(file: UploadFile, allowed_types: Set[str]) -> str:
detected_mime = detect_mime_type(header)
if detected_mime not in allowed_types:
# Try extension fallback for known safe file types
filename = file.filename or ""
_, ext = os.path.splitext(filename.lower())
if ext in EXTENSION_FALLBACK:
logger.info(
f"MIME detection returned {detected_mime} for {filename}, "
f"using extension fallback: {EXTENSION_FALLBACK[ext]}"
)
return EXTENSION_FALLBACK[ext]
raise HTTPException(
status_code=400,
detail=f"File type not allowed: {detected_mime}. Allowed types: {', '.join(allowed_types)}"
@@ -115,9 +135,12 @@ def get_file_type_and_limits(mime_type: str) -> tuple[str, int]:
Raises:
HTTPException if MIME type not recognized
"""
# Include extension fallback types as documents
document_types_extended = DOCUMENT_TYPES | set(EXTENSION_FALLBACK.values())
if mime_type in IMAGE_TYPES:
return ("image", settings.get_image_max_size_bytes())
elif mime_type in DOCUMENT_TYPES:
elif mime_type in document_types_extended:
return ("document", settings.get_document_max_size_bytes())
elif mime_type in LOG_TYPES:
return ("log", settings.get_log_max_size_bytes())

View File

@@ -1,10 +1,17 @@
"""Pydantic schemas for WebSocket messages and REST API"""
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, ConfigDict, field_serializer
from typing import Optional, Dict, Any, List
from datetime import datetime
from enum import Enum
def serialize_datetime_utc(dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
if dt is None:
return None
return dt.isoformat() + "Z"
class MessageTypeEnum(str, Enum):
"""Message type enumeration for validation"""
TEXT = "text"
@@ -89,6 +96,13 @@ class MessageBroadcast(BaseModel):
deleted_at: Optional[datetime] = None
sequence_number: int
@field_serializer("created_at", "edited_at", "deleted_at")
def serialize_datetime(self, dt: Optional[datetime]) -> Optional[str]:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
if dt is None:
return None
return dt.isoformat() + "Z"
class SystemMessageBroadcast(BaseModel):
"""System message broadcast"""
@@ -99,6 +113,11 @@ class SystemMessageBroadcast(BaseModel):
timestamp: datetime
data: Optional[Dict[str, Any]] = None
@field_serializer("timestamp")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
class TypingBroadcast(BaseModel):
"""Typing indicator broadcast"""
@@ -115,6 +134,11 @@ class MessageAck(BaseModel):
sequence_number: int
timestamp: datetime
@field_serializer("timestamp")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
class ErrorMessage(BaseModel):
"""Error message"""
@@ -145,16 +169,25 @@ class MessageResponse(BaseModel):
sender_display_name: Optional[str] = None # Display name from users table
content: str
message_type: MessageTypeEnum
metadata: Optional[Dict[str, Any]] = Field(None, alias="message_metadata")
# Use validation_alias to read from ORM's message_metadata, but serialize as "metadata"
metadata: Optional[Dict[str, Any]] = Field(None, validation_alias="message_metadata")
created_at: datetime
edited_at: Optional[datetime] = None
deleted_at: Optional[datetime] = None
sequence_number: int
reaction_counts: Optional[Dict[str, int]] = None # emoji -> count
class Config:
from_attributes = True
populate_by_name = True # Allow both 'metadata' and 'message_metadata'
model_config = ConfigDict(
from_attributes=True,
populate_by_name=True,
)
@field_serializer("created_at", "edited_at", "deleted_at")
def serialize_datetime(self, dt: Optional[datetime]) -> Optional[str]:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
if dt is None:
return None
return dt.isoformat() + "Z"
class MessageListResponse(BaseModel):
@@ -179,8 +212,12 @@ class ReactionResponse(BaseModel):
emoji: str
created_at: datetime
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_serializer("created_at")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
class ReactionSummary(BaseModel):
@@ -195,12 +232,18 @@ class OnlineUser(BaseModel):
user_id: str
connected_at: datetime
@field_serializer("connected_at")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
# File Upload WebSocket Schemas
class FileUploadedBroadcast(BaseModel):
"""Broadcast when a file is uploaded to a room"""
type: str = "file_uploaded"
file_id: str
message_id: Optional[str] = None # Associated chat message ID
room_id: str
uploader_id: str
filename: str
@@ -208,6 +251,7 @@ class FileUploadedBroadcast(BaseModel):
file_size: int
mime_type: str
download_url: Optional[str] = None
thumbnail_url: Optional[str] = None # Thumbnail URL for images
uploaded_at: datetime
def to_dict(self) -> dict:
@@ -215,6 +259,7 @@ class FileUploadedBroadcast(BaseModel):
return {
"type": self.type,
"file_id": self.file_id,
"message_id": self.message_id,
"room_id": self.room_id,
"uploader_id": self.uploader_id,
"filename": self.filename,
@@ -222,7 +267,8 @@ class FileUploadedBroadcast(BaseModel):
"file_size": self.file_size,
"mime_type": self.mime_type,
"download_url": self.download_url,
"uploaded_at": self.uploaded_at.isoformat()
"thumbnail_url": self.thumbnail_url,
"uploaded_at": self.uploaded_at.isoformat() + "Z"
}
@@ -249,6 +295,7 @@ class FileDeletedBroadcast(BaseModel):
"""Broadcast when a file is deleted from a room"""
type: str = "file_deleted"
file_id: str
message_id: Optional[str] = None # Associated chat message ID (also deleted)
room_id: str
deleted_by: str
deleted_at: datetime
@@ -258,7 +305,27 @@ class FileDeletedBroadcast(BaseModel):
return {
"type": self.type,
"file_id": self.file_id,
"message_id": self.message_id,
"room_id": self.room_id,
"deleted_by": self.deleted_by,
"deleted_at": self.deleted_at.isoformat()
"deleted_at": self.deleted_at.isoformat() + "Z"
}
class MessageDeletedBroadcast(BaseModel):
"""Broadcast when a message is deleted"""
type: str = "message_deleted"
message_id: str
room_id: str
deleted_by: str
deleted_at: datetime
def to_dict(self) -> dict:
"""Convert to dictionary for WebSocket broadcast"""
return {
"type": self.type,
"message_id": self.message_id,
"room_id": self.room_id,
"deleted_by": self.deleted_by,
"deleted_at": self.deleted_at.isoformat() + "Z"
}
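
The validation_alias switch in MessageResponse is subtle: the model keeps reading the ORM attribute message_metadata while serializing the field under its own name metadata, whereas a plain alias can also surface as message_metadata in responses because FastAPI dumps response models by alias. A hedged sketch with an illustrative model and an ORM stand-in:

from typing import Any, Dict, Optional
from pydantic import BaseModel, ConfigDict, Field

class MsgOut(BaseModel):
    model_config = ConfigDict(from_attributes=True, populate_by_name=True)
    metadata: Optional[Dict[str, Any]] = Field(None, validation_alias="message_metadata")

class OrmRow:                        # stand-in for the SQLAlchemy Message row
    message_metadata = {"file_id": "abc"}

m = MsgOut.model_validate(OrmRow())
assert m.model_dump() == {"metadata": {"file_id": "abc"}}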

View File

@@ -14,7 +14,8 @@ settings = get_settings()
def json_serializer(obj: Any) -> str:
"""Custom JSON serializer for objects not serializable by default json code"""
if isinstance(obj, datetime):
return obj.isoformat()
# Append 'Z' to indicate UTC so JavaScript parses it correctly
return obj.isoformat() + 'Z'
raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")

View File

@@ -90,7 +90,7 @@ class GeneratedReport(Base):
)
# Relationship
room = relationship("IncidentRoom", backref="reports")
room = relationship("IncidentRoom", back_populates="reports")
# Indexes
__table_args__ = (

View File

@@ -4,7 +4,18 @@ Contains the prompt construction logic for building the user query
sent to DIFY Chat API.
"""
from typing import List, Dict, Any
from datetime import datetime
from datetime import datetime, timezone, timedelta
# Taiwan timezone (GMT+8)
TZ_GMT8 = timezone(timedelta(hours=8))
def _to_gmt8(dt: datetime) -> datetime:
"""Convert datetime to GMT+8 timezone"""
if dt.tzinfo is None:
# Assume UTC if no timezone
dt = dt.replace(tzinfo=timezone.utc)
return dt.astimezone(TZ_GMT8)
INCIDENT_TYPE_MAP = {
@@ -81,11 +92,11 @@ def _format_room_info(room_data: Dict[str, Any]) -> str:
created_at = room_data.get("created_at")
if isinstance(created_at, datetime):
created_at = created_at.strftime("%Y-%m-%d %H:%M")
created_at = _to_gmt8(created_at).strftime("%Y-%m-%d %H:%M")
resolved_at = room_data.get("resolved_at")
if isinstance(resolved_at, datetime):
resolved_at = resolved_at.strftime("%Y-%m-%d %H:%M")
resolved_at = _to_gmt8(resolved_at).strftime("%Y-%m-%d %H:%M")
elif resolved_at is None:
resolved_at = "尚未解決"
@@ -145,7 +156,7 @@ def _format_messages(messages: List[Dict[str, Any]]) -> str:
created_at = msg.get("created_at")
if isinstance(created_at, datetime):
time_str = created_at.strftime("%Y-%m-%d %H:%M")
time_str = _to_gmt8(created_at).strftime("%Y-%m-%d %H:%M")
else:
time_str = str(created_at) if created_at else "未知時間"
@@ -164,26 +175,58 @@ def _format_messages(messages: List[Dict[str, Any]]) -> str:
def _format_files(files: List[Dict[str, Any]]) -> str:
"""Format file attachments section"""
"""Format file attachments section with context
Each file now includes:
- caption: User-provided description when uploading
- context_before: The message sent before this file
- context_after: The message sent after this file
This helps AI understand the context of each attachment.
"""
lines = ["## 附件清單"]
lines.append("每個附件包含上傳時的說明文字以及上下文訊息,幫助理解該附件的用途。")
lines.append("")
if not files:
lines.append("無附件")
return "\n".join(lines)
for f in files:
for i, f in enumerate(files, 1):
filename = f.get("filename", "未命名檔案")
file_type = f.get("file_type", "file")
uploader = f.get("uploader_name") or f.get("uploaded_by", "未知")
caption = f.get("caption") # User-provided description
context_before = f.get("context_before")
context_after = f.get("context_after")
uploaded_at = f.get("uploaded_at")
if isinstance(uploaded_at, datetime):
time_str = uploaded_at.strftime("%Y-%m-%d %H:%M")
time_str = _to_gmt8(uploaded_at).strftime("%Y-%m-%d %H:%M")
else:
time_str = str(uploaded_at) if uploaded_at else ""
type_label = "圖片" if file_type == "image" else "檔案"
lines.append(f"- [{type_label}] {filename} (由 {uploader}{time_str} 上傳)")
# Basic file info
lines.append(f"### 附件 {i}: {filename}")
lines.append(f"- 類型: {type_label}")
lines.append(f"- 上傳者: {uploader}")
lines.append(f"- 上傳時間: {time_str}")
# Caption/description if provided
if caption:
lines.append(f"- 說明: {caption}")
# Context messages to help AI understand when/why file was uploaded
if context_before or context_after:
lines.append("- 上下文:")
if context_before:
lines.append(f" - 前一則訊息: [{context_before['sender']}]: {context_before['content']}")
if context_after:
lines.append(f" - 後一則訊息: [{context_after['sender']}]: {context_after['content']}")
lines.append("") # Blank line between files
return "\n".join(lines)

View File

@@ -2,7 +2,7 @@
Request and response models for the report generation endpoints.
"""
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, ConfigDict, field_serializer
from typing import Optional, List
from datetime import datetime
from enum import Enum
@@ -45,8 +45,12 @@ class ReportStatusResponse(BaseModel):
prompt_tokens: Optional[int] = None
completion_tokens: Optional[int] = None
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_serializer("generated_at")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
class ReportListItem(BaseModel):
@@ -57,8 +61,12 @@ class ReportListItem(BaseModel):
status: ReportStatus
report_title: Optional[str] = None
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
@field_serializer("generated_at")
def serialize_datetime(self, dt: datetime) -> str:
"""Serialize datetime with 'Z' suffix to indicate UTC"""
return dt.isoformat() + "Z"
class ReportListResponse(BaseModel):

View File

@@ -9,8 +9,21 @@ Creates .docx reports using python-docx with:
import io
import logging
from typing import Dict, Any, List, Optional
from datetime import datetime
from datetime import datetime, timezone, timedelta
from docx import Document
# Taiwan timezone (GMT+8)
TZ_GMT8 = timezone(timedelta(hours=8))
def _to_gmt8(dt: datetime) -> datetime:
"""Convert datetime to GMT+8 timezone"""
if dt.tzinfo is None:
# Assume UTC if no timezone
dt = dt.replace(tzinfo=timezone.utc)
return dt.astimezone(TZ_GMT8)
from docx.shared import Inches, Pt, RGBColor
from docx.enum.text import WD_ALIGN_PARAGRAPH
from docx.enum.style import WD_STYLE_TYPE
@@ -128,11 +141,11 @@ class DocxAssemblyService:
run.font.size = TITLE_SIZE
run.font.bold = True
# Add generation timestamp
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M")
# Add generation timestamp in GMT+8
timestamp = datetime.now(TZ_GMT8).strftime("%Y-%m-%d %H:%M")
subtitle = doc.add_paragraph()
subtitle.alignment = WD_ALIGN_PARAGRAPH.CENTER
run = subtitle.add_run(f"報告產生時間:{timestamp}")
run = subtitle.add_run(f"報告產生時間:{timestamp} (GMT+8)")
run.font.size = Pt(10)
run.font.color.rgb = RGBColor(128, 128, 128)
@@ -160,19 +173,19 @@ class DocxAssemblyService:
cells[2].text = "發生地點"
cells[3].text = room_data.get("location") or "未指定"
# Row 3: Created and Resolved times
# Row 3: Created and Resolved times (in GMT+8)
cells = table.rows[2].cells
cells[0].text = "建立時間"
created_at = room_data.get("created_at")
if isinstance(created_at, datetime):
cells[1].text = created_at.strftime("%Y-%m-%d %H:%M")
cells[1].text = _to_gmt8(created_at).strftime("%Y-%m-%d %H:%M")
else:
cells[1].text = str(created_at) if created_at else "未知"
cells[2].text = "解決時間"
resolved_at = room_data.get("resolved_at")
if isinstance(resolved_at, datetime):
cells[3].text = resolved_at.strftime("%Y-%m-%d %H:%M")
cells[3].text = _to_gmt8(resolved_at).strftime("%Y-%m-%d %H:%M")
elif resolved_at:
cells[3].text = str(resolved_at)
else:
@@ -327,13 +340,24 @@ class DocxAssemblyService:
# Add image to document
doc.add_picture(image_data, width=Inches(5))
# Add caption
# Add caption (user-provided description or filename)
user_caption = f.get("caption") # User-provided description
filename = f.get("filename", "圖片")
caption = doc.add_paragraph()
caption.alignment = WD_ALIGN_PARAGRAPH.CENTER
run = caption.add_run(f"{f.get('filename', '圖片')}")
# Show filename first
run = caption.add_run(filename)
run.font.size = Pt(9)
run.font.italic = True
# Add user caption if provided
if user_caption:
caption.add_run("\n")
desc_run = caption.add_run(user_caption)
desc_run.font.size = Pt(10)
doc.add_paragraph() # Spacing
else:
# Image download failed, add note
@@ -344,7 +368,7 @@ class DocxAssemblyService:
doc.add_paragraph(f"[圖片嵌入失敗: {f.get('filename', '未知')}]")
def _add_file_list_section(self, doc: Document, files: List[Dict[str, Any]]):
"""Add file attachment list section"""
"""Add file attachment list section with captions"""
doc.add_heading("附件清單", level=1)
if not files:
@@ -352,7 +376,7 @@ class DocxAssemblyService:
return
# Create file list table
table = doc.add_table(rows=len(files) + 1, cols=4)
table = doc.add_table(rows=len(files) + 1, cols=5)
table.style = "Table Grid"
# Header row
@@ -361,6 +385,7 @@ class DocxAssemblyService:
header[1].text = "類型"
header[2].text = "上傳者"
header[3].text = "上傳時間"
header[4].text = "說明"
for cell in header:
for run in cell.paragraphs[0].runs:
run.font.bold = True
@@ -382,10 +407,13 @@ class DocxAssemblyService:
uploaded_at = f.get("uploaded_at")
if isinstance(uploaded_at, datetime):
row[3].text = uploaded_at.strftime("%Y-%m-%d %H:%M")
row[3].text = _to_gmt8(uploaded_at).strftime("%Y-%m-%d %H:%M")
else:
row[3].text = str(uploaded_at) if uploaded_at else ""
# Caption/description column
row[4].text = f.get("caption", "") or ""
def _download_file(self, object_path: str) -> Optional[io.BytesIO]:
"""Download file from MinIO
@@ -431,9 +459,9 @@ class DocxAssemblyService:
lines.append(f"# 事件報告:{title}")
lines.append("")
# Generation timestamp
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M")
lines.append(f"*報告產生時間:{timestamp}*")
# Generation timestamp in GMT+8
timestamp = datetime.now(TZ_GMT8).strftime("%Y-%m-%d %H:%M")
lines.append(f"*報告產生時間:{timestamp} (GMT+8)*")
lines.append("")
# Metadata section
@@ -455,13 +483,13 @@ class DocxAssemblyService:
created_at = room_data.get("created_at")
if isinstance(created_at, datetime):
lines.append(f"| 建立時間 | {created_at.strftime('%Y-%m-%d %H:%M')} |")
lines.append(f"| 建立時間 | {_to_gmt8(created_at).strftime('%Y-%m-%d %H:%M')} |")
else:
lines.append(f"| 建立時間 | {str(created_at) if created_at else '未知'} |")
resolved_at = room_data.get("resolved_at")
if isinstance(resolved_at, datetime):
lines.append(f"| 解決時間 | {resolved_at.strftime('%Y-%m-%d %H:%M')} |")
lines.append(f"| 解決時間 | {_to_gmt8(resolved_at).strftime('%Y-%m-%d %H:%M')} |")
elif resolved_at:
lines.append(f"| 解決時間 | {str(resolved_at)} |")
else:
@@ -561,8 +589,8 @@ class DocxAssemblyService:
if files:
lines.append("## 附件清單")
lines.append("")
lines.append("| 檔案名稱 | 類型 | 上傳者 | 上傳時間 |")
lines.append("|----------|------|--------|----------|")
lines.append("| 檔案名稱 | 類型 | 上傳者 | 上傳時間 | 說明 |")
lines.append("|----------|------|--------|----------|------|")
file_type_map = {
"image": "圖片",
@@ -577,10 +605,13 @@ class DocxAssemblyService:
uploader = f.get("uploader_name") or f.get("uploader_id", "")
uploaded_at = f.get("uploaded_at")
if isinstance(uploaded_at, datetime):
uploaded_text = uploaded_at.strftime("%Y-%m-%d %H:%M")
uploaded_text = _to_gmt8(uploaded_at).strftime("%Y-%m-%d %H:%M")
else:
uploaded_text = str(uploaded_at) if uploaded_at else ""
lines.append(f"| {filename} | {type_text} | {uploader} | {uploaded_text} |")
caption = f.get("caption", "") or ""
# Escape pipe characters in caption
caption = caption.replace("|", "\\|")
lines.append(f"| {filename} | {type_text} | {uploader} | {uploaded_text} | {caption} |")
lines.append("")
return "\n".join(lines)

View File

@@ -13,7 +13,7 @@ from sqlalchemy.orm import Session
from sqlalchemy import desc
from app.modules.chat_room.models import IncidentRoom, RoomMember
from app.modules.realtime.models import Message
from app.modules.realtime.models import Message, MessageType
from app.modules.file_storage.models import RoomFile
from app.modules.auth.models import User
@@ -38,9 +38,17 @@ class MemberData:
role: str
@dataclass
class FileContextMessage:
"""Context message near a file upload"""
sender_name: str
content: str
created_at: datetime
@dataclass
class FileData:
"""File data for report generation"""
"""File data for report generation with context"""
file_id: str
filename: str
file_type: str
@@ -49,6 +57,10 @@ class FileData:
uploader_id: str
uploader_name: str
minio_object_path: str
# File context - the description/caption and surrounding messages
message_content: Optional[str] = None # Caption/description from file upload message
context_before: Optional[FileContextMessage] = None # Message before file
context_after: Optional[FileContextMessage] = None # Message after file
@dataclass
@@ -173,7 +185,7 @@ class ReportDataService:
return members
def _collect_files(self, room_id: str) -> List[FileData]:
"""Collect room files with uploader display names"""
"""Collect room files with uploader display names and message context"""
results = (
self.db.query(RoomFile, User.display_name)
.outerjoin(User, RoomFile.uploader_id == User.user_id)
@@ -185,6 +197,31 @@ class ReportDataService:
files = []
for f, display_name in results:
# Get file message content (caption/description)
message_content = None
context_before = None
context_after = None
if f.message_id:
# Get the file's message to extract caption
file_message = self.db.query(Message).filter(
Message.message_id == f.message_id
).first()
if file_message:
# Extract caption (content that's not default [Image] or [File] prefix)
content = file_message.content
if not content.startswith("[Image]") and not content.startswith("[File]"):
message_content = content
# Get context: 1 message before and 1 after the file message
context_before = self._get_context_message(
room_id, file_message.sequence_number, before=True
)
context_after = self._get_context_message(
room_id, file_message.sequence_number, before=False
)
files.append(FileData(
file_id=f.file_id,
filename=f.filename,
@@ -192,12 +229,45 @@ class ReportDataService:
mime_type=f.mime_type,
uploaded_at=f.uploaded_at,
uploader_id=f.uploader_id,
uploader_name=display_name or f.uploader_id, # Fallback to uploader_id
uploader_name=display_name or f.uploader_id,
minio_object_path=f.minio_object_path,
message_content=message_content,
context_before=context_before,
context_after=context_after,
))
return files
def _get_context_message(
self, room_id: str, sequence_number: int, before: bool = True
) -> Optional[FileContextMessage]:
"""Get a context message before or after a given sequence number"""
query = (
self.db.query(Message, User.display_name)
.outerjoin(User, Message.sender_id == User.user_id)
.filter(Message.room_id == room_id)
.filter(Message.deleted_at.is_(None))
.filter(Message.message_type.in_([MessageType.TEXT, MessageType.SYSTEM])) # Only text context
)
if before:
query = query.filter(Message.sequence_number < sequence_number)
query = query.order_by(desc(Message.sequence_number))
else:
query = query.filter(Message.sequence_number > sequence_number)
query = query.order_by(Message.sequence_number)
result = query.first()
if result:
msg, display_name = result
return FileContextMessage(
sender_name=display_name or msg.sender_id,
content=msg.content,
created_at=msg.created_at,
)
return None
def to_prompt_dict(self, data: RoomReportData) -> Dict[str, Any]:
"""Convert RoomReportData to dictionary format for prompt builder
@@ -244,8 +314,9 @@ class ReportDataService:
for m in data.members
]
files = [
{
files = []
for f in data.files:
file_dict = {
"file_id": f.file_id,
"filename": f.filename,
"file_type": f.file_type,
@@ -254,9 +325,20 @@ class ReportDataService:
"uploader_id": f.uploader_id,
"uploader_name": f.uploader_name,
"minio_object_path": f.minio_object_path,
"caption": f.message_content, # User-provided caption/description
}
for f in data.files
]
# Add context if available
if f.context_before:
file_dict["context_before"] = {
"sender": f.context_before.sender_name,
"content": f.context_before.content,
}
if f.context_after:
file_dict["context_after"] = {
"sender": f.context_after.sender_name,
"content": f.context_after.content,
}
files.append(file_dict)
return {
"room_data": room_data,