OCR/backend/app/models/translation_log.py
Commit ee49751c38 by egg
fix: add UTC timezone indicator to all datetime serialization
The database stores times in UTC, but they were serialized without
timezone info, causing the frontend to misinterpret them as local time.
All datetime fields now include a 'Z' suffix to indicate UTC, enabling
proper timezone conversion in the browser.

- Add UTCDatetimeBaseModel base class for Pydantic schemas (see the sketch below)
- Update model to_dict() methods to append 'Z' suffix
- Affects: tasks, users, sessions, audit logs, translations

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-14 15:48:17 +08:00
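The UTCDatetimeBaseModel referenced in the commit message is defined elsewhere in the codebase and is not shown in this file. As a hedged illustration only, here is a minimal sketch of how such a base class could attach the 'Z' suffix, assuming Pydantic v2; the class name comes from the commit message, while the serializer details are assumptions:

from datetime import datetime
from typing import Any

from pydantic import BaseModel, field_serializer


class UTCDatetimeBaseModel(BaseModel):
    """Sketch: serialize naive UTC datetimes as ISO 8601 strings with a trailing 'Z'."""

    @field_serializer("*", when_used="json")
    def _suffix_utc_datetimes(self, value: Any) -> Any:
        # Only rewrite naive datetimes; every other value passes through unchanged.
        if isinstance(value, datetime) and value.tzinfo is None:
            return value.isoformat() + "Z"
        return value


# Illustrative use with a hypothetical response schema:
# class TranslationLogOut(UTCDatetimeBaseModel):
#     id: int
#     created_at: datetime
#
# TranslationLogOut(id=1, created_at=datetime(2025, 12, 14, 7, 48, 17)).model_dump_json()
# -> '{"id":1,"created_at":"2025-12-14T07:48:17Z"}'

The SQLAlchemy model below applies the same convention by hand in its to_dict() method.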

OCR/backend/app/models/translation_log.py

"""
Tool_OCR - Translation Log Model
Tracks translation usage statistics for billing and monitoring
"""
from datetime import datetime

from sqlalchemy import Column, DateTime, Float, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from app.core.database import Base


class TranslationLog(Base):
    """
    Translation log model for tracking API usage and costs.

    Each record represents a single translation job completion,
    storing token usage and estimated costs for billing purposes.
    """
    __tablename__ = "tool_ocr_translation_logs"

    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey("tool_ocr_users.id", ondelete="CASCADE"),
                     nullable=False, index=True,
                     comment="Foreign key to users table")
    task_id = Column(String(255), nullable=False, index=True,
                     comment="Task UUID that was translated")
    target_lang = Column(String(10), nullable=False, index=True,
                         comment="Target language code (e.g., 'en', 'ja', 'zh-TW')")
    source_lang = Column(String(10), nullable=True,
                         comment="Source language code (or 'auto')")

    # Token usage statistics
    input_tokens = Column(Integer, default=0, nullable=False,
                          comment="Number of input tokens used")
    output_tokens = Column(Integer, default=0, nullable=False,
                           comment="Number of output tokens generated")
    total_tokens = Column(Integer, default=0, nullable=False,
                          comment="Total tokens (input + output)")

    # Translation statistics
    total_elements = Column(Integer, default=0, nullable=False,
                            comment="Total elements in document")
    translated_elements = Column(Integer, default=0, nullable=False,
                                 comment="Number of elements translated")
    total_characters = Column(Integer, default=0, nullable=False,
                              comment="Total characters translated")

    # Cost tracking (estimated based on token pricing)
    estimated_cost = Column(Float, default=0.0, nullable=False,
                            comment="Estimated cost in USD")

    # Processing info
    processing_time_seconds = Column(Float, default=0.0, nullable=False,
                                     comment="Translation processing time")
    provider = Column(String(50), default="dify", nullable=False,
                      comment="Translation provider (e.g., 'dify')")

    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)

    # Relationships
    user = relationship("User", back_populates="translation_logs")

    def __repr__(self):
        return (f"<TranslationLog(id={self.id}, task_id='{self.task_id}', "
                f"target_lang='{self.target_lang}', tokens={self.total_tokens})>")

    def to_dict(self):
        """Convert translation log to dictionary.

        All datetime fields are serialized with 'Z' suffix to indicate UTC timezone.
        """
        return {
            "id": self.id,
            "user_id": self.user_id,
            "task_id": self.task_id,
            "target_lang": self.target_lang,
            "source_lang": self.source_lang,
            "input_tokens": self.input_tokens,
            "output_tokens": self.output_tokens,
            "total_tokens": self.total_tokens,
            "total_elements": self.total_elements,
            "translated_elements": self.translated_elements,
            "total_characters": self.total_characters,
            "estimated_cost": self.estimated_cost,
            "processing_time_seconds": self.processing_time_seconds,
            "provider": self.provider,
            "created_at": (self.created_at.isoformat() + 'Z') if self.created_at else None
        }
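
A short usage sketch follows, for illustration only: the session factory name, IDs, and numbers are assumptions, not values taken from this repository.

# Hypothetical usage: log a completed translation job, then serialize it.
from app.core.database import SessionLocal  # assumed session factory name
from sqlalchemy import func

with SessionLocal() as session:
    log = TranslationLog(
        user_id=1,
        task_id="3f8a1c2e-9d47-4b6a-8c21-5e0f7a9d1b34",  # illustrative task UUID
        target_lang="ja",
        source_lang="auto",
        input_tokens=1200,
        output_tokens=950,
        total_tokens=2150,
        total_elements=40,
        translated_elements=38,
        total_characters=5120,
        estimated_cost=0.0043,
        processing_time_seconds=12.7,
        provider="dify",
    )
    session.add(log)
    session.commit()
    session.refresh(log)

    # created_at is a naive UTC datetime (datetime.utcnow), so to_dict() appends 'Z':
    print(log.to_dict()["created_at"])  # e.g. "2025-12-14T07:48:17.123456Z"

    # Billing-style aggregation over the same table:
    total_cost = session.query(func.sum(TranslationLog.estimated_cost)).filter(
        TranslationLog.user_id == 1
    ).scalar()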