feat: add document translation via DIFY AI API
Implement document translation feature using DIFY AI API with batch processing:

Backend:
- Add DIFY client with batch translation support (5000 chars, 20 items per batch)
- Add translation service with element extraction and result building
- Add translation router with start/status/result/list/delete endpoints
- Add translation schemas (TranslationRequest, TranslationStatus, etc.)

Frontend:
- Enable translation UI in TaskDetailPage
- Add translation API methods to apiV2.ts
- Add translation types

Features:
- Batch translation with numbered markers [1], [2], [3]...
- Support for text, title, header, footer, paragraph, footnote, table cells
- Translation result JSON with statistics (tokens, latency, batch_count)
- Background task processing with progress tracking

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
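For reference, the translation result JSON mentioned above is written alongside the source result JSON as <stem>_translated_<target_lang>.json (with any '_result' suffix stripped from the stem). A rough sketch of its shape, following build_translation_result in the diff below; the element IDs and values here are illustrative, not part of the commit:

{
  "schema_version": "1.0.0",
  "source_document": "example.pdf",
  "source_lang": "auto",
  "target_lang": "zh-TW",
  "provider": "dify",
  "translated_at": "2025-01-01T00:00:00Z",
  "statistics": {
    "total_elements": 42,
    "translated_elements": 30,
    "skipped_elements": 12,
    "total_characters": 8500,
    "processing_time_seconds": 12.3,
    "total_tokens": 4100,
    "batch_count": 3
  },
  "translations": {
    "elem_001": "translated paragraph text",
    "elem_002": {"cells": [{"row": 0, "col": 0, "content": "translated cell"}]}
  }
}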
backend/app/services/dify_client.py | 332 lines (new file)
@@ -0,0 +1,332 @@
"""
Tool_OCR - DIFY AI Client
HTTP client for DIFY translation API with batch support
"""

import asyncio
import logging
import re
import time
from dataclasses import dataclass, field
from typing import Dict, List, Optional

import httpx

logger = logging.getLogger(__name__)

# DIFY API Configuration
DIFY_BASE_URL = "https://dify.theaken.com/v1"
DIFY_API_KEY = "app-YOPrF2ro5fshzMkCZviIuUJd"
DIFY_TIMEOUT = 120.0  # seconds (increased for batch)
DIFY_MAX_RETRIES = 3

# Batch translation limits
# Conservative limits to avoid gateway timeouts
# DIFY server may have processing time limits
MAX_BATCH_CHARS = 5000
MAX_BATCH_ITEMS = 20

# Language name mapping
LANGUAGE_NAMES = {
    "en": "English",
    "zh-TW": "Traditional Chinese",
    "zh-CN": "Simplified Chinese",
    "ja": "Japanese",
    "ko": "Korean",
    "de": "German",
    "fr": "French",
    "es": "Spanish",
    "pt": "Portuguese",
    "it": "Italian",
    "ru": "Russian",
    "vi": "Vietnamese",
    "th": "Thai",
}


@dataclass
class TranslationResponse:
    """Response from DIFY translation API"""
    translated_text: str
    total_tokens: int
    latency: float
    conversation_id: str


@dataclass
class BatchTranslationResponse:
    """Response from DIFY batch translation API"""
    translations: Dict[int, str]  # marker_id -> translated_text
    total_tokens: int
    latency: float
    conversation_id: str
    missing_markers: List[int] = field(default_factory=list)


class DifyTranslationError(Exception):
    """Error during DIFY API translation"""
    pass


class DifyClient:
    """
    Client for DIFY AI translation API.

    Features:
    - Single and batch translation
    - Blocking mode API calls
    - Automatic retry with exponential backoff
    - Token and latency tracking
    """

    def __init__(
        self,
        base_url: str = DIFY_BASE_URL,
        api_key: str = DIFY_API_KEY,
        timeout: float = DIFY_TIMEOUT,
        max_retries: int = DIFY_MAX_RETRIES
    ):
        self.base_url = base_url
        self.api_key = api_key
        self.timeout = timeout
        self.max_retries = max_retries
        self._total_tokens = 0
        self._total_requests = 0

    def _get_language_name(self, lang_code: str) -> str:
        """Convert language code to full name for prompt"""
        return LANGUAGE_NAMES.get(lang_code, lang_code)

    def _build_prompt(self, text: str, target_lang: str) -> str:
        """Build translation prompt for single text"""
        lang_name = self._get_language_name(target_lang)
        return (
            f"Translate the following text to {lang_name}.\n"
            f"Return ONLY the translated text, no explanations.\n\n"
            f"{text}"
        )

    def _build_batch_prompt(self, texts: List[str], target_lang: str) -> str:
        """
        Build batch translation prompt with numbered markers.

        Format:
            Translate the following texts to {Language}.
            Each text is marked with [N]. Return translations in the same format.
            Return ONLY the translations with their markers, no explanations.

            [1] First text
            [2] Second text
            ...
        """
        lang_name = self._get_language_name(target_lang)

        # Build numbered text list
        numbered_texts = []
        for i, text in enumerate(texts, start=1):
            # Clean text - remove newlines within each item to avoid parsing issues
            clean_text = ' '.join(text.split())
            numbered_texts.append(f"[{i}] {clean_text}")

        texts_block = "\n".join(numbered_texts)

        prompt = (
            f"Translate the following texts to {lang_name}.\n"
            f"Each text is marked with [N]. Return translations in the same format.\n"
            f"Return ONLY the translations with their markers, no explanations.\n\n"
            f"{texts_block}"
        )

        return prompt

    def _parse_batch_response(self, response_text: str, expected_count: int) -> Dict[int, str]:
        """
        Parse batch translation response with numbered markers.

        Expected format:
            [1] 翻譯文字一
            [2] 翻譯文字二
            ...

        Returns:
            Dict mapping marker number to translated text
        """
        translations = {}

        # Pattern to match [N] followed by text until next [N] or end
        # Use DOTALL to match across lines, but be careful with greedy matching
        pattern = r'\[(\d+)\]\s*(.+?)(?=\[\d+\]|$)'
        matches = re.findall(pattern, response_text, re.DOTALL)

        for match in matches:
            try:
                marker_id = int(match[0])
                text = match[1].strip()
                if text:
                    translations[marker_id] = text
            except (ValueError, IndexError):
                continue

        return translations

    def _call_api(self, prompt: str, user_id: str) -> dict:
        """Make API call to DIFY with retry logic"""
        payload = {
            "inputs": {},
            "query": prompt,
            "response_mode": "blocking",
            "conversation_id": "",
            "user": user_id
        }

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }

        last_error = None

        for attempt in range(self.max_retries):
            try:
                with httpx.Client(timeout=self.timeout) as client:
                    response = client.post(
                        f"{self.base_url}/chat-messages",
                        json=payload,
                        headers=headers
                    )

                    if response.status_code != 200:
                        raise DifyTranslationError(
                            f"API returned status {response.status_code}: {response.text}"
                        )

                    return response.json()

            except httpx.TimeoutException as e:
                last_error = e
                logger.warning(f"DIFY API timeout (attempt {attempt + 1}/{self.max_retries})")

            except httpx.RequestError as e:
                last_error = e
                logger.warning(f"DIFY API request error (attempt {attempt + 1}/{self.max_retries}): {e}")

            except Exception as e:
                last_error = e
                logger.warning(f"DIFY API error (attempt {attempt + 1}/{self.max_retries}): {e}")

            # Exponential backoff
            if attempt < self.max_retries - 1:
                wait_time = 2 ** attempt
                logger.info(f"Retrying in {wait_time}s...")
                time.sleep(wait_time)

        raise DifyTranslationError(f"API call failed after {self.max_retries} attempts: {last_error}")

    def translate(
        self,
        text: str,
        target_lang: str,
        user_id: str = "tool-ocr"
    ) -> TranslationResponse:
        """
        Translate single text using DIFY API.

        Args:
            text: Text to translate
            target_lang: Target language code (e.g., 'en', 'zh-TW')
            user_id: User identifier for tracking

        Returns:
            TranslationResponse with translated text and metadata
        """
        prompt = self._build_prompt(text, target_lang)
        data = self._call_api(prompt, user_id)

        # Extract response fields
        translated_text = data.get("answer", "")
        usage = data.get("metadata", {}).get("usage", {})

        self._total_tokens += usage.get("total_tokens", 0)
        self._total_requests += 1

        return TranslationResponse(
            translated_text=translated_text,
            total_tokens=usage.get("total_tokens", 0),
            latency=usage.get("latency", 0.0),
            conversation_id=data.get("conversation_id", "")
        )

    def translate_batch(
        self,
        texts: List[str],
        target_lang: str,
        user_id: str = "tool-ocr"
    ) -> BatchTranslationResponse:
        """
        Translate multiple texts in a single API call.

        Args:
            texts: List of texts to translate
            target_lang: Target language code
            user_id: User identifier for tracking

        Returns:
            BatchTranslationResponse with translations dict and metadata
        """
        if not texts:
            return BatchTranslationResponse(
                translations={},
                total_tokens=0,
                latency=0.0,
                conversation_id=""
            )

        prompt = self._build_batch_prompt(texts, target_lang)

        logger.debug(f"Batch translation: {len(texts)} items, ~{len(prompt)} chars")

        data = self._call_api(prompt, user_id)

        # Extract and parse response
        answer = data.get("answer", "")
        usage = data.get("metadata", {}).get("usage", {})

        translations = self._parse_batch_response(answer, len(texts))

        # Check for missing markers
        missing_markers = []
        for i in range(1, len(texts) + 1):
            if i not in translations:
                missing_markers.append(i)
                logger.warning(f"Missing translation for marker [{i}]")

        self._total_tokens += usage.get("total_tokens", 0)
        self._total_requests += 1

        return BatchTranslationResponse(
            translations=translations,
            total_tokens=usage.get("total_tokens", 0),
            latency=usage.get("latency", 0.0),
            conversation_id=data.get("conversation_id", ""),
            missing_markers=missing_markers
        )

    def get_stats(self) -> dict:
        """Get client statistics"""
        return {
            "total_tokens": self._total_tokens,
            "total_requests": self._total_requests,
            "base_url": self.base_url,
        }


# Global singleton
_dify_client: Optional[DifyClient] = None


def get_dify_client() -> DifyClient:
    """Get the global DifyClient instance"""
    global _dify_client
    if _dify_client is None:
        _dify_client = DifyClient()
    return _dify_client
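For illustration, a minimal usage sketch of the client added above; the sample texts and target language are made up and not part of the commit:

from app.services.dify_client import get_dify_client

client = get_dify_client()

# Internally the texts are sent as one prompt with numbered markers
# ("[1] ...", "[2] ...") and the model's reply is parsed back into a
# {marker_id: translated_text} dict.
response = client.translate_batch(
    texts=["Hello world", "Quarterly report"],
    target_lang="zh-TW",
)

for marker_id, translated in sorted(response.translations.items()):
    print(marker_id, translated)

# Markers the model failed to return are reported rather than silently dropped.
print("missing:", response.missing_markers)
print("tokens:", response.total_tokens, "latency:", response.latency)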
backend/app/services/translation_service.py | 490 lines (new file)
@@ -0,0 +1,490 @@
"""
Tool_OCR - Translation Service
Document translation using DIFY AI API with batch processing
"""

import json
import logging
import threading
import time
from dataclasses import dataclass, field
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

from app.schemas.translation import (
    TranslatableItem,
    TranslatedItem,
    TranslationJobState,
    TranslationProgress,
    TranslationStatusEnum,
)
from app.services.dify_client import (
    DifyClient,
    DifyTranslationError,
    get_dify_client,
    MAX_BATCH_CHARS,
    MAX_BATCH_ITEMS,
)

logger = logging.getLogger(__name__)

# Element types that should be translated
TRANSLATABLE_TEXT_TYPES = {'text', 'title', 'header', 'footer', 'paragraph', 'footnote'}
TABLE_TYPE = 'table'
SKIP_TYPES = {'page_number', 'image', 'chart', 'logo', 'reference'}


@dataclass
class TranslationBatch:
    """A batch of items to translate together"""
    items: List[TranslatableItem] = field(default_factory=list)
    total_chars: int = 0

    def can_add(self, item: TranslatableItem) -> bool:
        """Check if item can be added to this batch"""
        item_chars = len(item.content)
        return (
            len(self.items) < MAX_BATCH_ITEMS and
            self.total_chars + item_chars <= MAX_BATCH_CHARS
        )

    def add(self, item: TranslatableItem):
        """Add item to batch"""
        self.items.append(item)
        self.total_chars += len(item.content)


class TranslationService:
    """
    Main translation service for document translation using DIFY AI.

    Features:
    - Extract translatable elements from UnifiedDocument
    - Batch translation via DIFY API (efficient)
    - Fallback to single-item translation for failures
    - Translation JSON generation
    - Progress tracking
    """

    def __init__(self, dify_client: Optional[DifyClient] = None):
        self.dify_client = dify_client or get_dify_client()
        self._active_jobs: Dict[str, TranslationJobState] = {}
        self._jobs_lock = threading.Lock()
        self._total_tokens = 0
        self._total_latency = 0.0

    def extract_translatable_elements(
        self,
        result_json: Dict
    ) -> Tuple[List[TranslatableItem], int]:
        """
        Extract all translatable elements from a result JSON.

        Args:
            result_json: UnifiedDocument JSON data

        Returns:
            Tuple of (list of TranslatableItem, total element count)
        """
        items = []
        total_elements = 0

        for page in result_json.get('pages', []):
            page_number = page.get('page_number', 1)

            for elem in page.get('elements', []):
                total_elements += 1
                elem_type = elem.get('type', '')
                elem_id = elem.get('element_id', '')
                content = elem.get('content')

                # Skip non-translatable types
                if elem_type in SKIP_TYPES:
                    continue

                # Handle text elements
                if elem_type in TRANSLATABLE_TEXT_TYPES and isinstance(content, str):
                    text = content.strip()
                    if text:  # Skip empty content
                        items.append(TranslatableItem(
                            element_id=elem_id,
                            content=text,
                            element_type=elem_type,
                            page_number=page_number
                        ))

                # Handle table elements
                elif elem_type == TABLE_TYPE and isinstance(content, dict):
                    cells = content.get('cells', [])
                    for cell in cells:
                        cell_content = cell.get('content', '')
                        if isinstance(cell_content, str) and cell_content.strip():
                            row = cell.get('row', 0)
                            col = cell.get('col', 0)
                            items.append(TranslatableItem(
                                element_id=elem_id,
                                content=cell_content.strip(),
                                element_type='table_cell',
                                page_number=page_number,
                                cell_position=(row, col)
                            ))

        logger.info(
            f"Extracted {len(items)} translatable items from {total_elements} elements"
        )
        return items, total_elements

    def create_batches(self, items: List[TranslatableItem]) -> List[TranslationBatch]:
        """
        Create translation batches from items based on character limits.

        Args:
            items: List of TranslatableItem

        Returns:
            List of TranslationBatch
        """
        batches = []
        current_batch = TranslationBatch()

        for item in items:
            if current_batch.can_add(item):
                current_batch.add(item)
            else:
                # Save current batch and start new one
                if current_batch.items:
                    batches.append(current_batch)
                current_batch = TranslationBatch()
                current_batch.add(item)

        # Don't forget the last batch
        if current_batch.items:
            batches.append(current_batch)

        logger.info(
            f"Created {len(batches)} batches from {len(items)} items "
            f"(max {MAX_BATCH_CHARS} chars, max {MAX_BATCH_ITEMS} items per batch)"
        )

        return batches

    def translate_batch(
        self,
        batch: TranslationBatch,
        target_lang: str,
        user_id: str
    ) -> List[TranslatedItem]:
        """
        Translate a batch of items using DIFY API.

        Args:
            batch: TranslationBatch to translate
            target_lang: Target language code
            user_id: User identifier for tracking

        Returns:
            List of TranslatedItem
        """
        if not batch.items:
            return []

        # Extract texts in order
        texts = [item.content for item in batch.items]

        try:
            response = self.dify_client.translate_batch(
                texts=texts,
                target_lang=target_lang,
                user_id=user_id
            )

            self._total_tokens += response.total_tokens
            self._total_latency += response.latency

            # Map translations back to items
            translated_items = []
            for idx, item in enumerate(batch.items):
                marker_id = idx + 1  # Markers are 1-indexed

                if marker_id in response.translations:
                    translated_content = response.translations[marker_id]
                else:
                    # Missing translation - use original
                    logger.warning(f"Missing translation for {item.element_id}, using original")
                    translated_content = item.content

                translated_items.append(TranslatedItem(
                    element_id=item.element_id,
                    original_content=item.content,
                    translated_content=translated_content,
                    element_type=item.element_type,
                    cell_position=item.cell_position
                ))

            return translated_items

        except DifyTranslationError as e:
            logger.error(f"Batch translation failed: {e}")
            # Return items with original content on failure
            return [
                TranslatedItem(
                    element_id=item.element_id,
                    original_content=item.content,
                    translated_content=item.content,  # Keep original
                    element_type=item.element_type,
                    cell_position=item.cell_position
                )
                for item in batch.items
            ]

    def translate_item(
        self,
        item: TranslatableItem,
        target_lang: str,
        user_id: str
    ) -> TranslatedItem:
        """
        Translate a single item using DIFY API (fallback for batch failures).

        Args:
            item: TranslatableItem to translate
            target_lang: Target language code
            user_id: User identifier for tracking

        Returns:
            TranslatedItem with translation result
        """
        try:
            response = self.dify_client.translate(
                text=item.content,
                target_lang=target_lang,
                user_id=user_id
            )

            self._total_tokens += response.total_tokens
            self._total_latency += response.latency

            return TranslatedItem(
                element_id=item.element_id,
                original_content=item.content,
                translated_content=response.translated_text,
                element_type=item.element_type,
                cell_position=item.cell_position
            )

        except DifyTranslationError as e:
            logger.error(f"Translation failed for {item.element_id}: {e}")
            # Return original content on failure
            return TranslatedItem(
                element_id=item.element_id,
                original_content=item.content,
                translated_content=item.content,  # Keep original
                element_type=item.element_type,
                cell_position=item.cell_position
            )

    def build_translation_result(
        self,
        translated_items: List[TranslatedItem],
        source_document: str,
        source_lang: str,
        target_lang: str,
        total_elements: int,
        processing_time: float,
        batch_count: int
    ) -> Dict:
        """
        Build the translation result JSON structure.

        Args:
            translated_items: List of TranslatedItem
            source_document: Source document filename
            source_lang: Source language
            target_lang: Target language
            total_elements: Total elements in document
            processing_time: Processing time in seconds
            batch_count: Number of batches used

        Returns:
            Translation result dictionary
        """
        # Build translations dict
        translations: Dict[str, Any] = {}
        total_chars = 0

        for item in translated_items:
            total_chars += len(item.translated_content)

            if item.element_type == 'table_cell':
                # Group table cells by element_id
                if item.element_id not in translations:
                    translations[item.element_id] = {'cells': []}

                translations[item.element_id]['cells'].append({
                    'row': item.cell_position[0] if item.cell_position else 0,
                    'col': item.cell_position[1] if item.cell_position else 0,
                    'content': item.translated_content
                })
            else:
                translations[item.element_id] = item.translated_content

        # Build statistics
        translated_element_ids = set(item.element_id for item in translated_items)
        skipped = total_elements - len(translated_element_ids)

        result = {
            'schema_version': '1.0.0',
            'source_document': source_document,
            'source_lang': source_lang,
            'target_lang': target_lang,
            'provider': 'dify',
            'translated_at': datetime.utcnow().isoformat() + 'Z',
            'statistics': {
                'total_elements': total_elements,
                'translated_elements': len(translated_element_ids),
                'skipped_elements': skipped,
                'total_characters': total_chars,
                'processing_time_seconds': round(processing_time, 2),
                'total_tokens': self._total_tokens,
                'batch_count': batch_count
            },
            'translations': translations
        }

        return result

    def translate_document(
        self,
        task_id: str,
        result_json_path: Path,
        target_lang: str,
        source_lang: str = 'auto',
        progress_callback: Optional[callable] = None
    ) -> Tuple[bool, Optional[Path], Optional[str]]:
        """
        Translate a document using batch processing and save the result.

        Args:
            task_id: Task ID
            result_json_path: Path to source result.json
            target_lang: Target language (e.g., 'en', 'zh-TW')
            source_lang: Source language ('auto' for detection)
            progress_callback: Optional callback(progress: TranslationProgress)

        Returns:
            Tuple of (success, output_path, error_message)
        """
        start_time = time.time()
        self._total_tokens = 0
        self._total_latency = 0.0

        logger.info(
            f"Starting translation: task_id={task_id}, target={target_lang}"
        )

        try:
            # Load source JSON
            with open(result_json_path, 'r', encoding='utf-8') as f:
                result_json = json.load(f)

            source_document = result_json.get('metadata', {}).get('filename', 'unknown')

            # Extract translatable elements
            items, total_elements = self.extract_translatable_elements(result_json)

            if not items:
                logger.warning("No translatable elements found")
                return False, None, "No translatable elements found"

            # Create batches
            batches = self.create_batches(items)

            # Update initial progress
            if progress_callback:
                progress_callback(TranslationProgress(
                    total_elements=len(items)
                ))

            # Translate each batch
            all_translated: List[TranslatedItem] = []
            user_id = f"tool-ocr-{task_id}"
            processed_items = 0

            for batch_idx, batch in enumerate(batches):
                logger.info(
                    f"Translating batch {batch_idx + 1}/{len(batches)} "
                    f"({len(batch.items)} items, {batch.total_chars} chars)"
                )

                translated = self.translate_batch(batch, target_lang, user_id)
                all_translated.extend(translated)
                processed_items += len(batch.items)

                # Update progress
                if progress_callback:
                    progress_callback(TranslationProgress(
                        current_element=processed_items,
                        total_elements=len(items),
                        percentage=(processed_items / len(items)) * 100
                    ))

            # Build result
            processing_time = time.time() - start_time
            result = self.build_translation_result(
                translated_items=all_translated,
                source_document=source_document,
                source_lang=source_lang,
                target_lang=target_lang,
                total_elements=total_elements,
                processing_time=processing_time,
                batch_count=len(batches)
            )

            # Save result
            output_filename = result_json_path.stem.replace('_result', '')
            output_path = result_json_path.parent / f"{output_filename}_translated_{target_lang}.json"

            with open(output_path, 'w', encoding='utf-8') as f:
                json.dump(result, f, ensure_ascii=False, indent=2)

            logger.info(
                f"Translation completed: {len(all_translated)} items in {len(batches)} batches, "
                f"{processing_time:.2f}s, {self._total_tokens} tokens, "
                f"saved to {output_path}"
            )

            return True, output_path, None

        except Exception as e:
            logger.error(f"Translation failed: {e}")
            import traceback
            traceback.print_exc()
            return False, None, str(e)

    def get_job_state(self, task_id: str) -> Optional[TranslationJobState]:
        """Get the current state of a translation job"""
        with self._jobs_lock:
            return self._active_jobs.get(task_id)

    def set_job_state(self, task_id: str, state: TranslationJobState):
        """Set the state of a translation job"""
        with self._jobs_lock:
            self._active_jobs[task_id] = state

    def remove_job_state(self, task_id: str):
        """Remove a translation job state"""
        with self._jobs_lock:
            self._active_jobs.pop(task_id, None)


# Global singleton
_translation_service: Optional[TranslationService] = None


def get_translation_service() -> TranslationService:
    """Get the global TranslationService instance"""
    global _translation_service
    if _translation_service is None:
        _translation_service = TranslationService()
    return _translation_service
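For illustration, a sketch of how the service added above might be driven from the background task mentioned in the commit message. The task ID and paths are made up, and it assumes TranslationProgress exposes current_element, total_elements, and percentage as attributes, as the calls in translate_document suggest; none of this is part of the commit:

from pathlib import Path

from app.schemas.translation import TranslationProgress
from app.services.translation_service import get_translation_service


def log_progress(progress: TranslationProgress) -> None:
    # Called once before the first batch and again after each batch completes.
    print(f"{progress.current_element}/{progress.total_elements} ({progress.percentage:.0f}%)")


service = get_translation_service()
success, output_path, error = service.translate_document(
    task_id="demo-task",
    result_json_path=Path("storage/demo-task/result.json"),
    target_lang="en",
    source_lang="auto",
    progress_callback=log_progress,
)

if success:
    # e.g. storage/demo-task/result_translated_en.json
    print("translation saved to", output_path)
else:
    print("translation failed:", error)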