9TH_FIX REPORT
app/api/admin.py
@@ -9,7 +9,7 @@ Modified: 2024-01-28
"""

from datetime import datetime, timedelta
-from flask import Blueprint, request, jsonify, g
+from flask import Blueprint, request, jsonify, g, send_file
from app.utils.decorators import admin_required
from app.utils.validators import validate_pagination, validate_date_range
from app.utils.helpers import create_response
@@ -34,7 +34,15 @@ def get_system_stats():
    try:
        from app import db

-        # Basic statistics
+        # Basic statistics - compute the actual total cost and today's active users
+        total_cost = db.session.query(func.sum(TranslationJob.total_cost)).scalar() or 0.0
+
+        # Count today's active users (users with job activity today)
+        today = datetime.utcnow().date()
+        active_users_today = db.session.query(TranslationJob.user_id).filter(
+            func.date(TranslationJob.created_at) == today
+        ).distinct().count()
+
        overview = {
            'total_jobs': TranslationJob.query.count(),
            'completed_jobs': TranslationJob.query.filter_by(status='COMPLETED').count(),
@@ -42,15 +50,16 @@ def get_system_stats():
            'pending_jobs': TranslationJob.query.filter_by(status='PENDING').count(),
            'processing_jobs': TranslationJob.query.filter_by(status='PROCESSING').count(),
            'total_users': User.query.count(),
-            'active_users_today': 0,  # simplified version, fixed at 0 for now
-            'total_cost': 0.0  # simplified version, fixed at 0 for now
+            'active_users_today': active_users_today,
+            'total_cost': float(total_cost)
        }

-        # Simplified user ranking - ordered by job count
+        # User ranking - ordered by job count and total cost
        user_rankings = db.session.query(
            User.id,
            User.display_name,
-            func.count(TranslationJob.id).label('job_count')
+            func.count(TranslationJob.id).label('job_count'),
+            func.sum(TranslationJob.total_cost).label('total_cost')
        ).outerjoin(TranslationJob).group_by(
            User.id, User.display_name
        ).order_by(
@@ -63,7 +72,7 @@ def get_system_stats():
                'user_id': ranking.id,
                'display_name': ranking.display_name,
                'job_count': ranking.job_count or 0,
-                'total_cost': 0.0  # simplified version
+                'total_cost': float(ranking.total_cost or 0.0)
            })

        # Simplified daily statistics - just return an empty array
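Note on the distinct-before-count pattern used above: counting job rows for the day would overstate activity whenever one user submits several jobs, so the query selects user_id, applies distinct(), and only then counts. A minimal pure-Python illustration with made-up rows (illustration only, not data from this repository):

from datetime import date

# (job_id, user_id, created_on) - hypothetical rows
jobs = [(1, 7, date(2024, 1, 28)), (2, 7, date(2024, 1, 28)), (3, 9, date(2024, 1, 28))]
today = date(2024, 1, 28)

active_users_today = len({user_id for _, user_id, created_on in jobs if created_on == today})
assert active_users_today == 2  # two distinct users, even though there are three jobs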
@@ -502,14 +511,50 @@ def cleanup_system():
                'days_kept': cache_days
            }

-        # Clean up old files (this will be implemented in the file service)
+        # Clean up old files
        if cleanup_files:
-            # from app.services.file_service import cleanup_old_files
-            # deleted_files = cleanup_old_files(days_to_keep=files_days)
-            cleanup_results['files'] = {
-                'message': 'File cleanup not implemented yet',
-                'days_kept': files_days
-            }
+            try:
+                from datetime import datetime, timedelta
+                import os
+                from pathlib import Path
+
+                # Find completed or failed jobs older than the configured number of days
+                cutoff_date = datetime.utcnow() - timedelta(days=files_days)
+                old_jobs = TranslationJob.query.filter(
+                    TranslationJob.created_at < cutoff_date,
+                    TranslationJob.status.in_(['COMPLETED', 'FAILED'])
+                ).all()
+
+                deleted_files_count = 0
+                for job in old_jobs:
+                    try:
+                        # Delete every file associated with the job
+                        for file_record in job.files:
+                            file_path = Path(file_record.file_path)
+                            if file_path.exists():
+                                os.remove(file_path)
+                                deleted_files_count += 1
+
+                        # Also delete the job directory once it is empty
+                        if job.file_path:
+                            job_dir = Path(job.file_path).parent
+                            if job_dir.exists() and len(list(job_dir.iterdir())) == 0:
+                                job_dir.rmdir()
+
+                    except Exception as file_error:
+                        logger.warning(f"Failed to cleanup files for job {job.job_uuid}: {file_error}")
+
+                cleanup_results['files'] = {
+                    'deleted_count': deleted_files_count,
+                    'jobs_processed': len(old_jobs),
+                    'days_kept': files_days
+                }
+
+            except Exception as cleanup_error:
+                cleanup_results['files'] = {
+                    'error': f'File cleanup failed: {str(cleanup_error)}',
+                    'days_kept': files_days
+                }

        # Record a maintenance log entry
        SystemLog.info(
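The cleanup above keys deletion off job records (status and created_at) rather than file timestamps. For comparison only, a filesystem-only variant of the same retention idea, with a hypothetical prune_old_outputs helper that is not part of this commit:

from datetime import datetime, timedelta
from pathlib import Path

def prune_old_outputs(root: Path, days_to_keep: int) -> int:
    """Delete files under root whose mtime is older than the retention window."""
    cutoff = datetime.utcnow() - timedelta(days=days_to_keep)
    deleted = 0
    for path in root.rglob('*'):
        if path.is_file() and datetime.utcfromtimestamp(path.stat().st_mtime) < cutoff:
            path.unlink()
            deleted += 1
    # Remove directories that became empty, deepest paths first
    for directory in sorted((p for p in root.rglob('*') if p.is_dir()), reverse=True):
        if not any(directory.iterdir()):
            directory.rmdir()
    return deleted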
@@ -537,4 +582,232 @@ def cleanup_system():
            success=False,
            error='SYSTEM_ERROR',
            message='系統清理失敗'
-        )), 500
+        )), 500
+
+
+@admin_bp.route('/export/<report_type>', methods=['GET'])
+@admin_required
+def export_report(report_type):
+    """Export report"""
+    try:
+        from io import BytesIO
+        import pandas as pd
+        from datetime import datetime, timedelta
+        from app import db
+
+        # Validate the report type
+        valid_types = ['usage', 'cost', 'jobs']
+        if report_type not in valid_types:
+            return jsonify(create_response(
+                success=False,
+                error='INVALID_REPORT_TYPE',
+                message='無效的報表類型'
+            )), 400
+
+        # Get the query parameters
+        start_date = request.args.get('start_date')
+        end_date = request.args.get('end_date')
+
+        # Set the default time range (last 30 days)
+        if not end_date:
+            end_date = datetime.utcnow()
+        else:
+            end_date = datetime.fromisoformat(end_date.replace('Z', '+00:00'))
+
+        if not start_date:
+            start_date = end_date - timedelta(days=30)
+        else:
+            start_date = datetime.fromisoformat(start_date.replace('Z', '+00:00'))
+
+        # Generate the report data
+        if report_type == 'usage':
+            # Usage statistics report
+            data = generate_usage_report(start_date, end_date)
+            filename = f'usage_report_{start_date.strftime("%Y%m%d")}_{end_date.strftime("%Y%m%d")}.xlsx'
+
+        elif report_type == 'cost':
+            # Cost analysis report
+            data = generate_cost_report(start_date, end_date)
+            filename = f'cost_report_{start_date.strftime("%Y%m%d")}_{end_date.strftime("%Y%m%d")}.xlsx'
+
+        elif report_type == 'jobs':
+            # Job list report
+            data = generate_jobs_report(start_date, end_date)
+            filename = f'jobs_report_{start_date.strftime("%Y%m%d")}_{end_date.strftime("%Y%m%d")}.xlsx'
+
+        # Build the Excel file
+        output = BytesIO()
+        with pd.ExcelWriter(output, engine='openpyxl') as writer:
+            for sheet_name, df in data.items():
+                df.to_excel(writer, sheet_name=sheet_name, index=False)
+
+        output.seek(0)
+
+        # Log the export
+        SystemLog.info(
+            'admin.export_report',
+            f'Report exported: {report_type}',
+            user_id=g.current_user.id,
+            extra_data={
+                'report_type': report_type,
+                'start_date': start_date.isoformat(),
+                'end_date': end_date.isoformat()
+            }
+        )
+
+        logger.info(f"Report exported by {g.current_user.username}: {report_type}")
+
+        # Send the file
+        return send_file(
+            BytesIO(output.getvalue()),
+            mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+            as_attachment=True,
+            download_name=filename
+        )
+
+    except Exception as e:
+        logger.error(f"Export report error: {str(e)}")
+
+        return jsonify(create_response(
+            success=False,
+            error='SYSTEM_ERROR',
+            message='匯出報表失敗'
+        )), 500
+
+
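A quick way to exercise the new endpoint from a script; the host, URL prefix, and Authorization header below are placeholders (only the /export/<report_type> suffix and the start_date/end_date query parameters come from the code above), so treat this as a hedged usage sketch:

import requests

resp = requests.get(
    'https://example.internal/api/admin/export/usage',  # hypothetical host and prefix
    params={'start_date': '2024-01-01T00:00:00Z', 'end_date': '2024-01-28T00:00:00Z'},
    headers={'Authorization': 'Bearer <admin-token>'},   # hypothetical auth scheme
    timeout=60,
)
resp.raise_for_status()
with open('usage_report.xlsx', 'wb') as fh:
    fh.write(resp.content)  # the response body is the xlsx produced by send_file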
+def generate_usage_report(start_date, end_date):
+    """Generate the usage statistics report"""
+    import pandas as pd
+    from app import db
+
+    # Per-user usage statistics
+    user_stats = db.session.query(
+        User.username,
+        User.display_name,
+        User.department,
+        func.count(TranslationJob.id).label('job_count'),
+        func.sum(TranslationJob.total_cost).label('total_cost'),
+        func.sum(TranslationJob.total_tokens).label('total_tokens')
+    ).outerjoin(TranslationJob).filter(
+        TranslationJob.created_at.between(start_date, end_date)
+    ).group_by(
+        User.id, User.username, User.display_name, User.department
+    ).order_by(func.count(TranslationJob.id).desc()).all()
+
+    user_df = pd.DataFrame([{
+        '用戶名': stat.username,
+        '顯示名稱': stat.display_name,
+        '部門': stat.department or '',
+        '任務數': stat.job_count or 0,
+        '總成本 ($)': float(stat.total_cost or 0.0),
+        '總Token數': stat.total_tokens or 0
+    } for stat in user_stats])
+
+    # Daily usage statistics
+    daily_stats = db.session.query(
+        func.date(TranslationJob.created_at).label('date'),
+        func.count(TranslationJob.id).label('job_count'),
+        func.sum(TranslationJob.total_cost).label('total_cost'),
+        func.sum(TranslationJob.total_tokens).label('total_tokens')
+    ).filter(
+        TranslationJob.created_at.between(start_date, end_date)
+    ).group_by(
+        func.date(TranslationJob.created_at)
+    ).order_by(func.date(TranslationJob.created_at)).all()
+
+    daily_df = pd.DataFrame([{
+        '日期': stat.date.strftime('%Y-%m-%d'),
+        '任務數': stat.job_count,
+        '總成本 ($)': float(stat.total_cost or 0.0),
+        '總Token數': stat.total_tokens or 0
+    } for stat in daily_stats])
+
+    return {
+        '用戶使用統計': user_df,
+        '每日使用統計': daily_df
+    }
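One subtlety in the per-user query above: applying the created_at filter in a WHERE clause after the outer join drops users who have no jobs in the window, so the user sheet lists only active users. If zero-activity users should also appear, the date condition can move into the join condition instead; a hedged sketch of that variant (same imports and context as above, not what this commit does):

user_stats = db.session.query(
    User.username,
    User.display_name,
    User.department,
    func.count(TranslationJob.id).label('job_count'),
    func.sum(TranslationJob.total_cost).label('total_cost'),
    func.sum(TranslationJob.total_tokens).label('total_tokens')
).outerjoin(
    TranslationJob,
    db.and_(
        TranslationJob.user_id == User.id,                        # keep the user/job relationship
        TranslationJob.created_at.between(start_date, end_date)   # restrict jobs inside the join
    )
).group_by(
    User.id, User.username, User.display_name, User.department
).order_by(func.count(TranslationJob.id).desc()).all()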
+
+
+def generate_cost_report(start_date, end_date):
+    """Generate the cost analysis report"""
+    import pandas as pd
+    from app import db
+
+    # Cost statistics by language
+    lang_costs = {}
+    jobs = TranslationJob.query.filter(
+        TranslationJob.created_at.between(start_date, end_date),
+        TranslationJob.total_cost.isnot(None)
+    ).all()
+
+    for job in jobs:
+        for lang in job.target_languages:
+            if lang not in lang_costs:
+                lang_costs[lang] = {'count': 0, 'cost': 0.0, 'tokens': 0}
+            lang_costs[lang]['count'] += 1
+            lang_costs[lang]['cost'] += float(job.total_cost or 0.0) / len(job.target_languages)
+            lang_costs[lang]['tokens'] += (job.total_tokens or 0) // len(job.target_languages)
+
+    lang_df = pd.DataFrame([{
+        '目標語言': lang,
+        '任務數': data['count'],
+        '總成本 ($)': data['cost'],
+        '總Token數': data['tokens'],
+        '平均單次成本 ($)': data['cost'] / data['count'] if data['count'] > 0 else 0
+    } for lang, data in lang_costs.items()])
+
+    # Cost statistics by file type
+    file_stats = db.session.query(
+        TranslationJob.file_extension,
+        func.count(TranslationJob.id).label('job_count'),
+        func.sum(TranslationJob.total_cost).label('total_cost'),
+        func.sum(TranslationJob.total_tokens).label('total_tokens')
+    ).filter(
+        TranslationJob.created_at.between(start_date, end_date)
+    ).group_by(TranslationJob.file_extension).all()
+
+    file_df = pd.DataFrame([{
+        '檔案類型': stat.file_extension,
+        '任務數': stat.job_count,
+        '總成本 ($)': float(stat.total_cost or 0.0),
+        '總Token數': stat.total_tokens or 0,
+        '平均單次成本 ($)': float(stat.total_cost or 0.0) / stat.job_count if stat.job_count > 0 else 0
+    } for stat in file_stats])
+
+    return {
+        '按語言成本分析': lang_df,
+        '按檔案類型成本分析': file_df
+    }
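The per-language sheet prorates each job's cost and tokens evenly across its target languages. A tiny worked example with made-up numbers (illustration only, not repository data):

job_cost, job_tokens, targets = 0.09, 3001, ['en', 'ja', 'ko']

per_lang_cost = job_cost / len(targets)       # 0.03 added to each language's cost
per_lang_tokens = job_tokens // len(targets)  # 1000 added to each language's tokens

# Note the integer division: 3 * 1000 = 3000, so one of the 3001 tokens is dropped,
# meaning per-language token totals can undercount slightly versus the job totals.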
+
+
+def generate_jobs_report(start_date, end_date):
+    """Generate the job list report"""
+    import pandas as pd
+    from app import db
+
+    jobs = db.session.query(TranslationJob).filter(
+        TranslationJob.created_at.between(start_date, end_date)
+    ).options(db.joinedload(TranslationJob.user)).order_by(
+        TranslationJob.created_at.desc()
+    ).all()
+
+    jobs_df = pd.DataFrame([{
+        '任務ID': job.job_uuid,
+        '用戶名': job.user.username if job.user else '',
+        '顯示名稱': job.user.display_name if job.user else '',
+        '部門': job.user.department if job.user and job.user.department else '',
+        '原始檔案': job.original_filename,
+        '檔案大小': job.file_size,
+        '來源語言': job.source_language,
+        '目標語言': ', '.join(job.target_languages),
+        '狀態': job.status,
+        '總成本 ($)': float(job.total_cost or 0.0),
+        '總Token數': job.total_tokens or 0,
+        '建立時間': job.created_at.strftime('%Y-%m-%d %H:%M:%S'),
+        '完成時間': job.completed_at.strftime('%Y-%m-%d %H:%M:%S') if job.completed_at else '',
+        '錯誤訊息': job.error_message or ''
+    } for job in jobs])
+
+    return {
+        '任務清單': jobs_df
+    }
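For a quick local check that a generated workbook contains the expected sheets, pandas can read it back; the file name below is only an example of the naming pattern produced by export_report:

import pandas as pd

# sheet_name=None loads every sheet into a dict of DataFrames (requires openpyxl)
sheets = pd.read_excel('jobs_report_20240101_20240128.xlsx', sheet_name=None)
print(list(sheets))            # expected: ['任務清單']
print(sheets['任務清單'].head())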