- Create app.py with Flask server on port 5002 - Add requirements.txt with Python dependencies - Add run_flask.bat for Windows users - Add run_flask.sh for Linux/Mac users - Complete Flask setup documentation - Database integration with PyMySQL - Full LLM API support (Gemini, DeepSeek, OpenAI, Claude) - CORS configuration - Error handling middleware Features: ✅ Runs on http://127.0.0.1:5002 ✅ All LLM APIs supported ✅ Database connection ✅ API proxy for CORS fix ✅ Auto setup scripts 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
499 lines
17 KiB
Python
499 lines
17 KiB
Python
"""
|
|
Flask Application
|
|
HR 績效評核系統 - Python Flask 後端伺服器
|
|
運行於 127.0.0.1:5002
|
|
"""
|
|
|
|
import os
|
|
import json
|
|
from datetime import datetime
|
|
from flask import Flask, request, jsonify, send_from_directory
|
|
from flask_cors import CORS
|
|
from dotenv import load_dotenv
|
|
import pymysql
|
|
import requests
|
|
from functools import wraps
|
|
|
|
# Load environment variables from .env
load_dotenv()

# Create the Flask application; static assets in ./public are served at the web root
app = Flask(__name__, static_folder='public', static_url_path='')

# CORS: allow the configured frontend origin (default: any) on /api/* routes only
CORS(app, resources={
    r"/api/*": {
        "origins": os.getenv('FRONTEND_URL', '*'),
        "methods": ["GET", "POST", "PUT", "DELETE", "PATCH"],
        "allow_headers": ["Content-Type", "Authorization"]
    }
})

# Application configuration
app.config['JSON_AS_ASCII'] = False  # emit non-ASCII (e.g. Chinese) text verbatim in JSON responses
app.config['JSON_SORT_KEYS'] = False  # keep insertion order of keys in JSON output
app.config['MAX_CONTENT_LENGTH'] = int(os.getenv('MAX_FILE_SIZE', 5242880))  # request-body cap, default 5MB
|
|
|
|
# ============================================
# Database connection
# ============================================
|
|
|
|
def get_db_connection():
    """Open a new MySQL connection from DB_* environment variables.

    Returns a PyMySQL connection configured for utf8mb4 with dict-shaped
    rows (DictCursor), or None when the connection cannot be established.
    """
    try:
        return pymysql.connect(
            host=os.getenv('DB_HOST'),
            port=int(os.getenv('DB_PORT', 3306)),
            user=os.getenv('DB_USER'),
            password=os.getenv('DB_PASSWORD'),
            database=os.getenv('DB_NAME'),
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor,
        )
    except Exception as exc:
        # Connection failures are reported to stdout and signalled via None.
        print(f"資料庫連線錯誤: {exc}")
        return None
|
|
|
|
def test_db_connection():
    """Return True if a trivial probe query (SELECT 1) succeeds, else False.

    Fixes two defects in the original: a bare ``except:`` (which would also
    swallow KeyboardInterrupt/SystemExit) and a connection leak when the
    probe query itself raised before ``conn.close()``.
    """
    try:
        conn = get_db_connection()
        if conn:
            try:
                with conn.cursor() as cursor:
                    cursor.execute("SELECT 1")
            finally:
                # Close even when the probe query raises.
                conn.close()
            return True
        return False
    except Exception:
        return False
|
|
|
|
# ============================================
# LLM service integration
# ============================================
|
|
|
|
class LLMService:
    """Static helpers for talking to the supported LLM provider HTTP APIs.

    Credentials, endpoints and model names are resolved from environment
    variables on every call (see :meth:`get_config`). All public methods
    return plain dicts suitable for direct JSON serialization, except
    :meth:`generate_content`, which raises on failure.
    """

    @staticmethod
    def get_config(provider):
        """Return the endpoint/key/model config dict for *provider*.

        Known providers: 'gemini', 'deepseek', 'openai', 'claude'.
        Returns None for an unknown provider; 'api_key' is None when the
        matching environment variable is unset.
        """
        configs = {
            'gemini': {
                'api_key': os.getenv('GEMINI_API_KEY'),
                'api_url': 'https://generativelanguage.googleapis.com/v1beta',
                'model': os.getenv('GEMINI_MODEL', 'gemini-pro')
            },
            'deepseek': {
                'api_key': os.getenv('DEEPSEEK_API_KEY'),
                'api_url': os.getenv('DEEPSEEK_API_URL', 'https://api.deepseek.com/v1'),
                'model': os.getenv('DEEPSEEK_MODEL', 'deepseek-chat')
            },
            'openai': {
                'api_key': os.getenv('OPENAI_API_KEY'),
                'api_url': os.getenv('OPENAI_API_URL', 'https://api.openai.com/v1'),
                'model': os.getenv('OPENAI_MODEL', 'gpt-4')
            },
            'claude': {
                'api_key': os.getenv('CLAUDE_API_KEY'),
                'api_url': os.getenv('CLAUDE_API_URL', 'https://api.anthropic.com/v1'),
                'model': os.getenv('CLAUDE_MODEL', 'claude-3-5-sonnet-20241022'),
                'version': '2023-06-01'  # value for the anthropic-version request header
            }
        }
        return configs.get(provider)

    @staticmethod
    def _test_openai_compatible(provider, display_name):
        """Connectivity check shared by the OpenAI-compatible chat APIs.

        DeepSeek and OpenAI expose the same /chat/completions contract;
        the original code duplicated this method body for each of them.
        *display_name* is the human-readable provider name used in the
        returned messages (e.g. 'DeepSeek', 'OpenAI').
        """
        try:
            config = LLMService.get_config(provider)
            if not config['api_key']:
                return {'success': False, 'message': f'{display_name} API key not configured', 'provider': provider}

            url = f"{config['api_url']}/chat/completions"
            response = requests.post(
                url,
                headers={'Authorization': f"Bearer {config['api_key']}"},
                json={'model': config['model'], 'messages': [{'role': 'user', 'content': 'Hello'}], 'max_tokens': 50},
                timeout=30
            )

            if response.status_code == 200:
                return {'success': True, 'message': f'{display_name} API connection successful', 'provider': provider, 'model': config['model']}
            return {'success': False, 'message': f'HTTP {response.status_code}', 'provider': provider}
        except Exception as e:
            return {'success': False, 'message': str(e), 'provider': provider}

    @staticmethod
    def test_gemini():
        """Connectivity check against the Gemini generateContent endpoint."""
        try:
            config = LLMService.get_config('gemini')
            if not config['api_key']:
                return {'success': False, 'message': 'Gemini API key not configured', 'provider': 'gemini'}

            url = f"{config['api_url']}/models/{config['model']}:generateContent"
            response = requests.post(
                url,
                params={'key': config['api_key']},  # Gemini authenticates via the `key` query param
                json={'contents': [{'parts': [{'text': 'Hello'}]}]},
                timeout=30
            )

            if response.status_code == 200:
                return {'success': True, 'message': 'Gemini API connection successful', 'provider': 'gemini', 'model': config['model']}
            return {'success': False, 'message': f'HTTP {response.status_code}', 'provider': 'gemini'}
        except Exception as e:
            return {'success': False, 'message': str(e), 'provider': 'gemini'}

    @staticmethod
    def test_deepseek():
        """Connectivity check against the DeepSeek chat API."""
        return LLMService._test_openai_compatible('deepseek', 'DeepSeek')

    @staticmethod
    def test_openai():
        """Connectivity check against the OpenAI chat API."""
        return LLMService._test_openai_compatible('openai', 'OpenAI')

    @staticmethod
    def test_claude():
        """Connectivity check against the Claude messages endpoint."""
        try:
            config = LLMService.get_config('claude')
            if not config['api_key']:
                return {'success': False, 'message': 'Claude API key not configured', 'provider': 'claude'}

            url = f"{config['api_url']}/messages"
            response = requests.post(
                url,
                headers={
                    'x-api-key': config['api_key'],
                    'anthropic-version': config['version'],
                    'content-type': 'application/json'
                },
                json={
                    'model': config['model'],
                    'max_tokens': 50,
                    'messages': [{'role': 'user', 'content': 'Hello'}]
                },
                timeout=30
            )

            if response.status_code == 200:
                return {'success': True, 'message': 'Claude API connection successful', 'provider': 'claude', 'model': config['model']}
            return {'success': False, 'message': f'HTTP {response.status_code}', 'provider': 'claude'}
        except Exception as e:
            return {'success': False, 'message': str(e), 'provider': 'claude'}

    @staticmethod
    def generate_content(prompt, provider='claude', options=None):
        """Generate text for *prompt* using the given provider.

        *options* may carry 'maxTokens' (default 2000) and 'temperature'
        (default 0.7). Returns {'success', 'content', 'provider'} on
        success. Raises Exception when the provider is not configured or
        the HTTP call fails; the original cause is chained (``from e``)
        instead of being silently discarded as before.
        """
        if options is None:
            options = {}

        config = LLMService.get_config(provider)
        if not config or not config['api_key']:
            raise Exception(f'{provider} API not configured')

        try:
            if provider == 'claude':
                url = f"{config['api_url']}/messages"
                response = requests.post(
                    url,
                    headers={
                        'x-api-key': config['api_key'],
                        'anthropic-version': config['version'],
                        'content-type': 'application/json'
                    },
                    json={
                        'model': config['model'],
                        'max_tokens': options.get('maxTokens', 2000),
                        'temperature': options.get('temperature', 0.7),
                        'messages': [{'role': 'user', 'content': prompt}]
                    },
                    timeout=30
                )

                if response.status_code == 200:
                    data = response.json()
                    return {'success': True, 'content': data['content'][0]['text'], 'provider': provider}

            elif provider == 'gemini':
                url = f"{config['api_url']}/models/{config['model']}:generateContent"
                response = requests.post(
                    url,
                    params={'key': config['api_key']},
                    json={
                        'contents': [{'parts': [{'text': prompt}]}],
                        'generationConfig': {
                            'temperature': options.get('temperature', 0.7),
                            'maxOutputTokens': options.get('maxTokens', 2000)
                        }
                    },
                    timeout=30
                )

                if response.status_code == 200:
                    data = response.json()
                    return {'success': True, 'content': data['candidates'][0]['content']['parts'][0]['text'], 'provider': provider}

            elif provider in ['deepseek', 'openai']:
                url = f"{config['api_url']}/chat/completions"
                response = requests.post(
                    url,
                    headers={'Authorization': f"Bearer {config['api_key']}"},
                    json={
                        'model': config['model'],
                        'messages': [{'role': 'user', 'content': prompt}],
                        'temperature': options.get('temperature', 0.7),
                        'max_tokens': options.get('maxTokens', 2000)
                    },
                    timeout=30
                )

                if response.status_code == 200:
                    data = response.json()
                    return {'success': True, 'content': data['choices'][0]['message']['content'], 'provider': provider}

            # Reached on any non-200 response for a known provider.
            raise Exception(f'Failed to generate content: HTTP {response.status_code}')

        except Exception as e:
            # Preserve the original traceback as the chained cause.
            raise Exception(f'{provider} API error: {str(e)}') from e
|
|
|
|
# ============================================
# Error handling
# ============================================
|
|
|
|
def handle_error(error, status_code=500):
    """Build the uniform JSON error envelope used by every API route.

    Returns a (response, status_code) tuple whose body carries the status
    code, stringified message, ISO timestamp and request path.
    """
    envelope = {
        'success': False,
        'error': {
            'statusCode': status_code,
            'message': str(error),
            'timestamp': datetime.now().isoformat(),
            'path': request.path,
        },
    }
    return jsonify(envelope), status_code
|
|
|
|
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 payload instead of Flask's default HTML page."""
    body = {
        'success': False,
        'error': {
            'statusCode': 404,
            'message': f'Cannot {request.method} {request.path}',
            'timestamp': datetime.now().isoformat(),
            'path': request.path,
        },
    }
    return jsonify(body), 404
|
|
|
|
@app.errorhandler(500)
def internal_error(error):
    """500 handler: delegate to the shared JSON error envelope."""
    return handle_error(error, 500)
|
|
|
|
# ============================================
# Routes
# ============================================
|
|
|
|
@app.route('/')
def index():
    """Root endpoint: service metadata plus a directory of endpoints."""
    info = {
        'name': 'HR Performance System API',
        'version': '1.0.0',
        'description': '四卡循環績效管理系統 (Python Flask)',
        'server': 'Flask/Python',
        'endpoints': {
            'health': '/health',
            'database': '/api/db/test',
            'llm': '/api/llm',
            'example': '/api-proxy-example.html',
        },
    }
    return jsonify(info)
|
|
|
|
@app.route('/health')
def health():
    """Liveness probe; also reports whether the database is reachable."""
    db_ok = test_db_connection()
    status = {
        'success': True,
        'message': 'HR Performance System API is running',
        'timestamp': datetime.now().isoformat(),
        'environment': os.getenv('NODE_ENV', 'development'),
        'database': 'connected' if db_ok else 'disconnected',
        'server': 'Flask/Python',
    }
    return jsonify(status)
|
|
|
|
# ============================================
# Database API
# ============================================
|
|
|
|
@app.route('/api/db/test', methods=['POST'])
def test_database():
    """POST /api/db/test — verify DB connectivity, report server version and schema name.

    Fixes a connection leak in the original: ``conn.close()`` was only
    reached when both queries succeeded, so a failing query leaked the
    connection. Closing now happens in a ``finally`` block.
    """
    try:
        conn = get_db_connection()
        if not conn:
            return handle_error('無法連接到資料庫', 500)

        try:
            with conn.cursor() as cursor:
                cursor.execute("SELECT VERSION() as version")
                result = cursor.fetchone()

                cursor.execute("SELECT DATABASE() as database_name")
                db_info = cursor.fetchone()
        finally:
            conn.close()

        return jsonify({
            'success': True,
            'message': '資料庫連線成功',
            'database': db_info['database_name'],
            'version': result['version']
        })
    except Exception as e:
        return handle_error(e, 500)
|
|
|
|
@app.route('/api/db/tables', methods=['GET'])
def list_tables():
    """GET /api/db/tables — list all tables in the configured database.

    Fixes the same connection leak as in test_database: the connection is
    now closed in a ``finally`` block even when SHOW TABLES raises.
    """
    try:
        conn = get_db_connection()
        if not conn:
            return handle_error('無法連接到資料庫', 500)

        try:
            with conn.cursor() as cursor:
                cursor.execute("SHOW TABLES")
                # DictCursor rows have one key ("Tables_in_<db>"); take its value.
                tables = [list(row.values())[0] for row in cursor.fetchall()]
        finally:
            conn.close()

        return jsonify({
            'success': True,
            'count': len(tables),
            'tables': tables
        })
    except Exception as e:
        return handle_error(e, 500)
|
|
|
|
# ============================================
|
|
# LLM API
|
|
# ============================================
|
|
|
|
@app.route('/api/llm/test/gemini', methods=['POST'])
def test_llm_gemini():
    """POST /api/llm/test/gemini — connectivity check for Gemini."""
    return jsonify(LLMService.test_gemini())
|
|
|
|
@app.route('/api/llm/test/deepseek', methods=['POST'])
def test_llm_deepseek():
    """POST /api/llm/test/deepseek — connectivity check for DeepSeek."""
    return jsonify(LLMService.test_deepseek())
|
|
|
|
@app.route('/api/llm/test/openai', methods=['POST'])
def test_llm_openai():
    """POST /api/llm/test/openai — connectivity check for OpenAI."""
    return jsonify(LLMService.test_openai())
|
|
|
|
@app.route('/api/llm/test/claude', methods=['POST'])
def test_llm_claude():
    """POST /api/llm/test/claude — connectivity check for Claude."""
    return jsonify(LLMService.test_claude())
|
|
|
|
@app.route('/api/llm/test/all', methods=['POST'])
def test_llm_all():
    """POST /api/llm/test/all — run every provider's connectivity check."""
    checks = {
        'gemini': LLMService.test_gemini(),
        'deepseek': LLMService.test_deepseek(),
        'openai': LLMService.test_openai(),
        'claude': LLMService.test_claude(),
    }
    return jsonify(checks)
|
|
|
|
@app.route('/api/llm/generate', methods=['POST'])
def generate_content():
    """POST /api/llm/generate — proxy a prompt to the chosen LLM provider.

    JSON body: {"prompt": str (required), "provider": str (default
    "claude"), "options": dict (optional)}. Returns the provider result
    or the standard error envelope.
    """
    try:
        payload = request.get_json()

        if not payload or 'prompt' not in payload:
            return handle_error('缺少必要參數: prompt', 400)

        result = LLMService.generate_content(
            payload['prompt'],
            payload.get('provider', 'claude'),
            payload.get('options', {}),
        )
        return jsonify(result)

    except Exception as e:
        return handle_error(e, 500)
|
|
|
|
# ============================================
# Static files
# ============================================
|
|
|
|
@app.route('/api-proxy-example.html')
def api_example():
    """Serve the static API usage example page from ./public."""
    return send_from_directory('public', 'api-proxy-example.html')
|
|
|
|
# ============================================
# Server startup
# ============================================
|
|
|
|
if __name__ == '__main__':
    # Startup banner
    print('=' * 60)
    print('🚀 HR Performance System API Server (Flask/Python)')
    print('=' * 60)
    print(f'📡 Server running on: http://127.0.0.1:5002')
    print(f'🌍 Environment: {os.getenv("NODE_ENV", "development")}')
    print(f'📅 Started at: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}')
    print('=' * 60)
    print('\n📚 Available endpoints:')
    print(' GET / - API information')
    print(' GET /health - Health check')
    print(' POST /api/db/test - Test database connection')
    print(' GET /api/db/tables - List all tables')
    print(' POST /api/llm/test/* - Test LLM connections')
    print(' POST /api/llm/generate - Generate content with LLM')
    print(' GET /api-proxy-example.html - API example page')
    print('\n✨ Server is ready to accept connections!\n')

    # Probe the database once at startup so misconfiguration is visible immediately
    if test_db_connection():
        print('✅ Database connection: OK')
    else:
        print('⚠️ Database connection: FAILED')

    print('')

    # Start the development server (debug mode only when NODE_ENV=development)
    app.run(
        host='127.0.0.1',
        port=5002,
        debug=os.getenv('NODE_ENV') == 'development'
    )
|