Initial commit: HR Position System

- Database schema with MySQL support
- LLM API integration (Gemini 2.5 Flash, DeepSeek, OpenAI)
- Error handling with copyable error messages
- CORS fix for API calls
- Complete setup documentation

🤖 Generated with Claude Code
https://claude.com/claude-code

Co-Authored-By: Claude <noreply@anthropic.com>
commit 29c1633e49
2025-12-04 00:46:53 +08:00
13 changed files with 6184 additions and 0 deletions
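
For reference, the environment variables read via os.getenv() in llm_config.py imply a .env file along these lines; the values are placeholders, and the two *_API_URL entries are optional because the code falls back to the defaults shown:

    GEMINI_API_KEY=your-gemini-key
    DEEPSEEK_API_KEY=your-deepseek-key
    DEEPSEEK_API_URL=https://api.deepseek.com/v1
    OPENAI_API_KEY=your-openai-key
    OPENAI_API_URL=https://api.openai.com/v1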

llm_config.py (new file, 308 lines)

@@ -0,0 +1,308 @@
"""
LLM API Configuration Module
Supports Gemini, DeepSeek, and OpenAI APIs with connection testing
"""
import os
import requests
from dotenv import load_dotenv
from typing import Dict, List, Tuple
# Load environment variables
load_dotenv()


class LLMConfig:
    """LLM API configuration and management"""

    def __init__(self):
        self.apis = {
            'gemini': {
                'name': 'Google Gemini',
                'api_key': os.getenv('GEMINI_API_KEY', ''),
                'endpoint': 'https://generativelanguage.googleapis.com/v1/models',
                'enabled': bool(os.getenv('GEMINI_API_KEY'))
            },
            'deepseek': {
                'name': 'DeepSeek',
                'api_key': os.getenv('DEEPSEEK_API_KEY', ''),
                'endpoint': os.getenv('DEEPSEEK_API_URL', 'https://api.deepseek.com/v1'),
                'enabled': bool(os.getenv('DEEPSEEK_API_KEY'))
            },
            'openai': {
                'name': 'OpenAI',
                'api_key': os.getenv('OPENAI_API_KEY', ''),
                'endpoint': os.getenv('OPENAI_API_URL', 'https://api.openai.com/v1'),
                'enabled': bool(os.getenv('OPENAI_API_KEY'))
            }
        }

    def get_enabled_apis(self) -> List[str]:
        """Get list of enabled APIs"""
        return [key for key, config in self.apis.items() if config['enabled']]

    def get_api_config(self, api_name: str) -> Dict:
        """Get configuration for specific API"""
        return self.apis.get(api_name, {})

    def test_gemini_connection(self) -> Tuple[bool, str]:
        """Test Gemini API connection"""
        try:
            api_key = self.apis['gemini']['api_key']
            if not api_key:
                return False, "API Key 未設定"

            # Test endpoint - list models
            url = f"https://generativelanguage.googleapis.com/v1beta/models?key={api_key}"
            response = requests.get(url, timeout=10)

            if response.status_code == 200:
                data = response.json()
                models = data.get('models', [])
                model_count = len(models)
                return True, f"連線成功!找到 {model_count} 個可用模型"
            elif response.status_code == 400:
                return False, "API Key 格式錯誤"
            elif response.status_code == 403:
                return False, "API Key 無效或權限不足"
            else:
                return False, f"連線失敗 (HTTP {response.status_code})"
        except requests.exceptions.Timeout:
            return False, "連線逾時"
        except requests.exceptions.ConnectionError:
            return False, "無法連接到伺服器"
        except Exception as e:
            return False, f"錯誤: {str(e)}"

    def test_deepseek_connection(self) -> Tuple[bool, str]:
        """Test DeepSeek API connection"""
        try:
            api_key = self.apis['deepseek']['api_key']
            endpoint = self.apis['deepseek']['endpoint']
            if not api_key:
                return False, "API Key 未設定"

            # Test endpoint - list models
            url = f"{endpoint}/models"
            headers = {
                'Authorization': f'Bearer {api_key}',
                'Content-Type': 'application/json'
            }
            response = requests.get(url, headers=headers, timeout=10)

            if response.status_code == 200:
                data = response.json()
                models = data.get('data', [])
                if models:
                    model_count = len(models)
                    return True, f"連線成功!找到 {model_count} 個可用模型"
                else:
                    return True, "連線成功!"
            elif response.status_code == 401:
                return False, "API Key 無效"
            elif response.status_code == 403:
                return False, "權限不足"
            else:
                return False, f"連線失敗 (HTTP {response.status_code})"
        except requests.exceptions.Timeout:
            return False, "連線逾時"
        except requests.exceptions.ConnectionError:
            return False, "無法連接到伺服器"
        except Exception as e:
            return False, f"錯誤: {str(e)}"

    def test_openai_connection(self) -> Tuple[bool, str]:
        """Test OpenAI API connection"""
        try:
            api_key = self.apis['openai']['api_key']
            endpoint = self.apis['openai']['endpoint']
            if not api_key:
                return False, "API Key 未設定"

            # Test endpoint - list models
            url = f"{endpoint}/models"
            headers = {
                'Authorization': f'Bearer {api_key}',
                'Content-Type': 'application/json'
            }
            response = requests.get(url, headers=headers, timeout=10)

            if response.status_code == 200:
                data = response.json()
                models = data.get('data', [])
                model_count = len(models)
                return True, f"連線成功!找到 {model_count} 個可用模型"
            elif response.status_code == 401:
                return False, "API Key 無效"
            elif response.status_code == 403:
                return False, "權限不足"
            else:
                return False, f"連線失敗 (HTTP {response.status_code})"
        except requests.exceptions.Timeout:
            return False, "連線逾時"
        except requests.exceptions.ConnectionError:
            return False, "無法連接到伺服器"
        except Exception as e:
            return False, f"錯誤: {str(e)}"

    def test_all_connections(self) -> Dict[str, Tuple[bool, str]]:
        """Test all configured API connections"""
        results = {}

        if self.apis['gemini']['enabled']:
            results['gemini'] = self.test_gemini_connection()
        if self.apis['deepseek']['enabled']:
            results['deepseek'] = self.test_deepseek_connection()
        if self.apis['openai']['enabled']:
            results['openai'] = self.test_openai_connection()

        return results

    def generate_text_gemini(self, prompt: str, max_tokens: int = 2000) -> Tuple[bool, str]:
        """Generate text using Gemini API"""
        try:
            api_key = self.apis['gemini']['api_key']
            if not api_key:
                return False, "API Key 未設定"

            url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent?key={api_key}"
            data = {
                "contents": [
                    {
                        "parts": [
                            {"text": prompt}
                        ]
                    }
                ],
                "generationConfig": {
                    "maxOutputTokens": max_tokens,
                    "temperature": 0.7
                }
            }
            response = requests.post(url, json=data, timeout=30)

            if response.status_code == 200:
                result = response.json()
                text = result['candidates'][0]['content']['parts'][0]['text']
                return True, text
            else:
                return False, f"生成失敗 (HTTP {response.status_code}): {response.text}"
        except Exception as e:
            return False, f"錯誤: {str(e)}"

    def generate_text_deepseek(self, prompt: str, max_tokens: int = 2000) -> Tuple[bool, str]:
        """Generate text using DeepSeek API"""
        try:
            api_key = self.apis['deepseek']['api_key']
            endpoint = self.apis['deepseek']['endpoint']
            if not api_key:
                return False, "API Key 未設定"

            url = f"{endpoint}/chat/completions"
            headers = {
                'Authorization': f'Bearer {api_key}',
                'Content-Type': 'application/json'
            }
            data = {
                "model": "deepseek-chat",
                "messages": [
                    {"role": "user", "content": prompt}
                ],
                "max_tokens": max_tokens,
                "temperature": 0.7
            }
            response = requests.post(url, json=data, headers=headers, timeout=30)

            if response.status_code == 200:
                result = response.json()
                text = result['choices'][0]['message']['content']
                return True, text
            else:
                return False, f"生成失敗 (HTTP {response.status_code}): {response.text}"
        except Exception as e:
            return False, f"錯誤: {str(e)}"

    def generate_text_openai(self, prompt: str, model: str = "gpt-3.5-turbo", max_tokens: int = 2000) -> Tuple[bool, str]:
        """Generate text using OpenAI API"""
        try:
            api_key = self.apis['openai']['api_key']
            endpoint = self.apis['openai']['endpoint']
            if not api_key:
                return False, "API Key 未設定"

            url = f"{endpoint}/chat/completions"
            headers = {
                'Authorization': f'Bearer {api_key}',
                'Content-Type': 'application/json'
            }
            data = {
                "model": model,
                "messages": [
                    {"role": "user", "content": prompt}
                ],
                "max_tokens": max_tokens,
                "temperature": 0.7
            }
            response = requests.post(url, json=data, headers=headers, timeout=30)

            if response.status_code == 200:
                result = response.json()
                text = result['choices'][0]['message']['content']
                return True, text
            else:
                return False, f"生成失敗 (HTTP {response.status_code}): {response.text}"
        except Exception as e:
            return False, f"錯誤: {str(e)}"


def main():
    """Test script"""
    print("=" * 60)
    print("LLM API Configuration Test")
    print("=" * 60)
    print()

    config = LLMConfig()

    # Show enabled APIs
    enabled = config.get_enabled_apis()
    print(f"已啟用的 API: {', '.join(enabled) if enabled else ''}")
    print()

    # Test all connections
    print("測試 API 連線:")
    print("-" * 60)
    results = config.test_all_connections()

    for api_name, (success, message) in results.items():
        # Pass/fail marker printed in front of each provider's result
        status = "✓" if success else "✗"
        api_display_name = config.apis[api_name]['name']
        print(f"{status} {api_display_name}: {message}")
    print()


if __name__ == '__main__':
    main()
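
A minimal usage sketch for the module above, assuming it is imported by application code; the caller script and the "first enabled provider" selection policy are illustrative, not part of this commit:

    # hypothetical caller (e.g. app.py) exercising the LLMConfig API defined above
    from llm_config import LLMConfig

    config = LLMConfig()
    generators = {
        'gemini': config.generate_text_gemini,
        'deepseek': config.generate_text_deepseek,
        'openai': config.generate_text_openai,
    }

    enabled = config.get_enabled_apis()
    if not enabled:
        raise SystemExit("No LLM API key configured in .env")

    # Use the first provider that has an API key set (illustrative policy)
    success, text = generators[enabled[0]]("List three interview questions for an HR analyst role.", max_tokens=500)
    print(text if success else f"Generation failed: {text}")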