Initial commit: HR Performance System
- Database schema with 31 tables for 4-card system
- LLM API integration (Gemini, DeepSeek, OpenAI)
- Error handling system with modal component
- Connection test UI for LLM services
- Environment configuration files
- Complete database documentation

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
config/llm.config.js (normal file, 77 lines added)
@@ -0,0 +1,77 @@
/**
 * LLM API Configuration
 * Supports three LLM providers: Gemini, DeepSeek, and OpenAI
 */

require('dotenv').config();

const llmConfig = {
  // Gemini Configuration
  gemini: {
    apiKey: process.env.GEMINI_API_KEY,
    model: process.env.GEMINI_MODEL || 'gemini-pro',
    apiUrl: 'https://generativelanguage.googleapis.com/v1beta',
    enabled: !!process.env.GEMINI_API_KEY,
    timeout: 30000, // 30 seconds
  },

  // DeepSeek Configuration
  deepseek: {
    apiKey: process.env.DEEPSEEK_API_KEY,
    apiUrl: process.env.DEEPSEEK_API_URL || 'https://api.deepseek.com/v1',
    model: process.env.DEEPSEEK_MODEL || 'deepseek-chat',
    enabled: !!process.env.DEEPSEEK_API_KEY,
    timeout: 30000,
  },

  // OpenAI Configuration
  openai: {
    apiKey: process.env.OPENAI_API_KEY,
    apiUrl: process.env.OPENAI_API_URL || 'https://api.openai.com/v1',
    model: process.env.OPENAI_MODEL || 'gpt-4',
    enabled: !!process.env.OPENAI_API_KEY,
    timeout: 30000,
  },

  // Default LLM Provider
  defaultProvider: 'gemini',

  // Common Settings
  maxTokens: 2000,
  temperature: 0.7,
};

/**
 * Get the list of enabled LLM providers
 */
function getEnabledProviders() {
  const enabled = [];
  if (llmConfig.gemini.enabled) enabled.push('gemini');
  if (llmConfig.deepseek.enabled) enabled.push('deepseek');
  if (llmConfig.openai.enabled) enabled.push('openai');
  return enabled;
}

/**
 * Get the configuration for the given LLM provider
 */
function getProviderConfig(provider) {
  if (!llmConfig[provider]) {
    throw new Error(`Unknown LLM provider: ${provider}`);
  }
  return llmConfig[provider];
}

/**
 * Check whether an LLM provider is enabled
 */
function isProviderEnabled(provider) {
  return llmConfig[provider]?.enabled || false;
}

module.exports = {
  llmConfig,
  getEnabledProviders,
  getProviderConfig,
  isProviderEnabled,
};
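The commit does not show a caller for this module, so the following is only a rough sketch of how another module might consume the exported helpers. The relative require path and the fallback-to-first-enabled-provider logic are assumptions for illustration, not part of the commit; the provider is only enabled when its API key is present in .env (GEMINI_API_KEY, DEEPSEEK_API_KEY, or OPENAI_API_KEY).

// Illustrative usage sketch, assuming it runs from the project root.
const {
  llmConfig,
  getEnabledProviders,
  getProviderConfig,
  isProviderEnabled,
} = require('./config/llm.config');

const enabled = getEnabledProviders();
console.log('Enabled LLM providers:', enabled);

// Prefer the configured default provider, otherwise fall back to the first enabled one.
const provider = isProviderEnabled(llmConfig.defaultProvider)
  ? llmConfig.defaultProvider
  : enabled[0];

if (!provider) {
  throw new Error('No LLM provider configured: set GEMINI_API_KEY, DEEPSEEK_API_KEY, or OPENAI_API_KEY');
}

const { model, apiUrl, timeout } = getProviderConfig(provider);
console.log(`Using ${provider}: ${model} via ${apiUrl} (timeout ${timeout} ms)`);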
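The commit message also mentions a connection test UI for the LLM services, but the backend call it relies on is not part of this diff. Purely as a hedged sketch, assuming Node 18+ (global fetch and AbortSignal.timeout) and the OpenAI-compatible chat completions endpoint that both DeepSeek and OpenAI expose, a minimal connectivity check built on the config above might look like this; Gemini's generativelanguage API uses a different request shape and is not covered here.

// Hypothetical connectivity check, not part of this commit's diff.
// Works only for the OpenAI-compatible providers ('openai', 'deepseek').
const { getProviderConfig } = require('./config/llm.config');

async function testConnection(provider) {
  const { apiKey, apiUrl, model, timeout } = getProviderConfig(provider);
  const response = await fetch(`${apiUrl}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model,
      messages: [{ role: 'user', content: 'ping' }],
      max_tokens: 5,
    }),
    signal: AbortSignal.timeout(timeout), // honour the 30-second timeout from the config
  });
  if (!response.ok) {
    throw new Error(`${provider} connection test failed: HTTP ${response.status}`);
  }
  return response.json();
}

// Example: testConnection('deepseek').then(() => console.log('OK')).catch(console.error);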