- Add Claude API integration to LLM service
- Create Express backend server with CORS support
- Add API proxy example page
- Fix CORS errors by routing through backend
- Update LLM configuration to support Claude
- Add package.json with dependencies

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
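The commit routes LLM calls through an Express backend so the browser never calls the Anthropic API directly, which is what fixes the CORS errors. Below is a minimal sketch of such a proxy; the `/api/llm/claude` route, the port, and the use of Node's global `fetch` are assumptions for illustration, not the repository's actual server code.

// Minimal sketch of an Express CORS proxy for the Claude API.
// Route name and port are assumptions, not taken from this repo.
require('dotenv').config();
const express = require('express');
const cors = require('cors');

const app = express();
app.use(cors());          // allow the example page to call this server cross-origin
app.use(express.json());

// Browser -> this backend -> Anthropic: the API key stays on the server,
// and the browser never hits api.anthropic.com directly.
app.post('/api/llm/claude', async (req, res) => {
  try {
    const response = await fetch('https://api.anthropic.com/v1/messages', {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
        'x-api-key': process.env.CLAUDE_API_KEY,
        'anthropic-version': '2023-06-01',
      },
      body: JSON.stringify({
        model: process.env.CLAUDE_MODEL || 'claude-3-5-sonnet-20241022',
        max_tokens: 2000,
        messages: req.body.messages,
      }),
    });
    res.status(response.status).json(await response.json());
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

app.listen(process.env.PORT || 3000);

The API proxy example page mentioned in the commit would then call this backend (e.g. `fetch('/api/llm/claude', ...)`) instead of the Anthropic endpoint, so no cross-origin request to the LLM provider ever leaves the browser.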
/**
 * LLM API Configuration
 * Supports the Gemini, DeepSeek, OpenAI, and Claude LLM services
 */

require('dotenv').config();

const llmConfig = {
  // Gemini Configuration
  gemini: {
    apiKey: process.env.GEMINI_API_KEY,
    model: process.env.GEMINI_MODEL || 'gemini-pro',
    apiUrl: 'https://generativelanguage.googleapis.com/v1beta',
    enabled: !!process.env.GEMINI_API_KEY,
    timeout: 30000, // 30 seconds
  },

  // DeepSeek Configuration
  deepseek: {
    apiKey: process.env.DEEPSEEK_API_KEY,
    apiUrl: process.env.DEEPSEEK_API_URL || 'https://api.deepseek.com/v1',
    model: process.env.DEEPSEEK_MODEL || 'deepseek-chat',
    enabled: !!process.env.DEEPSEEK_API_KEY,
    timeout: 30000,
  },

  // OpenAI Configuration
  openai: {
    apiKey: process.env.OPENAI_API_KEY,
    apiUrl: process.env.OPENAI_API_URL || 'https://api.openai.com/v1',
    model: process.env.OPENAI_MODEL || 'gpt-4',
    enabled: !!process.env.OPENAI_API_KEY,
    timeout: 30000,
  },

  // Claude Configuration
  claude: {
    apiKey: process.env.CLAUDE_API_KEY,
    apiUrl: process.env.CLAUDE_API_URL || 'https://api.anthropic.com/v1',
    model: process.env.CLAUDE_MODEL || 'claude-3-5-sonnet-20241022',
    enabled: !!process.env.CLAUDE_API_KEY,
    timeout: 30000,
    version: '2023-06-01', // Anthropic API version
  },

  // Default LLM Provider
  defaultProvider: 'claude',

  // Common Settings
  maxTokens: 2000,
  temperature: 0.7,
};

/**
 * Get the list of enabled LLM providers
 */
function getEnabledProviders() {
  const enabled = [];
  if (llmConfig.gemini.enabled) enabled.push('gemini');
  if (llmConfig.deepseek.enabled) enabled.push('deepseek');
  if (llmConfig.openai.enabled) enabled.push('openai');
  if (llmConfig.claude.enabled) enabled.push('claude');
  return enabled;
}

/**
 * Get the configuration for the specified LLM provider
 */
function getProviderConfig(provider) {
  if (!llmConfig[provider]) {
    throw new Error(`Unknown LLM provider: ${provider}`);
  }
  return llmConfig[provider];
}

/**
 * Check whether an LLM provider is enabled
 */
function isProviderEnabled(provider) {
  return llmConfig[provider]?.enabled || false;
}

module.exports = {
  llmConfig,
  getEnabledProviders,
  getProviderConfig,
  isProviderEnabled,
};
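For context, this is roughly how a caller might consume the module above; the require path and the calling code are assumptions for illustration, not taken from the repository's LLM service.

// Hypothetical usage of the config module (the require path is an assumption).
const { getEnabledProviders, getProviderConfig, llmConfig } = require('./config/llm');

console.log('Enabled providers:', getEnabledProviders()); // e.g. ['claude'] when only CLAUDE_API_KEY is set

const claude = getProviderConfig('claude');
const headers = {
  'content-type': 'application/json',
  'x-api-key': claude.apiKey,
  'anthropic-version': claude.version, // '2023-06-01' from the config above
};

const body = {
  model: claude.model,
  max_tokens: llmConfig.maxTokens,     // common setting: 2000
  temperature: llmConfig.temperature,  // common setting: 0.7
  messages: [{ role: 'user', content: 'Hello' }],
};

fetch(`${claude.apiUrl}/messages`, { method: 'POST', headers, body: JSON.stringify(body) })
  .then((r) => r.json())
  .then(console.log)
  .catch(console.error);

The per-provider `enabled` flags let the service fall back between providers at runtime based solely on which API keys are present in the environment, with `defaultProvider: 'claude'` as the first choice.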