feat: Add multi-LLM provider support with DeepSeek integration
Major Features:
- ✨ Multi-LLM provider support (DeepSeek, Ollama, OpenAI, Custom)
- 🤖 Admin panel LLM configuration management UI
- 🔄 Dynamic provider switching without restart
- 🧪 Built-in API connection testing
- 🔒 Secure API key management

Backend Changes:
- Add routes/llmConfig.js: Complete LLM config CRUD API
- Update routes/analyze.js: Use database LLM configuration
- Update server.js: Add LLM config routes
- Add scripts/add-deepseek-config.js: DeepSeek setup script (see the usage sketch after this message)

Frontend Changes:
- Update src/pages/AdminPage.jsx: Add LLM Config tab + modal
- Update src/services/api.js: Add LLM config API methods
- Provider presets for DeepSeek, Ollama, OpenAI
- Test connection feature in config modal

Configuration:
- Update .env.example: Add DeepSeek API configuration
- Update package.json: Add llm:add-deepseek script

Documentation:
- Add docs/LLM_CONFIGURATION_GUIDE.md: Complete guide
- Add DEEPSEEK_INTEGRATION.md: Integration summary
- Quick setup instructions for DeepSeek

API Endpoints:
- GET /api/llm-config: List all configurations
- GET /api/llm-config/active: Get active configuration
- POST /api/llm-config: Create configuration
- PUT /api/llm-config/:id: Update configuration
- PUT /api/llm-config/:id/activate: Activate configuration
- DELETE /api/llm-config/:id: Delete configuration
- POST /api/llm-config/test: Test API connection

Database:
- Uses existing llm_configs table
- Only one config active at a time
- Fallback to Ollama if no database config

Security:
- Admin-only access to LLM configuration
- API keys never returned in GET requests
- Audit logging for all config changes
- Cannot delete active configuration

DeepSeek Model:
- Model: deepseek-chat
- High-quality 5 Why analysis
- Excellent Chinese language support
- Cost-effective pricing

🤖 Generated with Claude Code

Co-Authored-By: Claude <noreply@anthropic.com>
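For a concrete picture of the endpoints above, here is a minimal usage sketch in the spirit of the setup script mentioned in the commit (scripts/add-deepseek-config.js). The endpoint paths and the deepseek-chat model come from this commit message; the fetchJson helper, the request/response field names, the base URL, and the environment variable names are illustrative assumptions, not the script's actual contents.

// Hypothetical sketch of adding, testing, and activating a DeepSeek configuration
// via the /api/llm-config endpoints. fetchJson, BASE_URL, the payload field names,
// and DEEPSEEK_API_KEY are assumptions for illustration only.
const BASE_URL = process.env.API_BASE_URL || 'http://localhost:3000';

async function fetchJson(path, { method = 'GET', body, token } = {}) {
  const res = await fetch(`${BASE_URL}${path}`, {
    method,
    headers: {
      'Content-Type': 'application/json',
      ...(token ? { Authorization: `Bearer ${token}` } : {})
    },
    body: body ? JSON.stringify(body) : undefined
  });
  if (!res.ok) throw new Error(`${method} ${path} failed with status ${res.status}`);
  return res.json();
}

async function addAndActivateDeepSeek(adminToken) {
  const payload = {
    provider: 'deepseek',
    model: 'deepseek-chat',
    api_url: 'https://api.deepseek.com',
    api_key: process.env.DEEPSEEK_API_KEY
  };

  // Verify the key and endpoint before switching providers.
  await fetchJson('/api/llm-config/test', { method: 'POST', token: adminToken, body: payload });

  // Create the configuration (admin-only; response shape is assumed).
  const created = await fetchJson('/api/llm-config', { method: 'POST', token: adminToken, body: payload });

  // Activate it; only one configuration is active at a time.
  await fetchJson(`/api/llm-config/${created.id}/activate`, { method: 'PUT', token: adminToken });
}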
server.js (+11 lines)
@@ -12,6 +12,7 @@ import { notFoundHandler, errorHandler } from './middleware/errorHandler.js';
 import authRoutes from './routes/auth.js';
 import analyzeRoutes from './routes/analyze.js';
 import adminRoutes from './routes/admin.js';
+import llmConfigRoutes from './routes/llmConfig.js';

 // Load environment variables
 dotenv.config();
@@ -104,6 +105,7 @@ app.get('/health/db', async (req, res) => {
 app.use('/api/auth', authRoutes);
 app.use('/api/analyze', analyzeRoutes);
 app.use('/api/admin', adminRoutes);
+app.use('/api/llm-config', llmConfigRoutes);

 // Root Endpoint
 app.get('/', (req, res) => {
@@ -128,6 +130,15 @@ app.get('/', (req, res) => {
       users: 'GET /api/admin/users',
       analyses: 'GET /api/admin/analyses',
       auditLogs: 'GET /api/admin/audit-logs'
     },
+    llmConfig: {
+      list: 'GET /api/llm-config',
+      active: 'GET /api/llm-config/active',
+      create: 'POST /api/llm-config',
+      update: 'PUT /api/llm-config/:id',
+      activate: 'PUT /api/llm-config/:id/activate',
+      delete: 'DELETE /api/llm-config/:id',
+      test: 'POST /api/llm-config/test'
+    }
     }
   });
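As context for the routes mounted in the diff above, the following is a hedged sketch of the general shape routes/llmConfig.js could take, inferred only from the endpoint list and security notes in the commit message. The requireAdmin middleware, the db helper and its pg-style query API, and the llm_configs column names are assumptions, not the actual implementation.

// Hypothetical skeleton of routes/llmConfig.js, inferred from the commit message.
// requireAdmin, db, and the llm_configs columns are assumed; parameter placeholders
// assume a pg-style database client.
import { Router } from 'express';
import { requireAdmin } from '../middleware/auth.js'; // assumed path
import db from '../config/database.js';               // assumed path

const router = Router();
router.use(requireAdmin); // Admin-only access to LLM configuration

// GET /api/llm-config — list configurations without exposing stored API keys
router.get('/', async (req, res, next) => {
  try {
    const { rows } = await db.query(
      'SELECT id, provider, model, api_url, is_active, created_at FROM llm_configs ORDER BY id'
    );
    res.json(rows); // api_key column deliberately omitted from the SELECT
  } catch (err) {
    next(err);
  }
});

// DELETE /api/llm-config/:id — refuse to delete the active configuration
router.delete('/:id', async (req, res, next) => {
  try {
    const { rows } = await db.query('SELECT is_active FROM llm_configs WHERE id = $1', [req.params.id]);
    if (rows[0]?.is_active) {
      return res.status(400).json({ error: 'Cannot delete active configuration' });
    }
    await db.query('DELETE FROM llm_configs WHERE id = $1', [req.params.id]);
    res.status(204).end();
  } catch (err) {
    next(err);
  }
});

// POST /, PUT /:id, PUT /:id/activate, and POST /test would follow the same pattern.

export default router;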
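The commit message also states that routes/analyze.js now reads the active LLM configuration from the database and falls back to Ollama when none exists. Below is a minimal sketch of that selection step; the function name, column names, and environment variable names are assumptions, not the file's actual code.

// Hypothetical provider-selection step for routes/analyze.js: use the active row
// from llm_configs if present, otherwise fall back to Ollama defaults.
async function resolveLlmConfig(db) {
  const { rows } = await db.query(
    'SELECT provider, model, api_url, api_key FROM llm_configs WHERE is_active = true LIMIT 1'
  );
  if (rows.length > 0) {
    return rows[0];
  }
  // Fallback to Ollama if no database config
  return {
    provider: 'ollama',
    model: process.env.OLLAMA_MODEL || 'llama3',
    api_url: process.env.OLLAMA_API_URL || 'http://localhost:11434',
    api_key: null
  };
}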