Files
5why-analyzer/routes/llmTest.js
donald 1568a12a96 fix: Add localhost host binding to Vite config and index fix script
- Add host: 'localhost' to vite.config.js to ensure consistent IP
- Add scripts/fix-indexes.js for database index verification
- Add routes/llmTest.js for LLM testing endpoint

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-09 18:28:29 +08:00

220 lines
5.7 KiB
JavaScript

import express from 'express';
import { asyncHandler } from '../middleware/errorHandler.js';
import { requireAuth, requireAdmin } from '../middleware/auth.js';
// Router for LLM connectivity-test endpoints (models listing, quick test, chat proxy).
const router = express.Router();
/**
 * GET /api/llm-test/models
 * List available LLM models, fetched dynamically from an OpenAI-compatible API.
 *
 * Query params:
 *   api_url - optional base URL of the target API; defaults to the built-in endpoint.
 *
 * Responses:
 *   200 { success, data: [{ id, name, description, best_for, owned_by }] }
 *   400 when api_url is not a valid http(s) URL
 *   500 when the upstream request fails or returns an unexpected shape
 */
router.get('/models', requireAuth, asyncHandler(async (req, res) => {
  const { api_url } = req.query;
  const targetUrl = api_url || 'https://ollama_pjapi.theaken.com';

  // SSRF guard: targetUrl is client-controlled, so reject anything that is not
  // a well-formed http(s) URL before making a server-side request with it.
  // NOTE(review): consider a host allow-list for production hardening.
  try {
    const { protocol } = new URL(targetUrl);
    if (protocol !== 'http:' && protocol !== 'https:') {
      throw new Error('unsupported protocol');
    }
  } catch {
    return res.status(400).json({
      success: false,
      error: '無法取得模型列表',
      message: 'Invalid api_url'
    });
  }

  try {
    const axios = (await import('axios')).default;
    const response = await axios.get(`${targetUrl}/v1/models`, {
      timeout: 10000
    });

    // OpenAI-compatible APIs return { data: [ ...models ] } at the top level.
    if (response.data?.data) {
      const models = response.data.data.map((model) => ({
        id: model.id,
        name: model.info?.name || model.id,
        description: model.info?.description || '',
        best_for: model.info?.best_for || '',
        owned_by: model.owned_by || 'unknown'
      }));
      res.json({
        success: true,
        data: models
      });
    } else {
      throw new Error('Invalid response format');
    }
  } catch (error) {
    res.status(500).json({
      success: false,
      error: '無法取得模型列表',
      message: error.message
    });
  }
}));
/**
 * POST /api/llm-test/quick
 * Quick LLM connectivity test (admin only).
 *
 * Body:
 *   api_url    - base URL of an OpenAI-compatible chat API (required)
 *   api_key    - optional bearer token forwarded as Authorization header
 *   model_name - model id to test (required)
 *
 * Sends a one-shot "respond with OK" prompt and reports the round-trip time.
 */
router.post('/quick', requireAdmin, asyncHandler(async (req, res) => {
  const { api_url, api_key, model_name } = req.body;

  if (!api_url || !model_name) {
    return res.status(400).json({
      success: false,
      error: '請提供 API 端點和模型名稱'
    });
  }

  // SSRF guard: api_url comes from the client; only allow http(s) targets.
  try {
    const { protocol } = new URL(api_url);
    if (protocol !== 'http:' && protocol !== 'https:') {
      throw new Error('unsupported protocol');
    }
  } catch {
    return res.status(400).json({
      success: false,
      error: '請提供 API 端點和模型名稱',
      message: 'Invalid api_url'
    });
  }

  try {
    const axios = (await import('axios')).default;
    const startTime = Date.now();

    const response = await axios.post(
      `${api_url}/v1/chat/completions`,
      {
        model: model_name,
        messages: [
          { role: 'user', content: 'Hello, please respond with just "OK"' }
        ],
        max_tokens: 10
      },
      {
        timeout: 15000,
        headers: {
          'Content-Type': 'application/json',
          ...(api_key && { 'Authorization': `Bearer ${api_key}` })
        }
      }
    );

    const responseTime = Date.now() - startTime;

    if (response.data?.choices) {
      res.json({
        success: true,
        message: 'LLM API 連線測試成功',
        responseTime: responseTime,
        response: response.data.choices[0]?.message?.content || ''
      });
    } else {
      throw new Error('Invalid API response format');
    }
  } catch (error) {
    res.status(500).json({
      success: false,
      error: 'LLM API 連線測試失敗',
      message: error.message
    });
  }
}));
/**
 * POST /api/llm-test/chat
 * Talk to an LLM directly (testing only, admin only).
 *
 * Body:
 *   api_url     - base URL of an OpenAI-compatible chat API (required)
 *   api_key     - optional bearer token forwarded as Authorization header
 *   model_name  - model id to use (required)
 *   messages    - non-empty chat messages array (required)
 *   temperature - sampling temperature; defaults to 0.7 (an explicit 0 is honored)
 *   max_tokens  - completion token cap; defaults to 2000
 *   stream      - when truthy, relay the upstream SSE stream to the client
 */
router.post('/chat', requireAdmin, asyncHandler(async (req, res) => {
  const { api_url, api_key, model_name, messages, temperature, max_tokens, stream } = req.body;

  // Require an actual non-empty array; a bare truthiness check would let a
  // string or empty array through to the upstream API.
  if (!api_url || !model_name || !Array.isArray(messages) || messages.length === 0) {
    return res.status(400).json({
      success: false,
      error: '請提供必要參數'
    });
  }

  // Use ?? so explicit falsy values are respected: `temperature || 0.7`
  // silently replaced temperature 0 (deterministic sampling) with the default.
  const payload = {
    model: model_name,
    messages: messages,
    temperature: temperature ?? 0.7,
    max_tokens: max_tokens ?? 2000,
    stream: Boolean(stream)
  };
  const requestConfig = {
    timeout: 120000,
    headers: {
      'Content-Type': 'application/json',
      ...(api_key && { 'Authorization': `Bearer ${api_key}` })
    }
  };

  try {
    const axios = (await import('axios')).default;
    const startTime = Date.now();

    if (!stream) {
      // Non-streaming mode: single JSON round trip.
      const response = await axios.post(
        `${api_url}/v1/chat/completions`,
        payload,
        requestConfig
      );
      const responseTime = Date.now() - startTime;
      res.json({
        success: true,
        data: {
          content: response.data.choices[0]?.message?.content || '',
          usage: response.data.usage,
          model: response.data.model,
          responseTime: responseTime
        }
      });
    } else {
      // Streaming mode: relay upstream SSE deltas as our own Server-Sent Events.
      res.setHeader('Content-Type', 'text/event-stream');
      res.setHeader('Cache-Control', 'no-cache');
      res.setHeader('Connection', 'keep-alive');

      const response = await axios.post(
        `${api_url}/v1/chat/completions`,
        payload,
        { ...requestConfig, responseType: 'stream' }
      );

      // Buffer partial lines across chunks: a TCP chunk may end mid-frame, and
      // splitting each chunk independently silently dropped those deltas.
      let buffer = '';
      response.data.on('data', (chunk) => {
        buffer += chunk.toString();
        const lines = buffer.split('\n');
        buffer = lines.pop(); // keep the trailing (possibly incomplete) line

        for (const line of lines) {
          if (!line.startsWith('data: ')) continue;
          const dataStr = line.slice(6).trim();
          if (!dataStr || dataStr === '[DONE]') continue;
          try {
            const data = JSON.parse(dataStr);
            const content = data.choices?.[0]?.delta?.content;
            if (content) {
              res.write(`data: ${JSON.stringify({ content })}\n\n`);
            }
          } catch {
            // Skip malformed SSE payloads (e.g. keep-alive comments).
          }
        }
      });
      response.data.on('end', () => {
        res.write('data: [DONE]\n\n');
        res.end();
      });
      response.data.on('error', (error) => {
        res.write(`data: ${JSON.stringify({ error: error.message })}\n\n`);
        res.end();
      });
      // Stop pulling from the upstream if the client disconnects mid-stream.
      req.on('close', () => {
        response.data.destroy();
      });
    }
  } catch (error) {
    if (!res.headersSent) {
      res.status(500).json({
        success: false,
        error: 'LLM 對話失敗',
        message: error.message
      });
    } else {
      // Headers already sent (streaming started); just terminate the response.
      res.end();
    }
  }
}));
// Presumably mounted at /api/llm-test by the app entry point — confirm against server setup.
export default router;