hr-performance-system/routes/llm.routes.js
donald c24634f4b7 Initial commit: HR Performance System
- Database schema with 31 tables for 4-card system
- LLM API integration (Gemini, DeepSeek, OpenAI)
- Error handling system with modal component
- Connection test UI for LLM services
- Environment configuration files
- Complete database documentation

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-03 23:34:13 +08:00

/**
 * LLM API Routes
 * Handles LLM-related API requests
 */
const express = require('express');
const router = express.Router();
const llmService = require('../services/llm.service');
const { asyncHandler, createError } = require('../utils/errorHandler');
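
// Illustrative mounting sketch (the entry file, JSON body-parsing middleware, and
// port are assumptions, not confirmed by this file):
//   const express = require('express');
//   const app = express();
//   app.use(express.json());                           // needed so req.body is populated
//   app.use('/api/llm', require('./routes/llm.routes'));
//   app.listen(3000);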

/**
 * POST /api/llm/test/gemini
 * Test the Gemini API connection
 */
router.post('/test/gemini', asyncHandler(async (req, res) => {
  const result = await llmService.testGeminiConnection();
  res.json(result);
}));

/**
 * POST /api/llm/test/deepseek
 * Test the DeepSeek API connection
 */
router.post('/test/deepseek', asyncHandler(async (req, res) => {
  const result = await llmService.testDeepSeekConnection();
  res.json(result);
}));

/**
 * POST /api/llm/test/openai
 * Test the OpenAI API connection
 */
router.post('/test/openai', asyncHandler(async (req, res) => {
  const result = await llmService.testOpenAIConnection();
  res.json(result);
}));

/**
 * POST /api/llm/test/all
 * Test all LLM API connections
 */
router.post('/test/all', asyncHandler(async (req, res) => {
  const results = await llmService.testAllConnections();
  res.json(results);
}));
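
// Example calls (illustrative; host and port are assumptions about the local setup):
//   curl -X POST http://localhost:3000/api/llm/test/gemini
//   curl -X POST http://localhost:3000/api/llm/test/all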

/**
 * POST /api/llm/generate
 * Generate content with an LLM
 *
 * Body:
 * {
 *   "prompt": "your prompt text",
 *   "provider": "gemini|deepseek|openai",  (optional; defaults to gemini)
 *   "options": {
 *     "temperature": 0.7,
 *     "maxTokens": 2000
 *   }
 * }
 */
router.post('/generate', asyncHandler(async (req, res) => {
  const { prompt, provider, options } = req.body;

  if (!prompt) {
    throw createError('BAD_REQUEST', '缺少必要參數: prompt');
  }

  const result = await llmService.generate(prompt, provider, options);
  res.json(result);
}));
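
// Example client call (illustrative; field names follow the Body description above,
// and the provider is optional, defaulting to gemini per that description):
//   fetch('/api/llm/generate', {
//     method: 'POST',
//     headers: { 'Content-Type': 'application/json' },
//     body: JSON.stringify({
//       prompt: '請為軟體工程師撰寫一段職責說明',
//       provider: 'deepseek',
//       options: { temperature: 0.7, maxTokens: 2000 },
//     }),
//   }).then(res => res.json());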

/**
 * POST /api/llm/help-me-fill
 * "Help Me" AI smart-fill feature
 *
 * Body:
 * {
 *   "cardType": "role|competency|performance|growth",
 *   "cardId": "card ID",
 *   "filledFields": { ... },
 *   "emptyFields": [ ... ],
 *   "context": { ... }
 * }
 */
router.post('/help-me-fill', asyncHandler(async (req, res) => {
  const { cardType, cardId, filledFields, emptyFields, context } = req.body;

  if (!cardType || !emptyFields || emptyFields.length === 0) {
    throw createError('BAD_REQUEST', '缺少必要參數');
  }

  // Build the prompt
  const prompt = buildHelpMeFillPrompt(cardType, filledFields, emptyFields, context);

  // Generate suggested content with the LLM
  const result = await llmService.generate(prompt, 'gemini', {
    temperature: 0.7,
    maxTokens: 2000,
  });

  if (!result.success) {
    throw createError('LLM_API_ERROR', '生成內容失敗');
  }

  // Parse the generated content
  const suggestions = parseHelpMeFillResponse(result.content, emptyFields);

  res.json({
    success: true,
    filledCount: Object.keys(suggestions).length,
    suggestions,
  });
}));
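
// Example request/response (illustrative field values; the response shape matches
// the res.json() call above):
//   POST /api/llm/help-me-fill
//   { "cardType": "competency", "cardId": "c-123",
//     "filledFields": { "title": "溝通協作" },
//     "emptyFields": ["description", "successCriteria"],
//     "context": { "role": "Engineer" } }
//   => { "success": true, "filledCount": 2,
//        "suggestions": { "description": "...", "successCriteria": "..." } }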

/**
 * Build the Help Me Fill prompt
 */
function buildHelpMeFillPrompt(cardType, filledFields, emptyFields, context) {
  let prompt = `你是一個 HR 績效評核系統的智能助手。請根據以下資訊,為空白欄位生成合適的內容。\n\n`;
  prompt += `卡片類型: ${cardType}\n\n`;

  // filledFields is optional in the request body, so guard against undefined
  if (filledFields && Object.keys(filledFields).length > 0) {
    prompt += `已填寫的欄位:\n`;
    for (const [key, value] of Object.entries(filledFields)) {
      prompt += `- ${key}: ${value}\n`;
    }
    prompt += `\n`;
  }

  if (context) {
    prompt += `上下文資訊:\n`;
    prompt += JSON.stringify(context, null, 2);
    prompt += `\n\n`;
  }

  prompt += `請為以下空白欄位生成內容:\n`;
  emptyFields.forEach(field => {
    prompt += `- ${field}\n`;
  });

  prompt += `\n請以 JSON 格式回覆,格式如下:\n`;
  prompt += `{\n`;
  // Join with commas so the JSON skeleton has no trailing comma after the last field
  prompt += emptyFields.map(field => `  "${field}": "生成的內容"`).join(',\n');
  prompt += `\n}\n`;

  return prompt;
}
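
// Illustrative usage (values are hypothetical):
//   buildHelpMeFillPrompt(
//     'growth',
//     { goal: '提升跨部門溝通' },
//     ['actionPlan', 'timeline'],
//     { quarter: 'Q3' }
//   );
//   // => a prompt that lists the filled fields, appends the context as JSON, and
//   //    ends with a JSON skeleton containing "actionPlan" and "timeline" keys.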

/**
 * Parse the Help Me Fill response
 */
function parseHelpMeFillResponse(content, emptyFields) {
  try {
    // Try to extract the first JSON object in the response and parse it directly
    const jsonMatch = content.match(/\{[\s\S]*\}/);
    if (jsonMatch) {
      return JSON.parse(jsonMatch[0]);
    }
    // Return an empty object if no JSON could be found
    return {};
  } catch (error) {
    console.error('解析 LLM 回應失敗:', error);
    return {};
  }
}
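
// Illustrative usage (hypothetical LLM output): the regex grabs the first "{" through
// the last "}", so surrounding text or markdown code fences are tolerated.
//   parseHelpMeFillResponse('```json\n{"description": "負責跨部門專案協調"}\n```', ['description']);
//   // => { description: '負責跨部門專案協調' }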
module.exports = router;