- Database schema with 31 tables for 4-card system - LLM API integration (Gemini, DeepSeek, OpenAI) - Error handling system with modal component - Connection test UI for LLM services - Environment configuration files - Complete database documentation 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
365 lines
8.9 KiB
JavaScript
365 lines
8.9 KiB
JavaScript
/**
 * LLM Service
 * Integrates the Gemini, DeepSeek, and OpenAI LLM APIs
 * (connection testing and text generation).
 */
|
|
|
|
const axios = require('axios');
|
|
const { llmConfig, getProviderConfig, isProviderEnabled } = require('../config/llm.config');
|
|
|
|
class LLMService {
  // Human-readable provider labels used in every user-facing message below.
  // These exact strings are part of the service's observable contract
  // (callers and the connection-test UI may match on them) — do not reword.
  static #LABELS = { gemini: 'Gemini', deepseek: 'DeepSeek', openai: 'OpenAI' };

  // Fixed prompt sent by the connection-test probes.
  static #TEST_PROMPT = 'Hello, this is a connection test.';

  /**
   * POST to the Gemini `generateContent` endpoint.
   * Gemini authenticates via a `key` query parameter, not a Bearer header.
   * @param {{apiUrl: string, model: string, apiKey: string, timeout: number}} config
   * @param {object} payload - Request body.
   * @returns {Promise<object>} axios response promise.
   */
  #postGemini(config, payload) {
    const url = `${config.apiUrl}/models/${config.model}:generateContent`;
    return axios.post(url, payload, {
      headers: { 'Content-Type': 'application/json' },
      params: { key: config.apiKey },
      timeout: config.timeout,
    });
  }

  /**
   * POST to an OpenAI-compatible `/chat/completions` endpoint.
   * Shared by DeepSeek and OpenAI (both use Bearer-token auth).
   * @param {{apiUrl: string, model: string, apiKey: string, timeout: number}} config
   * @param {object} payload - Chat-completions request body.
   * @returns {Promise<object>} axios response promise.
   */
  #postChat(config, payload) {
    return axios.post(`${config.apiUrl}/chat/completions`, payload, {
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${config.apiKey}`,
      },
      timeout: config.timeout,
    });
  }

  /**
   * Shared connection-test implementation for all three providers.
   * Never rejects: every outcome (missing key, HTTP failure, odd payload)
   * is reported as a plain result object.
   * @param {'gemini'|'deepseek'|'openai'} provider
   * @returns {Promise<{success: boolean, message: string, provider: string,
   *   model?: string, error?: string}>}
   */
  async #testConnection(provider) {
    const label = LLMService.#LABELS[provider];
    try {
      if (!isProviderEnabled(provider)) {
        return {
          success: false,
          message: `${label} API key not configured`,
          provider,
        };
      }

      const config = getProviderConfig(provider);
      let ok;
      if (provider === 'gemini') {
        const response = await this.#postGemini(config, {
          contents: [{ parts: [{ text: LLMService.#TEST_PROMPT }] }],
        });
        ok = response.status === 200 && Boolean(response.data);
      } else {
        const response = await this.#postChat(config, {
          model: config.model,
          messages: [{ role: 'user', content: LLMService.#TEST_PROMPT }],
          max_tokens: 50, // keep the probe cheap
        });
        ok = response.status === 200 && Boolean(response.data.choices);
      }

      if (ok) {
        return {
          success: true,
          message: `${label} API connection successful`,
          provider,
          model: config.model,
        };
      }
      return {
        success: false,
        message: `Unexpected response from ${label} API`,
        provider,
      };
    } catch (error) {
      // Prefer the API's own error message when the response body carries one.
      return {
        success: false,
        message: error.response?.data?.error?.message || error.message,
        provider,
        error: error.message,
      };
    }
  }

  /**
   * Test Gemini API connectivity.
   * @returns {Promise<object>} Result object (never rejects).
   */
  async testGeminiConnection() {
    return this.#testConnection('gemini');
  }

  /**
   * Test DeepSeek API connectivity.
   * @returns {Promise<object>} Result object (never rejects).
   */
  async testDeepSeekConnection() {
    return this.#testConnection('deepseek');
  }

  /**
   * Test OpenAI API connectivity.
   * @returns {Promise<object>} Result object (never rejects).
   */
  async testOpenAIConnection() {
    return this.#testConnection('openai');
  }

  /**
   * Test all LLM provider connections.
   * The three probes are independent, so they run in parallel; each probe
   * reports failures as a result object, so Promise.all cannot reject here.
   * @returns {Promise<{gemini: object, deepseek: object, openai: object}>}
   */
  async testAllConnections() {
    const [gemini, deepseek, openai] = await Promise.all([
      this.testGeminiConnection(),
      this.testDeepSeekConnection(),
      this.testOpenAIConnection(),
    ]);
    return { gemini, deepseek, openai };
  }

  /**
   * Shared generation implementation for all three providers.
   * @param {'gemini'|'deepseek'|'openai'} provider
   * @param {string} prompt - User prompt to send.
   * @param {{temperature?: number, maxTokens?: number}} options
   * @returns {Promise<{success: true, content: string, provider: string}>}
   * @throws {Error} `"<Label> API error: ..."` wrapping any failure; the
   *   original error is attached as `cause` so the stack is not lost.
   */
  async #generateVia(provider, prompt, options) {
    const label = LLMService.#LABELS[provider];
    try {
      if (!isProviderEnabled(provider)) {
        throw new Error(`${label} API not configured`);
      }

      const config = getProviderConfig(provider);
      // `??` (not `||`) so an explicit `temperature: 0` or `maxTokens: 0`
      // passed by the caller is honored instead of silently replaced.
      const temperature = options.temperature ?? llmConfig.temperature;
      const maxTokens = options.maxTokens ?? llmConfig.maxTokens;

      let content;
      if (provider === 'gemini') {
        const response = await this.#postGemini(config, {
          contents: [{ parts: [{ text: prompt }] }],
          generationConfig: { temperature, maxOutputTokens: maxTokens },
        });
        content = response.data?.candidates?.[0]?.content?.parts?.[0]?.text;
      } else {
        const response = await this.#postChat(config, {
          model: config.model,
          messages: [{ role: 'user', content: prompt }],
          temperature,
          max_tokens: maxTokens,
        });
        content = response.data?.choices?.[0]?.message?.content;
      }

      if (content) {
        return { success: true, content, provider };
      }
      throw new Error(`Invalid response format from ${label}`);
    } catch (error) {
      throw new Error(`${label} API error: ${error.message}`, { cause: error });
    }
  }

  /**
   * Generate content with Gemini.
   * @param {string} prompt
   * @param {{temperature?: number, maxTokens?: number}} [options]
   */
  async generateWithGemini(prompt, options = {}) {
    return this.#generateVia('gemini', prompt, options);
  }

  /**
   * Generate content with DeepSeek.
   * @param {string} prompt
   * @param {{temperature?: number, maxTokens?: number}} [options]
   */
  async generateWithDeepSeek(prompt, options = {}) {
    return this.#generateVia('deepseek', prompt, options);
  }

  /**
   * Generate content with OpenAI.
   * @param {string} prompt
   * @param {{temperature?: number, maxTokens?: number}} [options]
   */
  async generateWithOpenAI(prompt, options = {}) {
    return this.#generateVia('openai', prompt, options);
  }

  /**
   * Generate content with the default or an explicitly named provider.
   * @param {string} prompt
   * @param {string|null} [provider] - Falls back to llmConfig.defaultProvider
   *   when null/empty (original `||` semantics kept for compatibility).
   * @param {{temperature?: number, maxTokens?: number}} [options]
   * @throws {Error} On unknown provider or any provider failure.
   */
  async generate(prompt, provider = null, options = {}) {
    const selectedProvider = provider || llmConfig.defaultProvider;

    switch (selectedProvider) {
      case 'gemini':
        return this.generateWithGemini(prompt, options);
      case 'deepseek':
        return this.generateWithDeepSeek(prompt, options);
      case 'openai':
        return this.generateWithOpenAI(prompt, options);
      default:
        throw new Error(`Unknown provider: ${selectedProvider}`);
    }
  }
}
|
|
|
|
// Export a single shared instance — the service holds no per-request state,
// so all consumers can safely use the same object.
const llmService = new LLMService();
module.exports = llmService;
|