- Add Claude API integration to LLM service
- Create Express backend server with CORS support
- Add API proxy example page
- Fix CORS errors by routing through backend
- Update LLM configuration to support Claude
- Add package.json with dependencies

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
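The commit routes browser traffic through an Express backend so the frontend no longer calls the LLM APIs directly, which is what fixes the CORS errors. A minimal sketch of such a proxy route is shown below; the file name, route path, port, and require path are assumptions for illustration, not taken from this commit:

```javascript
// server.js (hypothetical name) - CORS-enabled proxy in front of the LLM service
const express = require('express');
const cors = require('cors');
const llmService = require('./services/llm.service'); // assumed location of the service below

const app = express();
app.use(cors()); // allow the frontend origin; API keys stay on the backend
app.use(express.json());

// Frontend posts { prompt, provider?, options? }; the backend forwards to the chosen LLM.
app.post('/api/llm/generate', async (req, res) => {
  try {
    const { prompt, provider, options } = req.body;
    const result = await llmService.generate(prompt, provider, options);
    res.json(result);
  } catch (error) {
    res.status(500).json({ success: false, message: error.message });
  }
});

app.listen(3000, () => console.log('API proxy listening on port 3000'));
```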
479 lines
12 KiB
JavaScript
/**
 * LLM Service
 * Integrates the Gemini, DeepSeek, OpenAI, and Claude LLM APIs
 */

const axios = require('axios');
const { llmConfig, getProviderConfig, isProviderEnabled } = require('../config/llm.config');

class LLMService {
  /**
   * Test the Gemini API connection
   */
  async testGeminiConnection() {
    try {
      if (!isProviderEnabled('gemini')) {
        return {
          success: false,
          message: 'Gemini API key not configured',
          provider: 'gemini',
        };
      }

      const config = getProviderConfig('gemini');
      const url = `${config.apiUrl}/models/${config.model}:generateContent`;

      const response = await axios.post(
        url,
        {
          contents: [
            {
              parts: [
                {
                  text: 'Hello, this is a connection test.',
                },
              ],
            },
          ],
        },
        {
          headers: {
            'Content-Type': 'application/json',
          },
          params: {
            key: config.apiKey,
          },
          timeout: config.timeout,
        }
      );

      if (response.status === 200 && response.data) {
        return {
          success: true,
          message: 'Gemini API connection successful',
          provider: 'gemini',
          model: config.model,
        };
      }

      return {
        success: false,
        message: 'Unexpected response from Gemini API',
        provider: 'gemini',
      };
    } catch (error) {
      return {
        success: false,
        message: error.response?.data?.error?.message || error.message,
        provider: 'gemini',
        error: error.message,
      };
    }
  }

  /**
   * Test the DeepSeek API connection
   */
  async testDeepSeekConnection() {
    try {
      if (!isProviderEnabled('deepseek')) {
        return {
          success: false,
          message: 'DeepSeek API key not configured',
          provider: 'deepseek',
        };
      }

      const config = getProviderConfig('deepseek');
      const url = `${config.apiUrl}/chat/completions`;

      const response = await axios.post(
        url,
        {
          model: config.model,
          messages: [
            {
              role: 'user',
              content: 'Hello, this is a connection test.',
            },
          ],
          max_tokens: 50,
        },
        {
          headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${config.apiKey}`,
          },
          timeout: config.timeout,
        }
      );

      if (response.status === 200 && response.data.choices) {
        return {
          success: true,
          message: 'DeepSeek API connection successful',
          provider: 'deepseek',
          model: config.model,
        };
      }

      return {
        success: false,
        message: 'Unexpected response from DeepSeek API',
        provider: 'deepseek',
      };
    } catch (error) {
      return {
        success: false,
        message: error.response?.data?.error?.message || error.message,
        provider: 'deepseek',
        error: error.message,
      };
    }
  }

  /**
   * Test the OpenAI API connection
   */
  async testOpenAIConnection() {
    try {
      if (!isProviderEnabled('openai')) {
        return {
          success: false,
          message: 'OpenAI API key not configured',
          provider: 'openai',
        };
      }

      const config = getProviderConfig('openai');
      const url = `${config.apiUrl}/chat/completions`;

      const response = await axios.post(
        url,
        {
          model: config.model,
          messages: [
            {
              role: 'user',
              content: 'Hello, this is a connection test.',
            },
          ],
          max_tokens: 50,
        },
        {
          headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${config.apiKey}`,
          },
          timeout: config.timeout,
        }
      );

      if (response.status === 200 && response.data.choices) {
        return {
          success: true,
          message: 'OpenAI API connection successful',
          provider: 'openai',
          model: config.model,
        };
      }

      return {
        success: false,
        message: 'Unexpected response from OpenAI API',
        provider: 'openai',
      };
    } catch (error) {
      return {
        success: false,
        message: error.response?.data?.error?.message || error.message,
        provider: 'openai',
        error: error.message,
      };
    }
  }

  /**
   * Test the Claude API connection
   */
  async testClaudeConnection() {
    try {
      if (!isProviderEnabled('claude')) {
        return {
          success: false,
          message: 'Claude API key not configured',
          provider: 'claude',
        };
      }

      const config = getProviderConfig('claude');
      const url = `${config.apiUrl}/messages`;

      const response = await axios.post(
        url,
        {
          model: config.model,
          max_tokens: 50,
          messages: [
            {
              role: 'user',
              content: 'Hello, this is a connection test.',
            },
          ],
        },
        {
          headers: {
            'Content-Type': 'application/json',
            'x-api-key': config.apiKey,
            'anthropic-version': config.version,
          },
          timeout: config.timeout,
        }
      );

      if (response.status === 200 && response.data.content) {
        return {
          success: true,
          message: 'Claude API connection successful',
          provider: 'claude',
          model: config.model,
        };
      }

      return {
        success: false,
        message: 'Unexpected response from Claude API',
        provider: 'claude',
      };
    } catch (error) {
      return {
        success: false,
        message: error.response?.data?.error?.message || error.message,
        provider: 'claude',
        error: error.message,
      };
    }
  }

  /**
   * Test all LLM connections
   */
  async testAllConnections() {
    const results = {
      gemini: await this.testGeminiConnection(),
      deepseek: await this.testDeepSeekConnection(),
      openai: await this.testOpenAIConnection(),
      claude: await this.testClaudeConnection(),
    };

    return results;
  }

  /**
   * Generate content with Gemini
   */
  async generateWithGemini(prompt, options = {}) {
    try {
      if (!isProviderEnabled('gemini')) {
        throw new Error('Gemini API not configured');
      }

      const config = getProviderConfig('gemini');
      const url = `${config.apiUrl}/models/${config.model}:generateContent`;

      const response = await axios.post(
        url,
        {
          contents: [
            {
              parts: [{ text: prompt }],
            },
          ],
          generationConfig: {
            temperature: options.temperature || llmConfig.temperature,
            maxOutputTokens: options.maxTokens || llmConfig.maxTokens,
          },
        },
        {
          headers: {
            'Content-Type': 'application/json',
          },
          params: {
            key: config.apiKey,
          },
          timeout: config.timeout,
        }
      );

      if (response.data?.candidates?.[0]?.content?.parts?.[0]?.text) {
        return {
          success: true,
          content: response.data.candidates[0].content.parts[0].text,
          provider: 'gemini',
        };
      }

      throw new Error('Invalid response format from Gemini');
    } catch (error) {
      throw new Error(`Gemini API error: ${error.message}`);
    }
  }

  /**
   * Generate content with DeepSeek
   */
  async generateWithDeepSeek(prompt, options = {}) {
    try {
      if (!isProviderEnabled('deepseek')) {
        throw new Error('DeepSeek API not configured');
      }

      const config = getProviderConfig('deepseek');
      const url = `${config.apiUrl}/chat/completions`;

      const response = await axios.post(
        url,
        {
          model: config.model,
          messages: [{ role: 'user', content: prompt }],
          temperature: options.temperature || llmConfig.temperature,
          max_tokens: options.maxTokens || llmConfig.maxTokens,
        },
        {
          headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${config.apiKey}`,
          },
          timeout: config.timeout,
        }
      );

      if (response.data?.choices?.[0]?.message?.content) {
        return {
          success: true,
          content: response.data.choices[0].message.content,
          provider: 'deepseek',
        };
      }

      throw new Error('Invalid response format from DeepSeek');
    } catch (error) {
      throw new Error(`DeepSeek API error: ${error.message}`);
    }
  }

  /**
   * Generate content with OpenAI
   */
  async generateWithOpenAI(prompt, options = {}) {
    try {
      if (!isProviderEnabled('openai')) {
        throw new Error('OpenAI API not configured');
      }

      const config = getProviderConfig('openai');
      const url = `${config.apiUrl}/chat/completions`;

      const response = await axios.post(
        url,
        {
          model: config.model,
          messages: [{ role: 'user', content: prompt }],
          temperature: options.temperature || llmConfig.temperature,
          max_tokens: options.maxTokens || llmConfig.maxTokens,
        },
        {
          headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${config.apiKey}`,
          },
          timeout: config.timeout,
        }
      );

      if (response.data?.choices?.[0]?.message?.content) {
        return {
          success: true,
          content: response.data.choices[0].message.content,
          provider: 'openai',
        };
      }

      throw new Error('Invalid response format from OpenAI');
    } catch (error) {
      throw new Error(`OpenAI API error: ${error.message}`);
    }
  }

  /**
   * Generate content with Claude
   */
  async generateWithClaude(prompt, options = {}) {
    try {
      if (!isProviderEnabled('claude')) {
        throw new Error('Claude API not configured');
      }

      const config = getProviderConfig('claude');
      const url = `${config.apiUrl}/messages`;

      const response = await axios.post(
        url,
        {
          model: config.model,
          max_tokens: options.maxTokens || llmConfig.maxTokens,
          temperature: options.temperature || llmConfig.temperature,
          messages: [
            {
              role: 'user',
              content: prompt,
            },
          ],
        },
        {
          headers: {
            'Content-Type': 'application/json',
            'x-api-key': config.apiKey,
            'anthropic-version': config.version,
          },
          timeout: config.timeout,
        }
      );

      if (response.data?.content?.[0]?.text) {
        return {
          success: true,
          content: response.data.content[0].text,
          provider: 'claude',
        };
      }

      throw new Error('Invalid response format from Claude');
    } catch (error) {
      throw new Error(`Claude API error: ${error.message}`);
    }
  }

  /**
   * Generate content with the default or a specified LLM provider
   */
  async generate(prompt, provider = null, options = {}) {
    const selectedProvider = provider || llmConfig.defaultProvider;

    switch (selectedProvider) {
      case 'gemini':
        return await this.generateWithGemini(prompt, options);
      case 'deepseek':
        return await this.generateWithDeepSeek(prompt, options);
      case 'openai':
        return await this.generateWithOpenAI(prompt, options);
      case 'claude':
        return await this.generateWithClaude(prompt, options);
      default:
        throw new Error(`Unknown provider: ${selectedProvider}`);
    }
  }
}

module.exports = new LLMService();
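For reference, a short usage sketch of the exported singleton; the relative require path, the chosen provider, and the option values are illustrative assumptions about the surrounding project:

```javascript
const llmService = require('./services/llm.service');

async function demo() {
  // Check which providers have API keys configured.
  const status = await llmService.testAllConnections();
  console.log(status);

  // Use the default provider from llm.config, or name one explicitly.
  const result = await llmService.generate('Summarize CORS in one sentence.', 'claude', {
    maxTokens: 200,
    temperature: 0.7,
  });
  console.log(result.content);
}

demo().catch(console.error);
```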