feat: Add 5Why_ prefix to all database tables
- Rename all tables with 5Why_ prefix for namespace isolation
- Update models: User.js, Analysis.js, AuditLog.js
- Update routes: llmConfig.js
- Update scripts: seed-test-users.js, add-deepseek-config.js, add-ollama-config.js
- Add migrate-table-prefix.js script for database migration
- Update db_schema.sql with new table names
- Update views: 5Why_user_analysis_stats, 5Why_recent_analyses

Tables renamed:
- users -> 5Why_users
- analyses -> 5Why_analyses
- analysis_perspectives -> 5Why_analysis_perspectives
- analysis_whys -> 5Why_analysis_whys
- llm_configs -> 5Why_llm_configs
- system_settings -> 5Why_system_settings
- audit_logs -> 5Why_audit_logs
- sessions -> 5Why_sessions

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
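The migration itself ships in migrate-table-prefix.js, which is not reproduced on this page. As a rough sketch only, assuming the same pool/query helpers exported from config.js that the script below imports, the rename pass for the tables listed above could look like this (everything here beyond those names is illustrative, not the actual script):

#!/usr/bin/env node
// Hypothetical sketch of the table-prefix migration; the real logic lives in
// scripts/migrate-table-prefix.js, which is not shown in this diff.
import { pool, query } from '../config.js';

const renames = [
  ['users', '5Why_users'],
  ['analyses', '5Why_analyses'],
  ['analysis_perspectives', '5Why_analysis_perspectives'],
  ['analysis_whys', '5Why_analysis_whys'],
  ['llm_configs', '5Why_llm_configs'],
  ['system_settings', '5Why_system_settings'],
  ['audit_logs', '5Why_audit_logs'],
  ['sessions', '5Why_sessions'],
];

async function migrate() {
  try {
    for (const [from, to] of renames) {
      // Backticks guard the new identifiers, which start with a digit
      await query(`RENAME TABLE \`${from}\` TO \`${to}\``);
      console.log(`Renamed ${from} -> ${to}`);
    }
  } finally {
    await pool.end();
  }
}

migrate().catch((error) => {
  console.error('Migration failed:', error);
  process.exit(1);
});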
scripts/add-ollama-config.js (new file, 81 lines)
@@ -0,0 +1,81 @@
#!/usr/bin/env node
/**
 * Add Ollama LLM Configuration
 * This script adds an Ollama configuration to the 5Why_llm_configs table
 */

import { pool, query } from '../config.js';
import dotenv from 'dotenv';

dotenv.config();

async function addOllamaConfig() {
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
  console.log(' Adding Ollama LLM Configuration');
  console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');

  try {
    // Check if Ollama config already exists
    const existing = await query(
      `SELECT id FROM 5Why_llm_configs WHERE provider = 'Ollama' LIMIT 1`
    );

    if (existing.length > 0) {
      console.log('✅ Ollama configuration already exists (ID:', existing[0].id, ')');
      console.log(' Skipping...\n');
      return;
    }

    // First, deactivate all existing configs
    await query('UPDATE 5Why_llm_configs SET is_active = 0');

    // Insert Ollama configuration
    const result = await query(
      `INSERT INTO 5Why_llm_configs
        (provider, api_url, api_key, model_name, temperature, max_tokens, timeout, is_active)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
      [
        'Ollama',
        process.env.OLLAMA_API_URL || 'https://ollama_pjapi.theaken.com',
        null, // Ollama doesn't need an API key
        process.env.OLLAMA_MODEL || 'qwen2.5:3b',
        0.7,
        6000,
        120000,
        1 // Set as active
      ]
    );

    console.log('✅ Ollama configuration added successfully!');
    console.log(' Config ID:', result.insertId);
    console.log(' Provider: Ollama');
    console.log(' Model: qwen2.5:3b');
    console.log(' API URL:', process.env.OLLAMA_API_URL || 'https://ollama_pjapi.theaken.com');
    console.log(' Status: Active\n');

    console.log('📝 Notes:');
    console.log(' - Ollama is FREE and runs on your infrastructure');
    console.log(' - No API key required');
    console.log(' - Current model: qwen2.5:3b');
    console.log(' - You can manage it in Admin Panel > LLM 配置\n');
  } catch (error) {
    console.error('❌ Error adding Ollama configuration:', error.message);
    process.exit(1);
  } finally {
    await pool.end();
  }
}

// Run the script
addOllamaConfig()
  .then(() => {
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
    console.log(' Configuration Complete');
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
    process.exit(0);
  })
  .catch((error) => {
    console.error('Fatal error:', error);
    process.exit(1);
  });
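Assuming a .env with OLLAMA_API_URL and OLLAMA_MODEL (or accepting the defaults hard-coded above), the script can be run from the project root with node scripts/add-ollama-config.js; re-running it is harmless because it exits early when an Ollama row already exists.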