變更內容: - 所有資料表加上 HR_position_ 前綴 - 整理完整欄位顯示名稱與 ID 對照表 - 模組化 JS 檔案 (admin.js, ai.js, csv.js 等) - 專案結構優化 (docs/, scripts/, tests/ 等) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
111 lines
3.0 KiB
Python
111 lines
3.0 KiB
Python
"""
|
|
Final Ollama API Integration Test
|
|
Tests the integration with the Flask app
|
|
"""
|
|
import requests
|
|
import json
|
|
import sys
|
|
|
|
# Set UTF-8 encoding for output
|
|
if sys.platform == 'win32':
|
|
import codecs
|
|
sys.stdout = codecs.getwriter('utf-8')(sys.stdout.buffer, 'strict')
|
|
|
|
print("=" * 60)
|
|
print("Ollama API Integration Test (via Flask App)")
|
|
print("=" * 60)
|
|
print()
|
|
|
|
# Test 1: Test Ollama connection status
|
|
print("Test 1: Checking Ollama API configuration...")
|
|
try:
|
|
response = requests.get("http://localhost:5000/api/llm/config", timeout=10)
|
|
if response.status_code == 200:
|
|
config = response.json()
|
|
ollama_config = config.get('ollama', {})
|
|
print(f" Name: {ollama_config.get('name', 'N/A')}")
|
|
print(f" Enabled: {ollama_config.get('enabled', False)}")
|
|
print(f" Endpoint: {ollama_config.get('endpoint', 'N/A')}")
|
|
print(" Status: ✓ Configuration OK")
|
|
else:
|
|
print(f" Status: ✗ Error {response.status_code}")
|
|
except Exception as e:
|
|
print(f" Status: ✗ Error: {str(e)}")
|
|
|
|
print()
|
|
|
|
def _report_generation(prompt, *, show_status=False, save_path=None):
    """POST a generation request to the Flask app's Ollama endpoint and print a report.

    Args:
        prompt: Prompt text sent to the model (100-token cap).
        show_status: When True, also print the HTTP status code line.
        save_path: When given, print length/preview and save the full response
            to this path; otherwise print the full response inline.
    """
    try:
        payload = {
            "api": "ollama",
            "prompt": prompt,
            "max_tokens": 100
        }

        response = requests.post(
            "http://localhost:5000/api/llm/generate",
            json=payload,
            headers={'Content-Type': 'application/json'},
            timeout=60  # generation can be slow on first model load
        )

        if show_status:
            print(f" Status Code: {response.status_code}")

        result = response.json()

        if result.get('success'):
            text = result.get('text', '')
            print(" Status: ✓ Generation successful")
            if save_path:
                print(f" Response length: {len(text)} characters")
                print(f" Response preview: {text[:100]}...")
                # Save full response to file
                with open(save_path, 'w', encoding='utf-8') as f:
                    f.write(text)
                print(f" Full response saved to: {save_path}")
            else:
                print(f" Response:\n{text}")
        else:
            print(" Status: ✗ Generation failed")
            print(f" Error: {result.get('error', 'Unknown error')}")

    except Exception as e:
        # Connection failures, timeouts, and non-JSON responses all land here.
        print(f" Status: ✗ Error: {str(e)}")


# Test 2: Generate text using Ollama (Chinese prompt; saves the full response).
print("Test 2: Testing text generation with Ollama...")
_report_generation(
    "請用中文回答:你好嗎?",
    show_status=True,
    save_path='ollama_response.txt',
)

print()

# Test 3: Test with English prompt (response printed inline).
print("Test 3: Testing with English prompt...")
_report_generation("Write a haiku about coding.")

print()

print("=" * 60)
print("Integration test completed!")
print("=" * 60)