hr-position-system/tests/test_ollama.py
DonaldFang 方士碩 a6af297623 backup: finish renaming tables with the HR_position_ prefix and compiling the field mapping table
Changes:
- Added the HR_position_ prefix to all database tables
- Compiled the full mapping table of field display names and IDs
- Modularized the JS files (admin.js, ai.js, csv.js, etc.)
- Reorganized the project structure (docs/, scripts/, tests/, etc.)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-09 12:05:20 +08:00


"""
Test Ollama API integration
"""
import requests
import json
import urllib3
# Disable SSL warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
API_URL = "https://ollama_pjapi.theaken.com"
print("=" * 60)
print("Testing Ollama API Connection")
print("=" * 60)
print()
# Test 1: List models
print("Test 1: Listing available models...")
try:
    response = requests.get(f"{API_URL}/v1/models", timeout=10, verify=False)
    print(f"Status Code: {response.status_code}")

    if response.status_code == 200:
        data = response.json()
        models = data.get('data', [])
        print(f"Found {len(models)} models:")
        for model in models[:5]:
            print(f" - {model.get('id', 'Unknown')}")
    else:
        print(f"Error: {response.text}")
except Exception as e:
    print(f"Error: {str(e)}")
print()
# Test 2: Chat completion
print("Test 2: Testing chat completion...")
try:
    chat_request = {
        "model": "qwen2.5:3b",
        "messages": [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Say hello in Chinese."}
        ],
        "temperature": 0.7,
        "max_tokens": 50
    }

    response = requests.post(
        f"{API_URL}/v1/chat/completions",
        json=chat_request,
        headers={'Content-Type': 'application/json'},
        timeout=60,
        verify=False
    )

    print(f"Status Code: {response.status_code}")

    if response.status_code == 200:
        result = response.json()
        text = result['choices'][0]['message']['content']
        print(f"Response: {text}")
    else:
        print(f"Error: {response.text}")
except Exception as e:
    print(f"Error: {str(e)}")
print()
print("=" * 60)