主要變更: - 移除所有內網 IP (192.168.x.x) - 改用外網端點 (https://llama.theaken.com) - 新增 llama_external_api.py 專門處理外網連接 - 更新所有文檔為外網版本 - 加入備用端點自動切換機制 - 優化錯誤處理和超時設定
76 lines
2.3 KiB
Python
76 lines
2.3 KiB
Python
"""
|
|
快速測試 Llama API 外網連接
|
|
"""
|
|
|
|
from openai import OpenAI
|
|
import sys
|
|
|
|
# API 設定
|
|
API_KEY = "paVrIT+XU1NhwCAOb0X4aYi75QKogK5YNMGvQF1dCyo="
|
|
BASE_URL = "https://llama.theaken.com/v1" # 使用外網端點
|
|
|
|
def clean_answer(raw):
    """Strip model "thinking"/channel markup from a raw completion string.

    Some models wrap internal reasoning in ``<think>...</think>`` or emit
    ``<|channel|>...<|message|>`` framing; only the text after the closing
    marker is the user-facing answer.

    Args:
        raw: The raw ``message.content`` from the API, possibly ``None``
            (the SDK types content as optional).

    Returns:
        The cleaned answer text; an empty string when ``raw`` is ``None``.
    """
    # Guard: content may legitimately be None, and `"x" in None` raises
    # TypeError — the original inline code hit that and the broad except
    # masked it as a generic API error.
    if raw is None:
        return ""
    if "<think>" in raw:
        raw = raw.split("</think>")[-1].strip()
    if "<|channel|>" in raw:
        raw = raw.split("<|message|>")[-1].strip()
    return raw


def quick_test():
    """Run a quick connectivity test against the external Llama API endpoint.

    Sends a few canned chat prompts, prints the (truncated) answers, and
    reports per-request errors without aborting the whole run. Exits the
    process with status 1 if the client cannot be created at all.
    """
    print("="*50)
    print("Llama API 快速測試")
    print("="*50)
    print(f"連接到: {BASE_URL}")
    print("-" * 50)

    try:
        client = OpenAI(
            api_key=API_KEY,
            base_url=BASE_URL,
            timeout=15.0  # 15-second request timeout
        )

        # Canned test prompts (greeting, arithmetic, open-ended).
        test_messages = [
            "你好,請自我介紹",
            "1 + 1 等於多少?",
            "今天天氣如何?"
        ]

        for msg in test_messages:
            print(f"\n問: {msg}")

            try:
                response = client.chat.completions.create(
                    model="gpt-oss-120b",
                    messages=[
                        {"role": "user", "content": msg}
                    ],
                    temperature=0.7,
                    max_tokens=200
                )

                answer = clean_answer(response.choices[0].message.content)
                print(f"答: {answer[:200]}")  # limit displayed length

            except Exception as e:
                # Per-request failures are reported and the loop continues,
                # so one bad prompt doesn't abort the whole test run.
                error_msg = str(e)
                if "502" in error_msg:
                    print("錯誤: 伺服器暫時無法使用 (502)")
                elif "timeout" in error_msg.lower():
                    print("錯誤: 請求超時")
                else:
                    print(f"錯誤: {error_msg[:100]}")

        print("\n" + "="*50)
        print("測試完成!")

    except Exception as e:
        # Top-level boundary: client construction / unrecoverable failure.
        print(f"\n連接失敗: {str(e)[:100]}")
        print("\n建議:")
        print("1. 檢查網路連接")
        print("2. 確認可以訪問 https://llama.theaken.com")
        print("3. 稍後再試(如果是 502 錯誤)")
        sys.exit(1)
|
|
|
|
if __name__ == "__main__":
|
|
quick_test() |