Remove non-existent fallback endpoints

Changes:
- Remove the fallback endpoint configuration from llama_chat.py
- Remove the fallback endpoint configuration from llama_full_api.py
- Simplify the endpoint testing logic
- Update all documentation to drop the fallback endpoint descriptions
- Focus on the three endpoints that actually exist (see the sketch below):
  * https://llama.theaken.com/v1
  * https://llama.theaken.com/v1/gpt-oss-120b
  * https://llama.theaken.com/v1/deepseek-r1-671b

The program structure is cleaner now that the phantom fallback options are gone.
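For context, a minimal sketch of what the simplified ENDPOINTS table presumably looks like after this commit. Only the three URLs are confirmed by the commit message; the dictionary key, entry names, and per-entry model lists are assumptions inferred from the diff context shown further down.

# Sketch only: the "主要" key, entry names, and per-entry model lists are
# assumptions; the three URLs come from the commit message above.
ENDPOINTS = {
    "主要": [
        {
            "name": "主要 API 端點",
            "url": "https://llama.theaken.com/v1",
            "models": ["gpt-oss-120b", "deepseek-r1-671b"]
        },
        {
            "name": "GPT-OSS-120B 端點",
            "url": "https://llama.theaken.com/v1/gpt-oss-120b",
            "models": ["gpt-oss-120b"]
        },
        {
            "name": "DeepSeek-R1-671B 端點",
            "url": "https://llama.theaken.com/v1/deepseek-r1-671b",
            "models": ["deepseek-r1-671b"]
        }
    ]
}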
2025-09-19 22:09:15 +08:00
parent 3c0fba5fc8
commit 390a8cc7f7
4 changed files with 25 additions and 80 deletions


@@ -33,18 +33,6 @@ ENDPOINTS = {
             "models": ["deepseek-r1-671b"]
         }
     ],
-    "備用": [
-        {
-            "name": "備用 API 端點 1",
-            "url": "https://api.llama.theaken.com/v1",
-            "models": ["gpt-oss-120b", "deepseek-r1-671b", "qwen3-embedding-8b"]
-        },
-        {
-            "name": "備用 API 端點 2",
-            "url": "https://llama-api.theaken.com/v1",
-            "models": ["gpt-oss-120b", "deepseek-r1-671b", "qwen3-embedding-8b"]
-        }
-    ]
 }
 
 def clean_response(text):
@@ -116,8 +104,8 @@ def test_all_endpoints():
     available_endpoints = []
 
-    # 測試主要端點
-    print("\n[主要端點測試]")
+    # 測試所有端點
+    print("\n[端點測試]")
 
     for endpoint in ENDPOINTS["主要"]:
         print(f" 測試 {endpoint['name']}...", end="", flush=True)
         if test_endpoint(endpoint):
@@ -126,16 +114,6 @@ def test_all_endpoints():
         else:
             print(" [FAIL]")
 
-    # 測試備用端點
-    print("\n[備用端點測試]")
-    for endpoint in ENDPOINTS["備用"]:
-        print(f" 測試 {endpoint['name']}...", end="", flush=True)
-        if test_endpoint(endpoint):
-            print(" [OK]")
-            available_endpoints.append(("備用", endpoint))
-        else:
-            print(" [FAIL]")
-
     return available_endpoints
 
 def chat_session(endpoint_info):
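Putting the two hunks in test_all_endpoints together, the function after this commit reads roughly as follows. This is a reconstruction from the diff context above, not the author's verbatim code: the body of test_endpoint is not part of this diff, so the HTTP probe shown here is only an assumed placeholder, and the "主要" tag appended alongside each endpoint mirrors the removed "備用" branch.

import requests

def test_endpoint(endpoint, timeout=5):
    # Assumed probe: the real implementation is not shown in this diff.
    try:
        resp = requests.get(f"{endpoint['url']}/models", timeout=timeout)
        return resp.status_code == 200
    except requests.RequestException:
        return False

def test_all_endpoints():
    available_endpoints = []

    # 測試所有端點
    print("\n[端點測試]")

    for endpoint in ENDPOINTS["主要"]:
        print(f" 測試 {endpoint['name']}...", end="", flush=True)
        if test_endpoint(endpoint):
            print(" [OK]")
            # Tag assumed to mirror the removed ("備用", endpoint) tuple.
            available_endpoints.append(("主要", endpoint))
        else:
            print(" [FAIL]")

    return available_endpoints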