Initial commit: Llama API Client with full documentation

- Added complete Python client for Llama AI models
- Support for internal network endpoints (tested and working)
- Support for external network endpoints (configured)
- Interactive chat interface with multiple models
- Automatic endpoint testing and failover
- Response cleaning for special markers
- Full documentation in English and Chinese
- Complete test suite and examples
- MIT License and contribution guidelines
This commit is contained in:
2025-09-19 21:38:15 +08:00
commit c6cc91da7f
18 changed files with 2072 additions and 0 deletions

54
quick_test.py Normal file
View File

@@ -0,0 +1,54 @@
"""
快速測試內網 Llama API
"""
from openai import OpenAI
# API settings
# NOTE(review): hard-coded API key checked into source — move to an
# environment variable or config file before publishing this repo.
API_KEY = "paVrIT+XU1NhwCAOb0X4aYi75QKogK5YNMGvQF1dCyo="
BASE_URL = "http://192.168.0.6:21180/v1"  # Use the first available endpoint
def _clean_answer(answer):
    """Strip model reasoning/markup wrappers from a raw completion string.

    Two marker styles appear in responses from these endpoints:

    - ``<think>...</think>`` reasoning blocks — keep only the text after
      the closing tag. Previously an *unclosed* ``<think>`` tag was left
      in the printed output because the code guarded on ``"<think>"`` but
      split on ``"</think>"``; now the opening tag is stripped as well.
    - ``<|channel|>...<|message|>`` channel framing — keep only the text
      after the last ``<|message|>`` marker.

    Returns the cleaned answer text (unchanged if no markers are present).
    """
    if "</think>" in answer:
        # Matched reasoning block: keep everything after the closing tag.
        answer = answer.split("</think>")[-1].strip()
    elif "<think>" in answer:
        # Unmatched opening tag: drop the marker and anything before it.
        answer = answer.split("<think>")[-1].strip()
    if "<|channel|>" in answer:
        answer = answer.split("<|message|>")[-1].strip()
    return answer


def quick_test():
    """Smoke-test the internal Llama API endpoint.

    Sends a few fixed prompts through the OpenAI-compatible
    chat-completions API at ``BASE_URL`` and prints the cleaned answers.
    Network I/O only; no return value. Per-prompt failures are reported
    and skipped so one bad request does not abort the whole run.
    """
    print("連接到內網 API...")
    print(f"端點: {BASE_URL}")
    print("-" * 50)
    client = OpenAI(
        api_key=API_KEY,
        base_url=BASE_URL
    )
    # Fixed smoke-test prompts: greeting, arithmetic, small talk.
    test_messages = [
        "你好,請自我介紹",
        "1 + 1 等於多少?",
        "今天天氣如何?"
    ]
    for msg in test_messages:
        print(f"\n問: {msg}")
        try:
            response = client.chat.completions.create(
                model="gpt-oss-120b",
                messages=[
                    {"role": "user", "content": msg}
                ],
                temperature=0.7,
                max_tokens=200
            )
            answer = _clean_answer(response.choices[0].message.content)
            print(f"答: {answer}")
        except Exception as e:
            # Best-effort smoke test: print a truncated error and move on
            # to the next prompt instead of crashing.
            print(f"錯誤: {str(e)[:100]}")


if __name__ == "__main__":
    quick_test()