Initial commit with Llama API client and docs

Add Python scripts for Llama API chat clients, endpoint testing, and quick tests. Include documentation (README, CONTRIBUTING, 操作指南), license, and .gitignore. Supports multiple endpoints and models for OpenAI-compatible Llama API usage.
This commit is contained in:
2025-09-19 21:44:02 +08:00
parent 4e28c131d2
commit 8a929936ad
18 changed files with 2073 additions and 0 deletions

99
llama_test.py Normal file
View File

@@ -0,0 +1,99 @@
import os
import sys

from openai import OpenAI
# Endpoint configuration.
# SECURITY NOTE(review): the fallback key below is a secret committed to source
# control — it should be rotated and removed. Prefer setting LLAMA_API_KEY in
# the environment; the literal remains only for backward compatibility.
API_KEY = os.environ.get(
    "LLAMA_API_KEY",
    "paVrIT+XU1NhwCAOb0X4aYi75QKogK5YNMGvQF1dCyo=",
)
BASE_URL = "https://llama.theaken.com/v1"

# Models exposed by the endpoint. Index order matters: main() maps the user's
# numeric menu choice onto this list.
AVAILABLE_MODELS = [
    "gpt-oss-120b",
    "deepseek-r1-671b",
    "qwen3-embedding-8b",
]
def chat_with_llama(model_name="gpt-oss-120b"):
    """Run an interactive multi-turn chat loop against the Llama API.

    Args:
        model_name: One of AVAILABLE_MODELS; forwarded to the completions call.

    The full conversation is kept in ``messages`` so each request carries the
    prior turns. The loop ends when the user types 'exit' or 'quit'.
    """
    client = OpenAI(
        api_key=API_KEY,
        base_url=BASE_URL
    )
    print(f"\n使用模型: {model_name}")
    print("-" * 50)
    # Fixed garbled prompt: the original ran 'exit''quit' together with no
    # separator between the two commands.
    print("輸入 'exit' 或 'quit' 來結束對話")
    print("-" * 50)
    messages = []
    while True:
        user_input = input("\n你: ").strip()
        if user_input.lower() in ['exit', 'quit']:
            print("對話結束")
            break
        if not user_input:
            continue
        messages.append({"role": "user", "content": user_input})
        try:
            response = client.chat.completions.create(
                model=model_name,
                messages=messages,
                temperature=0.7,
                max_tokens=2000
            )
            assistant_reply = response.choices[0].message.content
            print(f"\nAI: {assistant_reply}")
            messages.append({"role": "assistant", "content": assistant_reply})
        except Exception as e:
            # Drop the failed user turn so an unanswered message does not
            # linger in the history sent with every subsequent request.
            messages.pop()
            print(f"\n錯誤: {str(e)}")
            print("請檢查網路連接和 API 設定")
def test_connection():
    """Probe the Llama API with one tiny completion request.

    Returns:
        True when the endpoint answered successfully, False on any error.
    """
    print("測試連接到 Llama API...")
    client = OpenAI(api_key=API_KEY, base_url=BASE_URL)
    try:
        probe = client.chat.completions.create(
            model="gpt-oss-120b",
            messages=[{"role": "user", "content": "Hello, this is a test message."}],
            max_tokens=50
        )
        print("[OK] 連接成功!")
        print(f"測試回應: {probe.choices[0].message.content}")
    except Exception as e:
        print(f"[ERROR] 連接失敗: {str(e)[:200]}")
        return False
    return True
def main():
    """Entry point: list models, verify connectivity, then start the chat."""
    print("=" * 50)
    print("Llama 模型對話測試程式")
    print("=" * 50)
    print("\n可用的模型:")
    for i, model in enumerate(AVAILABLE_MODELS, 1):
        print(f"  {i}. {model}")
    if test_connection():
        # The prompt range follows the list length instead of the original
        # hard-coded "1-3" (which also lacked a separator before 預設).
        print(f"\n選擇要使用的模型 (輸入數字 1-{len(AVAILABLE_MODELS)}，預設: 1):")
        choice = input().strip()
        # Generalized selection: any valid 1-based number picks that model;
        # non-numeric or out-of-range input falls back to the first model,
        # matching the original default behavior.
        try:
            index = int(choice) - 1
        except ValueError:
            index = 0
        if not 0 <= index < len(AVAILABLE_MODELS):
            index = 0
        chat_with_llama(AVAILABLE_MODELS[index])
# Run the interactive test program only when executed as a script.
if __name__ == "__main__":
    main()