Current status includes:

- Virtual monitor surface and components
- Monitor store for state management
- Tool call animations and transitions
- Liquid glass shader integration

Known issue to fix: tool status display timing. The "正在xx" ("currently doing xx") label appears only after tool execution completes instead of when the tool call starts; a hedged sketch of the intended ordering follows below.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
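The fix amounts to publishing the in-progress label when the tool call is dispatched, not after it returns. Below is a minimal Python sketch of that ordering, for illustration only: MonitorStore, set_tool_status, and run_tool are hypothetical names and are not part of this repository.

from __future__ import annotations

from typing import Any, Callable


class MonitorStore:
    """Hypothetical stand-in for the monitor store's tool-status state."""

    def __init__(self) -> None:
        self.tool_status: str | None = None

    def set_tool_status(self, status: str | None) -> None:
        self.tool_status = status


def run_tool(store: MonitorStore, label: str, tool: Callable[[], Any]) -> Any:
    # Publish the "正在..." (in-progress) label before the tool runs, not after it returns.
    store.set_tool_status(f"正在{label}")
    try:
        return tool()
    finally:
        # Clear the label (or swap in a completed state) once the call finishes.
        store.set_tool_status(None)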
69 lines · 2.3 KiB · Python
"""Official Moonshot sample adapted to print raw JSON responses."""

from __future__ import annotations

import json
import os
from pathlib import Path
from typing import Any, Dict

from openai import OpenAI

def resolve_env_value(keys: tuple[str, ...], *, default: str | None = None) -> str:
    """Try process env first, then fall back to `.env` file."""
    for key in keys:
        value = os.environ.get(key)
        if value:
            return value
    env_path = Path(".env")
    if env_path.exists():
        for line in env_path.read_text(encoding="utf-8").splitlines():
            stripped = line.strip()
            # Skip blank lines, comments, and lines without a KEY=VALUE pair.
            if not stripped or stripped.startswith("#") or "=" not in stripped:
                continue
            env_key, env_value = stripped.split("=", 1)
            if env_key in keys and env_value.strip():
                return env_value.strip()
    if default is not None:
        return default
    raise RuntimeError(f"Missing required environment variable from {keys}.")
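
# Illustrative `.env` layout understood by resolve_env_value (placeholder values,
# not taken from this repository):
#
#   MOONSHOT_API_KEY=<your Moonshot API key>
#   MOONSHOT_BASE_URL=https://api.moonshot.cn/v1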


client = OpenAI(
    api_key=resolve_env_value(("MOONSHOT_API_KEY", "AGENT_API_KEY")),
    base_url=resolve_env_value(
        ("MOONSHOT_BASE_URL", "AGENT_API_BASE_URL"), default="https://api.moonshot.cn/v1"
    ),
)

# Conversation history seeded with the official Kimi system prompt (in Chinese):
# it introduces the assistant as Kimi from Moonshot AI, asks for safe, helpful,
# accurate answers, and forbids translating the name "Moonshot AI".
history = [
    {
        "role": "system",
        "content": (
            "你是 Kimi,由 Moonshot AI 提供的人工智能助手,你更擅长中文和英文的对话。"
            "你会为用户提供安全,有帮助,准确的回答。同时,你会拒绝一切涉及恐怖主义,"
            "种族歧视,黄色暴力等问题的回答。Moonshot AI 为专有名词,不可翻译成其他语言。"
        ),
    }
]


def chat(query: str, history_list: list[Dict[str, str]]) -> Dict[str, Any]:
    """Send one user turn, record the assistant reply, and return the raw response dict."""
    history_list.append({"role": "user", "content": query})
    completion = client.chat.completions.create(
        model="kimi-k2-turbo-preview",
        messages=history_list,
        temperature=0.6,
    )
    result_content = completion.choices[0].message.content
    history_list.append({"role": "assistant", "content": result_content})
    return completion.model_dump()
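
# chat() returns the full OpenAI-style completion payload (id, model, choices,
# usage, ...) as a plain dict, which the __main__ block below pretty-prints as JSON.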


if __name__ == "__main__":
    # Two Chinese queries: "What is the Earth's rotation period?" and "What about the Moon?"
    earth = chat("地球的自转周期是多少?", history)
    moon = chat("月球呢?", history)
    print(json.dumps(earth, ensure_ascii=False, indent=2))
    print(json.dumps(moon, ensure_ascii=False, indent=2))