feat: Adjust LLM temperature and update prompts to refine agent behavior (initial draft finalized and verified)
This commit is contained in:
@@ -21,7 +21,7 @@ class LLMConfig:
|
||||
api_key: str = os.environ.get("OPENAI_API_KEY", "sk-REDACTED")  # NOTE(review): hardcoded API key was committed here — redacted in this record; rotate the leaked credential and never ship a real key as a default
|
||||
base_url: str = os.environ.get("OPENAI_BASE_URL", "https://api.xiaomimimo.com/v1")
|
||||
model: str = os.environ.get("OPENAI_MODEL", "mimo-v2-flash")
|
||||
temperature: float = 0.3
|
||||
temperature: float = 0.5
|
||||
max_tokens: int = 131072
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
|
||||
Reference in New Issue
Block a user