feat: 优化AI建议功能和工单同步 - 支持同一天多次更新编号 - 新建议插入到顶部保持时间倒序 - 参考处理过程记录生成建议 - 简化提示词避免强制日志分析 - 修复中文注释乱码问题
This commit is contained in:
@@ -34,3 +34,27 @@ ANTHROPIC_CONFIG = LLMConfig(
|
||||
|
||||
# Default to the Qwen model (QWEN_CONFIG is defined earlier in this file).
DEFAULT_CONFIG = QWEN_CONFIG
|
||||
|
||||
def get_default_llm_config() -> LLMConfig:
    """Return the default LLM configuration.

    Prefers values from the unified configuration manager; if it is
    unavailable or fails for any reason, falls back to the local
    DEFAULT_CONFIG.
    """
    # Fallback defaults applied per-field when the unified config omits a key.
    field_defaults = {
        "provider": "qwen",
        "api_key": "",
        "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
        "model": "qwen-plus-latest",
        "temperature": 0.7,
        "max_tokens": 2000,
    }
    try:
        from src.config.unified_config import get_config

        llm_dict = get_config().get_llm_config()
        # Build the LLMConfig, filling any missing keys with the defaults above.
        kwargs = {name: llm_dict.get(name, default)
                  for name, default in field_defaults.items()}
        return LLMConfig(**kwargs)
    except Exception:
        # Unified config is unavailable -- use the local configuration.
        return DEFAULT_CONFIG
|
||||
|
||||
Reference in New Issue
Block a user