feat: optimize AI suggestion and workorder sync - support same-day multiple update numbering - insert new suggestions at top maintaining reverse chronological order - reference process history when generating suggestions - simplify prompts to avoid forcing log analysis - fix Chinese comment encoding issues
This commit is contained in:
@@ -34,3 +34,27 @@ ANTHROPIC_CONFIG = LLMConfig(
|
||||
|
||||
# Use the Qwen model by default
DEFAULT_CONFIG = QWEN_CONFIG
|
||||
|
||||
|
||||
def get_default_llm_config() -> LLMConfig:
    """Return the default LLM configuration.

    Tries the unified configuration manager first; if it cannot be
    imported or raises, falls back to the locally defined DEFAULT_CONFIG.
    """
    # Per-field fallbacks used when a key is absent from the unified config.
    fallbacks = {
        "provider": "qwen",
        "api_key": "",
        "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
        "model": "qwen-plus-latest",
        "temperature": 0.7,
        "max_tokens": 2000,
    }
    try:
        from src.config.unified_config import get_config

        llm_dict = get_config().get_llm_config()
        # Build the config object, filling any missing keys from fallbacks.
        kwargs = {key: llm_dict.get(key, default)
                  for key, default in fallbacks.items()}
        return LLMConfig(**kwargs)
    except Exception:
        # Unified config unavailable or malformed — best-effort fallback
        # to the local default rather than failing at import time.
        return DEFAULT_CONFIG
|
||||
|
||||
Reference in New Issue
Block a user