docs: update README and CLAUDE.md to v2.2.0

- Added documentation for audit tracking (IP address, invocation method).
- Updated database model descriptions for enhanced WorkOrder and Conversation fields.
- Documented the new UnifiedConfig system.
- Reflected enhanced logging transparency for knowledge base parsing.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
zhaojie
2026-02-11 00:08:09 +08:00
parent 2026007045
commit c3560b43fd
218 changed files with 3354 additions and 5096 deletions

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -7,73 +7,71 @@ import logging
from .models import Base
from .cache_manager import cache_manager, cache_query
from ..config.config import Config
from src.config.unified_config import get_config
logger = logging.getLogger(__name__)
class DatabaseManager:
"""数据库管理器"""
def __init__(self):
self.engine = None
self.SessionLocal = None
self._initialize_database()
def _initialize_database(self):
"""初始化数据库连接"""
try:
db_config = Config.get_database_config()
config = get_config()
db_url = config.database.url
# 根据数据库类型选择不同的连接参数
if "mysql" in db_config["url"]:
if "mysql" in db_url:
# MySQL配置 - 优化连接池和重连机制
self.engine = create_engine(
db_config["url"],
echo=db_config["echo"],
pool_size=10, # 连接池大小
max_overflow=20, # 溢出连接数
pool_pre_ping=True, # 连接前检查连接是否有效
pool_recycle=3600, # 1小时后回收连接
pool_timeout=60, # 连接池超时(秒)
db_url,
echo=False,
pool_size=config.database.pool_size,
max_overflow=config.database.max_overflow,
pool_pre_ping=True,
pool_recycle=config.database.pool_recycle,
pool_timeout=config.database.pool_timeout,
connect_args={
"charset": "utf8mb4",
"autocommit": False,
"connect_timeout": 30, # 连接超时
"read_timeout": 60, # 读取超时
"write_timeout": 60, # 写入超时
"max_allowed_packet": 64*1024*1024, # 64MB
"connect_timeout": 30, # 连接超时(秒)- 适用于网络延迟较大的情况
"read_timeout": 30, # 读取超时(秒)
"write_timeout": 30, # 写入超时(秒)
"connect_timeout": 30,
"read_timeout": 60,
"write_timeout": 60,
"max_allowed_packet": 64 * 1024 * 1024,
}
)
else:
# SQLite配置 - 优化性能
self.engine = create_engine(
db_config["url"],
echo=db_config["echo"],
db_url,
echo=False,
poolclass=StaticPool,
connect_args={
"check_same_thread": False,
"timeout": 20, # 连接超时
"isolation_level": None # 自动提交模式
"timeout": 20,
"isolation_level": None
}
)
self.SessionLocal = sessionmaker(
autocommit=False,
autoflush=False,
bind=self.engine
)
# 创建所有表
Base.metadata.create_all(bind=self.engine)
logger.info("数据库初始化成功")
except Exception as e:
logger.error(f"数据库初始化失败: {e}")
raise
@contextmanager
def get_session(self) -> Generator[Session, None, None]:
"""获取数据库会话的上下文管理器"""
@@ -112,16 +110,16 @@ class DatabaseManager:
except Exception as e:
logger.error(f"数据库重新连接失败: {e}")
return False
def get_session_direct(self) -> Session:
"""直接获取数据库会话"""
return self.SessionLocal()
def close_session(self, session: Session):
"""关闭数据库会话"""
if session:
session.close()
def test_connection(self) -> bool:
"""测试数据库连接"""
try:
@@ -131,12 +129,12 @@ class DatabaseManager:
except Exception as e:
logger.error(f"数据库连接测试失败: {e}")
return False
@cache_query(ttl=60) # 缓存1分钟
def get_cached_query(self, query_key: str, query_func, *args, **kwargs):
"""执行带缓存的查询"""
return query_func(*args, **kwargs)
def invalidate_cache_pattern(self, pattern: str):
"""根据模式清除缓存"""
try:
@@ -144,7 +142,7 @@ class DatabaseManager:
logger.info(f"缓存已清除: {pattern}")
except Exception as e:
logger.error(f"清除缓存失败: {e}")
def get_cache_stats(self):
"""获取缓存统计信息"""
return cache_manager.get_stats()

View File

@@ -4,18 +4,19 @@ import logging
from typing import Dict, List, Optional, Any
from datetime import datetime
from ..config.config import Config
from src.config.unified_config import get_config
logger = logging.getLogger(__name__)
class QwenClient:
"""阿里云千问API客户端"""
def __init__(self):
self.api_config = Config.get_api_config()
self.base_url = self.api_config["base_url"]
self.api_key = self.api_config["api_key"]
self.model_name = self.api_config["model_name"]
config = get_config()
self.base_url = config.llm.base_url or "https://dashscope.aliyuncs.com/compatible-mode/v1"
self.api_key = config.llm.api_key
self.model_name = config.llm.model
self.timeout = config.llm.timeout
self.headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json"
@@ -43,7 +44,7 @@ class QwenClient:
url,
headers=self.headers,
json=payload,
timeout=Config.RESPONSE_TIMEOUT
timeout=self.timeout
)
if response.status_code == 200:

View File

@@ -66,6 +66,8 @@ class Conversation(Base):
confidence_score = Column(Float)
knowledge_used = Column(Text) # 使用的知识库条目
response_time = Column(Float) # 响应时间(秒)
ip_address = Column(String(45), nullable=True) # IP地址
invocation_method = Column(String(50), nullable=True) # 调用方式websocket, api等
work_order = relationship("WorkOrder", back_populates="conversations")

View File

@@ -90,7 +90,9 @@ class QueryOptimizer:
'assistant_response': conv.assistant_response,
'timestamp': conv.timestamp.isoformat() if conv.timestamp else None,
'confidence_score': conv.confidence_score,
'work_order_id': conv.work_order_id
'work_order_id': conv.work_order_id,
'ip_address': conv.ip_address,
'invocation_method': conv.invocation_method
})
# 记录查询时间

View File

@@ -13,7 +13,7 @@ from collections import defaultdict, deque
import psutil
import redis
from ..config.config import Config
from src.config.unified_config import get_config
from .database import db_manager
logger = logging.getLogger(__name__)
@@ -82,12 +82,7 @@ class SystemOptimizer:
def _start_monitoring(self):
"""启动监控线程"""
try:
# 检查是否启用系统监控
enable_monitoring = Config.get_config().get('system_monitoring', True)
if not enable_monitoring:
logger.info("系统监控已禁用")
return
# 默认启用系统监控
monitor_thread = threading.Thread(target=self._monitor_system, daemon=True)
monitor_thread.start()
except Exception as e: