feat: 对话历史页面租户分组展示功能

- 新增 ConversationHistoryManager.get_tenant_summary() 按租户聚合会话统计
- get_sessions_paginated() 和 get_conversation_analytics() 增加 tenant_id 过滤
- 新增 GET /api/conversations/tenants 租户汇总端点
- sessions 和 analytics API 端点支持 tenant_id 查询参数
- 前端实现租户卡片列表视图和租户详情会话表格视图
- 实现面包屑导航、搜索范围限定、统计面板上下文切换
- 会话删除后自动检测空租户并返回列表视图
- dashboard.html 添加租户视图 DOM 容器
- 交互模式与知识库租户分组视图保持一致
This commit is contained in:
2026-04-01 16:11:02 +08:00
parent e14e3ee7a5
commit 7013e9db70
27 changed files with 2753 additions and 276 deletions

View File

@@ -14,7 +14,7 @@ from ..core.database import db_manager
from ..core.models import Conversation, WorkOrder, WorkOrderSuggestion, KnowledgeEntry, ChatSession
from ..core.redis_manager import redis_manager
from src.config.unified_config import get_config
from sqlalchemy import and_, or_, desc
from sqlalchemy import and_, or_, desc, func, case
logger = logging.getLogger(__name__)
@@ -634,7 +634,8 @@ class ConversationHistoryManager:
def get_conversation_analytics(
self,
work_order_id: Optional[int] = None,
days: int = 7
days: int = 7,
tenant_id: Optional[str] = None
) -> Dict[str, Any]:
"""获取对话分析数据包含AI建议统计"""
try:
@@ -652,6 +653,8 @@ class ConversationHistoryManager:
conv_query = session.query(Conversation)
if work_order_id:
conv_query = conv_query.filter(Conversation.work_order_id == work_order_id)
if tenant_id is not None:
conv_query = conv_query.filter(Conversation.tenant_id == tenant_id)
conversations = conv_query.filter(
Conversation.timestamp >= cutoff_date
@@ -718,6 +721,49 @@ class ConversationHistoryManager:
logger.error(f"获取对话分析数据失败: {e}")
return {}
# ==================== 租户汇总方法 ====================
def get_tenant_summary(self) -> List[Dict[str, Any]]:
    """
    Aggregate ChatSession rows by tenant_id and return one summary per tenant.

    Each summary carries the session count, total message count, number of
    'active' sessions, and the most recent activity timestamp (ISO-8601 or
    None). Results are ordered by last_active_time descending. Returns an
    empty list when there are no rows or the database query fails.
    """
    try:
        with db_manager.get_session() as session:
            # 1 for an active session, 0 otherwise — summed per tenant below.
            active_flag = case((ChatSession.status == 'active', 1), else_=0)
            rows = (
                session.query(
                    ChatSession.tenant_id,
                    func.count(ChatSession.id).label('session_count'),
                    # COALESCE guards against NULL when message_count is NULL everywhere.
                    func.coalesce(func.sum(ChatSession.message_count), 0).label('message_count'),
                    func.sum(active_flag).label('active_session_count'),
                    func.max(ChatSession.updated_at).label('last_active_time'),
                )
                .group_by(ChatSession.tenant_id)
                .order_by(desc('last_active_time'))
                .all()
            )
            return [
                {
                    'tenant_id': row.tenant_id,
                    'session_count': row.session_count,
                    'message_count': int(row.message_count),
                    'active_session_count': int(row.active_session_count),
                    'last_active_time': row.last_active_time.isoformat() if row.last_active_time else None,
                }
                for row in rows
            ]
    except Exception as e:
        logger.error(f"获取租户汇总失败: {e}")
        return []
# ==================== 会话管理方法 ====================
def get_sessions_paginated(
@@ -726,13 +772,17 @@ class ConversationHistoryManager:
per_page: int = 20,
status: Optional[str] = None,
search: str = '',
date_filter: str = ''
date_filter: str = '',
tenant_id: Optional[str] = None
) -> Dict[str, Any]:
"""分页获取会话列表"""
try:
with db_manager.get_session() as session:
query = session.query(ChatSession)
if tenant_id is not None:
query = query.filter(ChatSession.tenant_id == tenant_id)
if status:
query = query.filter(ChatSession.status == status)

View File

@@ -232,6 +232,98 @@ class RealtimeChatManager:
"confidence": 0.1,
"ai_suggestions": []
}
def _generate_response_stream(self, user_message: str, knowledge_results: List[Dict], context: List[Dict], work_order_id: Optional[int] = None):
    """Stream the assistant reply, yielding each token chunk as it is produced.

    Builds the chat prompt from the user message, retrieved knowledge,
    conversation context and any work-order AI suggestions, then delegates
    to the LLM client's streaming completion. On any failure a single
    fallback apology chunk is yielded instead.
    """
    try:
        suggestions = self._get_workorder_ai_suggestions(work_order_id)
        prompt = self._build_chat_prompt(user_message, knowledge_results, context, suggestions)
        stream = self.llm_client.chat_completion_stream(
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,
            max_tokens=1000,
        )
        yield from stream
    except Exception as e:
        logger.error(f"流式生成回复失败: {e}")
        yield "抱歉,系统出现错误,请稍后再试。"
def process_message_stream(self, session_id: str, user_message: str, ip_address: str = None, invocation_method: str = "http_stream"):
    """Process a user message in streaming mode, yielding SSE-formatted events.

    Yields ``data: {...}`` frames (each terminated by a blank line): an
    error frame when the session id is unknown, then one ``{'chunk': ...}``
    frame per generated token, and a final ``{'done': True, ...}`` frame
    carrying the confidence score and assistant message id.

    Args:
        session_id: Key into ``self.active_sessions``; must already exist.
        user_message: Raw user input text.
        ip_address: Caller IP recorded on the session; may be None.
        invocation_method: Channel label persisted with the conversation.
    """
    import time as _time
    if session_id not in self.active_sessions:
        yield f"data: {json.dumps({'error': '会话不存在'}, ensure_ascii=False)}\n\n"
        return
    session = self.active_sessions[session_id]
    session["last_activity"] = datetime.now()
    session["message_count"] += 1
    session["ip_address"] = ip_address
    session["invocation_method"] = invocation_method
    user_msg = ChatMessage(
        role="user",
        content=user_message,
        timestamp=datetime.now(),
        # Epoch-second + per-session counter id scheme; presumably unique
        # enough at this traffic level — NOTE(review): two messages in the
        # same second across sessions could collide; confirm acceptable.
        message_id=f"msg_{int(_time.time())}_{session['message_count']}"
    )
    self.message_history[session_id].append(user_msg)
    # Knowledge search; if the message contains a VIN, prepend that vehicle's
    # latest realtime data as the top-relevance (score 1.0) result.
    knowledge_results = self._search_knowledge(user_message)
    vin = self._extract_vin(user_message)
    if vin:
        latest = self.vehicle_manager.get_latest_vehicle_data_by_vin(vin)
        if latest:
            knowledge_results = [{
                "question": f"VIN {vin} 的最新实时数据",
                "answer": json.dumps(latest, ensure_ascii=False),
                "similarity_score": 1.0,
                "source": "vehicle_realtime"
            }] + knowledge_results
    # Cap the knowledge fed into the prompt at 5 entries.
    knowledge_results = knowledge_results[:5]
    # Streamed generation: forward each chunk to the client as it arrives
    # while accumulating the full text for persistence.
    full_content = []
    for chunk in self._generate_response_stream(
        user_message, knowledge_results, session["context"], session["work_order_id"]
    ):
        full_content.append(chunk)
        yield f"data: {json.dumps({'chunk': chunk}, ensure_ascii=False)}\n\n"
    # Reassemble the complete reply from the streamed chunks.
    content = "".join(full_content)
    confidence = self._calculate_confidence(knowledge_results, content)
    # Build the assistant message and record both sides of the exchange.
    assistant_msg = ChatMessage(
        role="assistant",
        content=content,
        timestamp=datetime.now(),
        message_id=f"msg_{int(_time.time())}_{session['message_count'] + 1}",
        work_order_id=session["work_order_id"],
        knowledge_used=knowledge_results,
        confidence_score=confidence,
    )
    self.message_history[session_id].append(assistant_msg)
    session["context"].append({"role": "user", "content": user_message})
    session["context"].append({"role": "assistant", "content": content})
    # Keep only the last 20 context turns to bound prompt size.
    if len(session["context"]) > 20:
        session["context"] = session["context"][-20:]
    self._save_conversation(session_id, user_msg, assistant_msg, ip_address, invocation_method)
    # Bump usage counters for knowledge entries that carry an id (the
    # synthetic vehicle_realtime entry has none and is filtered out here).
    if knowledge_results:
        used_ids = [r["id"] for r in knowledge_results if r.get("id")]
        if used_ids:
            self.knowledge_manager.update_usage_count(used_ids)
    # Terminal SSE event with the final confidence and message id.
    yield f"data: {json.dumps({'done': True, 'confidence_score': confidence, 'message_id': assistant_msg.message_id}, ensure_ascii=False)}\n\n"
def _build_chat_prompt(self, user_message: str, knowledge_results: List[Dict], context: List[Dict], ai_suggestions: List[str] = None) -> str:
"""构建聊天提示词"""