feat: 添加多个新功能和修复 - 包括用户管理、数据库迁移、Git推送工具等

This commit is contained in:
赵杰 Jie Zhao (雄狮汽车科技)
2025-11-05 10:16:34 +08:00
parent a4261ef06f
commit c9d5c80f42
43 changed files with 4435 additions and 7439 deletions

6
.idea/data_source_mapping.xml generated Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourcePerFileMappings">
<file url="file://$APPLICATION_CONFIG_DIR$/consoles/db/715b070d-f258-43df-a066-49e825a9b04f/console.sql" value="715b070d-f258-43df-a066-49e825a9b04f" />
</component>
</project>

View File

@@ -200,18 +200,56 @@ echo ✅ 提交成功
echo.
echo [4/4] 推送到远程仓库...
:: 获取当前分支名称(在延迟变量扩展内)
set current_branch=
for /f "tokens=*" %%b in ('git branch --show-current 2^>nul') do set current_branch=%%b
if "!current_branch!"=="" (
echo ❌ 无法获取当前分支名称
echo 尝试使用默认分支 main...
set current_branch=main
) else (
echo 📍 当前分支: !current_branch!
)
echo.
:: 先尝试拉取最新更改
echo 🔄 检查远程更新...
git fetch origin main
if %errorlevel% neq 0 (
echo ⚠️ 无法获取远程更新,继续推送...
git fetch origin !current_branch! >nul 2>&1
set fetch_result=!errorlevel!
if !fetch_result! neq 0 (
echo ⚠️ 无法获取远程更新,尝试获取所有分支...
git fetch origin >nul 2>&1
set fetch_all_result=!errorlevel!
if !fetch_all_result! neq 0 (
echo ⚠️ 无法获取远程更新,继续推送...
) else (
echo ✅ 远程更新检查完成
)
) else (
echo ✅ 远程更新检查完成
)
:: 推送到远程
git push origin main
if %errorlevel% neq 0 (
:: 检查远程分支是否存在,如果不存在则设置上游
echo 🔍 检查远程分支状态...
git ls-remote --heads origin !current_branch! >nul 2>&1
set remote_exists=!errorlevel!
set push_result=0
if !remote_exists! equ 0 (
echo 远程分支 !current_branch! 已存在
:: 推送到远程(分支已存在)
git push origin !current_branch!
set push_result=!errorlevel!
) else (
echo 远程分支 !current_branch! 不存在,将创建并设置上游
:: 推送到远程并设置上游(分支不存在)
git push -u origin !current_branch!
set push_result=!errorlevel!
)
if !push_result! neq 0 (
echo ❌ 推送失败
echo.
echo 💡 可能的原因:
@@ -221,18 +259,20 @@ if %errorlevel% neq 0 (
echo - 需要先拉取远程更改
echo.
echo 🔧 尝试自动解决冲突...
git pull origin main --rebase
if %errorlevel% equ 0 (
git pull origin !current_branch! --rebase
set pull_result=!errorlevel!
if !pull_result! equ 0 (
echo ✅ 冲突已解决,重新推送...
git push origin main
if %errorlevel% equ 0 (
git push origin !current_branch!
set final_push_result=!errorlevel!
if !final_push_result! equ 0 (
echo ✅ 推送成功!
) else (
echo ❌ 重新推送失败
echo.
echo 🔧 建议手动解决:
echo 1. 运行: git pull origin main
echo 2. 解决冲突后运行: git push origin main
echo 1. 运行: git pull origin !current_branch!
echo 2. 解决冲突后运行: git push origin !current_branch!
pause
exit /b 1
)
@@ -240,8 +280,8 @@ if %errorlevel% neq 0 (
echo ❌ 无法自动解决冲突
echo.
echo 🔧 建议手动解决:
echo 1. 运行: git pull origin main
echo 2. 解决冲突后运行: git push origin main
echo 1. 运行: git pull origin !current_branch!
echo 2. 解决冲突后运行: git push origin !current_branch!
pause
exit /b 1
)

147
check_and_fix_users.py Normal file
View File

@@ -0,0 +1,147 @@
# -*- coding: utf-8 -*-
"""Check the ``users`` table structure and ensure a usable ``admin`` account.

Inspects the live schema, reports which columns are required (NOT NULL and
no server default) versus optional, then either resets the existing
``admin`` user's password or inserts a new ``admin`` row that satisfies
every required column.  Intended as a one-off maintenance script run from
the project root.
"""
import sys
import os

# Make the project root importable when run directly.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from src.core.database import db_manager
from sqlalchemy import text, inspect
from werkzeug.security import generate_password_hash

print("Checking users table structure...")
try:
    with db_manager.get_session() as session:
        inspector = inspect(db_manager.engine)
        columns = inspector.get_columns('users')

        print("\nUsers table columns:")
        required_fields = {}   # NOT NULL columns without a default
        optional_fields = {}   # nullable columns or ones with a default
        for col in columns:
            name = col['name']
            nullable = col.get('nullable', True)
            default = col.get('default', None)
            if nullable or default is not None:
                optional_fields[name] = col
                print(f" {name}: {col['type']} (nullable: {nullable}, default: {default})")
            else:
                required_fields[name] = col
                print(f" {name}: {col['type']} (REQUIRED, nullable: {nullable})")

        print(f"\nRequired fields: {list(required_fields.keys())}")
        print(f"Optional fields: {list(optional_fields.keys())}")

        # Columns that actually exist in this schema, whatever their kind.
        existing_columns = set(required_fields) | set(optional_fields)
        password_hash = generate_password_hash('admin123')

        # Check for an existing admin user.
        result = session.execute(text("SELECT * FROM users WHERE username = 'admin' LIMIT 1"))
        if result.fetchone():
            print("\nAdmin user found in database")
            # Reset the password and make sure the account is active.
            session.execute(text("""
                UPDATE users
                SET password_hash = :password_hash,
                    is_active = 1,
                    updated_at = NOW()
                WHERE username = 'admin'
            """), {'password_hash': password_hash})
            session.commit()
            print("Admin password updated successfully")
        else:
            print("\nAdmin user not found, creating...")
            insert_values = {
                'username': 'admin',
                'email': 'admin@tsp.com',
                'password_hash': password_hash,
                'role': 'admin',
            }
            # Only reference columns that actually exist in this schema.
            # (The previous version added is_active/region unconditionally,
            # which fails on schemas that lack those columns.)
            if 'is_active' in existing_columns:
                insert_values['is_active'] = True
            if 'region' in existing_columns:
                insert_values['region'] = None
            if 'full_name' in existing_columns:
                insert_values['full_name'] = 'Administrator'
            # Satisfy any remaining NOT-NULL columns: timestamp columns get
            # SQL NOW(), the auto-increment primary key is skipped, and
            # everything else falls back to an empty string.
            timestamp_fields = []
            for field_name, col in required_fields.items():
                if field_name in insert_values:
                    continue
                if field_name == 'id' or col.get('autoincrement') is True:
                    continue
                if field_name in ('created_at', 'updated_at'):
                    timestamp_fields.append(field_name)
                else:
                    insert_values[field_name] = ''
            columns_sql = ', '.join(list(insert_values) + timestamp_fields)
            values_sql = ', '.join(
                [f':{name}' for name in insert_values] + ['NOW()'] * len(timestamp_fields)
            )
            final_sql = f"INSERT INTO users ({columns_sql}) VALUES ({values_sql})"
            print(f"Executing SQL with fields: {list(insert_values) + timestamp_fields}")
            session.execute(text(final_sql), insert_values)
            session.commit()
            print("Admin user created successfully")

        # Verify the account is in a loggable state.
        result = session.execute(text(
            "SELECT username, email, role, is_active FROM users WHERE username = 'admin'"
        ))
        admin_data = result.fetchone()
        if admin_data:
            print("\nVerification:")
            print(f" Username: {admin_data[0]}")
            print(f" Email: {admin_data[1]}")
            print(f" Role: {admin_data[2]}")
            print(f" Is Active: {admin_data[3]}")
            print("\nAdmin user ready for login!")
except Exception as e:
    print(f"Error: {e}")
    import traceback
    traceback.print_exc()

View File

@@ -7,7 +7,7 @@
"TR Status": "status",
"Source": "source",
"Date creation": "created_at",
"处理过程": "solution",
"处理过程": "resolution",
"TR tracking": "resolution",
"Created by": "created_by",
"Module模块": "module",

173
create_admin_user.py Normal file
View File

@@ -0,0 +1,173 @@
# -*- coding: utf-8 -*-
"""
Create or repair the default administrator user.

Standalone maintenance script: verifies the ``users`` table exists,
resets/activates the existing ``admin`` account (password ``admin123``)
or creates it, then seeds two sample operator accounts.  All printed
output is user-facing and intentionally in Chinese.
"""
import sys
import os
# Make the project root importable when the script is run directly.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from src.core.database import db_manager
from src.core.models import User
from werkzeug.security import generate_password_hash, check_password_hash
from sqlalchemy import text, inspect
print("=" * 60)
print("创建/修复管理员用户")
print("=" * 60)
try:
    with db_manager.get_session() as session:
        # Inspect the schema first: bail out if the table is missing.
        inspector = inspect(db_manager.engine)
        if 'users' not in inspector.get_table_names():
            print("错误: users表不存在请先运行 python init_database.py")
            sys.exit(1)
        existing_columns = [col['name'] for col in inspector.get_columns('users')]
        print(f"users表字段: {existing_columns}")
        # Does an admin account already exist?
        admin_user = session.query(User).filter(User.username == 'admin').first()
        if admin_user:
            print(f"\n找到admin用户 (ID: {admin_user.id})")
            print(f" 邮箱: {admin_user.email}")
            print(f" 角色: {admin_user.role}")
            print(f" 激活状态: {admin_user.is_active}")
            # Verify the stored hash against the expected password.
            password_ok = check_password_hash(admin_user.password_hash, 'admin123')
            print(f" 密码验证: {'正确' if password_ok else '错误'}")
            if not password_ok:
                print("\n密码不匹配,正在更新密码...")
                admin_user.password_hash = generate_password_hash('admin123')
                admin_user.is_active = True
                # region only exists on newer schemas -- guard the attribute.
                if hasattr(admin_user, 'region'):
                    admin_user.region = None
                session.commit()
                print("密码已更新为: admin123")
            if not admin_user.is_active:
                print("用户未激活,正在激活...")
                admin_user.is_active = True
                session.commit()
                print("用户已激活")
            # Final sanity check on password and active flag.
            test_password = check_password_hash(admin_user.password_hash, 'admin123')
            if test_password and admin_user.is_active:
                print("\n管理员用户已就绪!")
                print(" 用户名: admin")
                print(" 密码: admin123")
                print(" 状态: 已激活")
            else:
                print("\n警告: 用户状态异常")
                print(f" 密码正确: {test_password}")
                print(f" 已激活: {admin_user.is_active}")
        else:
            print("\n未找到admin用户正在创建...")
            # Pre-compute the hash used by both creation paths below.
            password_hash = generate_password_hash('admin123')
            # Try the ORM first; fall back to raw SQL if the model and the
            # live table disagree on columns.
            try:
                # First attempt: create via the User model.
                new_admin = User(
                    username='admin',
                    email='admin@tsp.com',
                    password_hash=password_hash,
                    role='admin',
                    is_active=True
                )
                if 'region' in existing_columns:
                    new_admin.region = None
                session.add(new_admin)
                session.commit()
                print("使用模型创建成功")
            except Exception as model_error:
                print(f"模型创建失败: {model_error}")
                print("尝试使用SQL直接插入...")
                session.rollback()
                # Fallback: raw INSERT restricted to columns that exist.
                insert_fields = ['username', 'email', 'password_hash', 'role']
                insert_values = {
                    'username': 'admin',
                    'email': 'admin@tsp.com',
                    'password_hash': password_hash,
                    'role': 'admin'
                }
                if 'is_active' in existing_columns:
                    insert_fields.append('is_active')
                    insert_values['is_active'] = True
                if 'region' in existing_columns:
                    insert_fields.append('region')
                    insert_values['region'] = None
                fields_str = ', '.join(insert_fields)
                values_str = ', '.join([f":{k}" for k in insert_fields])
                sql = f"""
                INSERT INTO users ({fields_str})
                VALUES ({values_str})
                """
                session.execute(text(sql), insert_values)
                session.commit()
                print("使用SQL创建成功")
            # Re-read the row to confirm the account is usable.
            verify_user = session.query(User).filter(User.username == 'admin').first()
            if verify_user:
                test_password = check_password_hash(verify_user.password_hash, 'admin123')
                print(f"\n验证结果:")
                print(f" 用户ID: {verify_user.id}")
                print(f" 密码正确: {test_password}")
                print(f" 已激活: {verify_user.is_active}")
                if test_password and verify_user.is_active:
                    print("\n管理员用户创建成功!")
                    print(" 用户名: admin")
                    print(" 密码: admin123")
                else:
                    print("\n警告: 用户创建成功但状态异常")
        # Seed the sample operator accounts (skipped if already present).
        for username, email, password, role, region in [
            ('overseas_ops', 'overseas@tsp.com', 'ops123', 'overseas_ops', 'overseas'),
            ('domestic_ops', 'domestic@tsp.com', 'ops123', 'domestic_ops', 'domestic')
        ]:
            ops_user = session.query(User).filter(User.username == username).first()
            if not ops_user:
                print(f"\n创建{username}用户...")
                try:
                    new_user = User(
                        username=username,
                        email=email,
                        password_hash=generate_password_hash(password),
                        role=role,
                        is_active=True
                    )
                    if 'region' in existing_columns:
                        new_user.region = region
                    session.add(new_user)
                    session.commit()
                    print(f" {username}用户创建成功")
                except Exception as e:
                    print(f" {username}用户创建失败: {e}")
                    session.rollback()
    print("\n" + "=" * 60)
    print("操作完成!")
    print("=" * 60)
except Exception as e:
    print(f"错误: {e}")
    import traceback
    traceback.print_exc()
    sys.exit(1)

View File

@@ -1,9 +1,9 @@
{
"init_time": "2025-09-19T18:57:01.015501",
"init_time": "2025-10-31T12:46:01.637890",
"database_version": "MySQL 8.4.6",
"database_url": "mysql+pymysql://tsp_assistant:***@43.134.68.207/tsp_assistant?charset=utf8mb4",
"migrations_applied": 0,
"tables_created": 15,
"tables_created": 16,
"initial_data_inserted": true,
"verification_passed": true
}

119
database_migration_notes.md Normal file
View File

@@ -0,0 +1,119 @@
# 数据库迁移说明
## 工单处理过程记录系统迁移
### 新增表:`work_order_process_history`
```sql
CREATE TABLE IF NOT EXISTS work_order_process_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
work_order_id INTEGER NOT NULL,
processor_name VARCHAR(100) NOT NULL,
processor_role VARCHAR(50),
processor_region VARCHAR(50),
process_content TEXT NOT NULL,
action_type VARCHAR(50) NOT NULL,
previous_status VARCHAR(50),
new_status VARCHAR(50),
assigned_module VARCHAR(50),
process_time DATETIME NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (work_order_id) REFERENCES work_orders(id)
);
CREATE INDEX idx_process_history_workorder ON work_order_process_history(work_order_id);
CREATE INDEX idx_process_history_time ON work_order_process_history(process_time);
```
### WorkOrder表新增字段
如果使用 SQLite,可以使用以下 SQL 添加字段:
```sql
-- 注意SQLite不支持直接ALTER TABLE添加多个列需要逐个添加
ALTER TABLE work_orders ADD COLUMN assigned_module VARCHAR(50);
ALTER TABLE work_orders ADD COLUMN module_owner VARCHAR(100);
ALTER TABLE work_orders ADD COLUMN dispatcher VARCHAR(100);
ALTER TABLE work_orders ADD COLUMN dispatch_time DATETIME;
ALTER TABLE work_orders ADD COLUMN region VARCHAR(50);
```
### 使用Python脚本迁移推荐
创建迁移脚本 `migrate_process_history.py`
```python
# -*- coding: utf-8 -*-
"""
数据库迁移脚本:添加工单处理过程记录表和相关字段
"""
from src.core.database import db_manager
from src.core.models import Base, WorkOrderProcessHistory
from sqlalchemy import text
def migrate_database():
"""执行数据库迁移"""
try:
with db_manager.get_session() as session:
# 创建新表
WorkOrderProcessHistory.__table__.create(db_manager.engine, checkfirst=True)
# 检查并添加新字段SQLite需要特殊处理
try:
# 尝试添加字段(如果已存在会报错,可以忽略)
session.execute(text("ALTER TABLE work_orders ADD COLUMN assigned_module VARCHAR(50)"))
except Exception as e:
print(f"字段 assigned_module 可能已存在: {e}")
try:
session.execute(text("ALTER TABLE work_orders ADD COLUMN module_owner VARCHAR(100)"))
except Exception as e:
print(f"字段 module_owner 可能已存在: {e}")
try:
session.execute(text("ALTER TABLE work_orders ADD COLUMN dispatcher VARCHAR(100)"))
except Exception as e:
print(f"字段 dispatcher 可能已存在: {e}")
try:
session.execute(text("ALTER TABLE work_orders ADD COLUMN dispatch_time DATETIME"))
except Exception as e:
print(f"字段 dispatch_time 可能已存在: {e}")
try:
session.execute(text("ALTER TABLE work_orders ADD COLUMN region VARCHAR(50)"))
except Exception as e:
print(f"字段 region 可能已存在: {e}")
session.commit()
print("数据库迁移完成!")
except Exception as e:
print(f"数据库迁移失败: {e}")
raise
if __name__ == "__main__":
migrate_database()
```
### 执行迁移
运行迁移脚本:
```bash
python migrate_process_history.py
```
或者直接在Python交互式环境中执行
```python
from migrate_process_history import migrate_database
migrate_database()
```
## 注意事项
1. **备份数据库**:在执行迁移前,请务必备份现有数据库
2. **SQLite限制**:如果使用 SQLite,ALTER TABLE 添加列的操作在某些情况下可能失败(例如字段已存在时会报错)
3. **数据迁移**:现有工单的处理过程历史记录(存储在`resolution`字段中的)不会自动迁移到新表,需要手动处理
4. **索引优化**:新表已包含必要的索引,如果数据量大可以考虑添加更多索引

86
fix_admin_user.py Normal file
View File

@@ -0,0 +1,86 @@
# -*- coding: utf-8 -*-
"""
Repair the administrator account.

Ensures a user named ``admin`` exists with password ``admin123`` and an
active flag, then re-reads the row and verifies the stored credentials.
"""
import sys
import os

# Make the project root importable when run directly.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from src.core.database import db_manager
from sqlalchemy import text
from werkzeug.security import generate_password_hash

print("Fixing admin user...")
try:
    with db_manager.get_session() as session:
        params = {'password_hash': generate_password_hash('admin123')}

        # Choose between UPDATE and INSERT based on whether the row exists.
        existing = session.execute(
            text("SELECT id FROM users WHERE username = 'admin'")
        ).fetchone()
        if existing is not None:
            print("Admin user exists, updating password...")
            session.execute(text("""
                UPDATE users
                SET password_hash = :password_hash,
                    is_active = 1,
                    updated_at = NOW()
                WHERE username = 'admin'
            """), params)
            session.commit()
            print("Admin password updated successfully")
        else:
            print("Admin user not found, creating...")
            session.execute(text("""
                INSERT INTO users (
                    username, email, password_hash, role, full_name,
                    is_active, region, created_at, updated_at
                ) VALUES (
                    'admin', 'admin@tsp.com', :password_hash, 'admin', 'Administrator',
                    1, NULL, NOW(), NOW()
                )
            """), params)
            session.commit()
            print("Admin user created successfully")

        # Re-read the row and confirm the password round-trips.
        admin_data = session.execute(text("""
            SELECT username, email, role, is_active, full_name
            FROM users
            WHERE username = 'admin'
        """)).fetchone()
        if not admin_data:
            print("\n[ERROR] User not found after creation")
        else:
            print("\nVerification:")
            labels = ("Username", "Email", "Role", "Is Active", "Full Name")
            for label, value in zip(labels, admin_data):
                print(f" {label}: {value}")
            stored_hash = session.execute(
                text("SELECT password_hash FROM users WHERE username = 'admin'")
            ).fetchone()[0]
            from werkzeug.security import check_password_hash
            password_ok = check_password_hash(stored_hash, 'admin123')
            print(f" Password Check: {'PASS' if password_ok else 'FAIL'}")
            if password_ok and admin_data[3]:
                print("\n[SUCCESS] Admin user is ready for login!")
                print(" Username: admin")
                print(" Password: admin123")
            else:
                print("\n[WARNING] User exists but password or status issue")
except Exception as e:
    print(f"Error: {e}")
    import traceback
    traceback.print_exc()
    sys.exit(1)

125
fix_git_push.bat Normal file
View File

@@ -0,0 +1,125 @@
@echo off
chcp 65001 >nul
echo ========================================
echo Git推送问题诊断和修复工具
echo ========================================
echo.
:: 1. 检查Git状态
echo [1] 检查Git状态...
git status
if %errorlevel% neq 0 (
    echo ? Git未初始化
    pause
    exit /b 1
)
echo.
:: 2. 检查远程仓库配置(< 和 > 必须转义,否则被当作重定向)
echo [2] 检查远程仓库配置...
git remote -v
if %errorlevel% neq 0 (
    echo ? 未配置远程仓库
    echo 请先运行: git remote add origin ^<仓库地址^>
    pause
    exit /b 1
)
echo.
:: 3. 检查当前分支
echo [3] 检查当前分支...
git branch --show-current
echo.
:: 4. 检查是否有未提交的更改。
::    必须检查 porcelain 的输出内容而不是退出码:
::    git status --porcelain 在工作区干净时同样返回 0。
::    同时在块内启用延迟扩展,否则 %commit%/%msg% 在整个块
::    解析时就被展开,读不到 set /p 的输入。
echo [4] 检查未提交的更改...
git status --porcelain
set "has_changes="
for /f "delims=" %%i in ('git status --porcelain') do set has_changes=1
if defined has_changes (
    echo ?? 有未提交的更改
    setlocal enabledelayedexpansion
    set /p commit="是否先提交更改? (y/n): "
    if /i "!commit!"=="y" (
        git add .
        set /p msg="请输入提交信息: "
        if "!msg!"=="" set msg=自动提交
        git commit -m "!msg!"
    )
    endlocal
)
echo.
:: 5. 尝试获取远程分支信息
echo [5] 获取远程分支信息...
git fetch origin
if %errorlevel% neq 0 (
    echo ? 无法连接到远程仓库
    echo.
    echo 可能的原因:
    echo 1. 网络连接问题
    echo 2. 远程仓库地址错误
    echo 3. 需要认证请检查是否已配置SSH密钥或Token
    echo.
    echo 远程仓库地址:
    git config --get remote.origin.url
    pause
    exit /b 1
)
echo ? 远程仓库连接成功
echo.
:: 6. 检查分支跟踪关系
echo [6] 检查分支跟踪关系...
git branch -vv
echo.
:: 7. 尝试推送到远程
echo [7] 尝试推送...
set current_branch=
for /f "tokens=*" %%b in ('git branch --show-current') do set current_branch=%%b
echo 当前分支: %current_branch%
echo.
:: 检查远程是否存在该分支。
:: 必须加 --exit-code:否则只要连接成功,即使没有匹配的分支
:: ls-remote 也返回 0,导致永远走"分支已存在"的路径。
git ls-remote --exit-code --heads origin %current_branch% >nul 2>&1
if %errorlevel% equ 0 (
    echo 远程分支 %current_branch% 已存在
    echo.
    echo 尝试使用当前分支名称推送...
    git push origin %current_branch%
) else (
    echo 远程分支 %current_branch% 不存在
    echo.
    echo 尝试设置上游并推送...
    git push -u origin %current_branch%
)
if %errorlevel% equ 0 (
    echo.
    echo ? 推送成功!
) else (
    echo.
    echo ? 推送失败
    echo.
    echo ? 常见问题和解决方案:
    echo.
    echo 1. 如果是认证问题:
    echo    - 检查SSH密钥: ssh -T git@github.com ^(GitHub^) 或 ssh -T git@gitee.com ^(Gitee^)
    echo    - 或使用HTTPS + Token方式
    echo.
    echo 2. 如果是分支冲突:
    echo    - 运行: git pull origin %current_branch% --rebase
    echo    - 解决冲突后: git push origin %current_branch%
    echo.
    echo 3. 如果远程分支名称不同:
    echo    - 检查远程分支: git branch -r
    echo    - 可能需要推送主分支: git push origin main 或 git push origin master
    echo.
    pause
    exit /b 1
)
echo.
echo ========================================
echo ? 诊断完成!
echo ========================================
pause

152
init.sql
View File

@@ -23,49 +23,83 @@ CREATE TABLE IF NOT EXISTS work_orders (
id INT AUTO_INCREMENT PRIMARY KEY,
order_id VARCHAR(50) UNIQUE NOT NULL,
title VARCHAR(200) NOT NULL,
description TEXT,
category VARCHAR(50),
priority ENUM('low', 'medium', 'high', 'urgent') DEFAULT 'medium',
status ENUM('open', 'in_progress', 'resolved', 'closed') DEFAULT 'open',
user_id INT,
assigned_to INT,
description TEXT NOT NULL,
category VARCHAR(100) NOT NULL,
priority VARCHAR(20) NOT NULL DEFAULT 'medium',
status VARCHAR(20) NOT NULL DEFAULT 'pending',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
resolved_at TIMESTAMP NULL,
FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (assigned_to) REFERENCES users(id)
resolution TEXT,
satisfaction_score FLOAT,
-- 飞书集成字段
feishu_record_id VARCHAR(100) UNIQUE,
assignee VARCHAR(100),
solution TEXT,
ai_suggestion TEXT,
-- 扩展飞书字段
source VARCHAR(50),
module VARCHAR(100),
created_by VARCHAR(100),
wilfulness VARCHAR(100),
date_of_close TIMESTAMP NULL,
vehicle_type VARCHAR(100),
vin_sim VARCHAR(50),
app_remote_control_version VARCHAR(100),
hmi_sw VARCHAR(100),
parent_record VARCHAR(100),
has_updated_same_day VARCHAR(50),
operating_time VARCHAR(100),
-- 工单分发和权限管理字段
assigned_module VARCHAR(50),
module_owner VARCHAR(100),
dispatcher VARCHAR(100),
dispatch_time TIMESTAMP NULL,
region VARCHAR(50),
INDEX idx_order_id (order_id),
INDEX idx_status (status),
INDEX idx_priority (priority),
INDEX idx_created_at (created_at),
INDEX idx_assigned_module (assigned_module),
INDEX idx_region (region),
INDEX idx_feishu_record_id (feishu_record_id)
);
-- 创建预警表
CREATE TABLE IF NOT EXISTS alerts (
id INT AUTO_INCREMENT PRIMARY KEY,
rule_name VARCHAR(100) NOT NULL,
alert_type VARCHAR(50) NOT NULL,
level VARCHAR(20) NOT NULL DEFAULT 'info',
severity VARCHAR(20) NOT NULL DEFAULT 'medium',
message TEXT NOT NULL,
level ENUM('info', 'warning', 'error', 'critical') DEFAULT 'info',
status ENUM('active', 'resolved', 'suppressed') DEFAULT 'active',
source VARCHAR(100),
metadata JSON,
data TEXT,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
resolved_at TIMESTAMP NULL,
INDEX idx_level (level),
INDEX idx_status (status),
INDEX idx_alert_type (alert_type),
INDEX idx_severity (severity),
INDEX idx_is_active (is_active),
INDEX idx_created_at (created_at)
);
-- 创建对话表
CREATE TABLE IF NOT EXISTS conversations (
id INT AUTO_INCREMENT PRIMARY KEY,
session_id VARCHAR(100) NOT NULL,
user_id INT,
work_order_id INT,
user_message TEXT NOT NULL,
assistant_response TEXT NOT NULL,
confidence_score DECIMAL(3,2) DEFAULT 0.50,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (work_order_id) REFERENCES work_orders(id),
INDEX idx_session_id (session_id),
INDEX idx_created_at (created_at)
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
confidence_score FLOAT,
knowledge_used TEXT,
response_time FLOAT,
FOREIGN KEY (work_order_id) REFERENCES work_orders(id) ON DELETE CASCADE,
INDEX idx_work_order_id (work_order_id),
INDEX idx_timestamp (timestamp)
);
-- 创建知识库表
@@ -73,13 +107,16 @@ CREATE TABLE IF NOT EXISTS knowledge_entries (
id INT AUTO_INCREMENT PRIMARY KEY,
question TEXT NOT NULL,
answer TEXT NOT NULL,
category VARCHAR(50),
confidence_score DECIMAL(3,2) DEFAULT 0.50,
category VARCHAR(100) NOT NULL,
confidence_score FLOAT DEFAULT 0.0,
usage_count INT DEFAULT 0,
is_verified BOOLEAN DEFAULT FALSE,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
is_active BOOLEAN DEFAULT TRUE,
is_verified BOOLEAN DEFAULT FALSE,
verified_by VARCHAR(100),
verified_at TIMESTAMP NULL,
vector_embedding TEXT,
INDEX idx_category (category),
INDEX idx_is_active (is_active),
INDEX idx_is_verified (is_verified)
@@ -91,9 +128,35 @@ CREATE TABLE IF NOT EXISTS work_order_suggestions (
work_order_id INT NOT NULL,
ai_suggestion TEXT,
human_resolution TEXT,
ai_similarity FLOAT,
approved BOOLEAN DEFAULT FALSE,
use_human_resolution BOOLEAN DEFAULT FALSE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (work_order_id) REFERENCES work_orders(id) ON DELETE CASCADE
FOREIGN KEY (work_order_id) REFERENCES work_orders(id) ON DELETE CASCADE,
INDEX idx_work_order_id (work_order_id),
INDEX idx_approved (approved)
);
-- 创建工单处理过程记录表
CREATE TABLE IF NOT EXISTS work_order_process_history (
id INT AUTO_INCREMENT PRIMARY KEY,
work_order_id INT NOT NULL,
processor_name VARCHAR(100) NOT NULL,
processor_role VARCHAR(50),
processor_region VARCHAR(50),
process_content TEXT NOT NULL,
action_type VARCHAR(50) NOT NULL,
previous_status VARCHAR(50),
new_status VARCHAR(50),
assigned_module VARCHAR(50),
process_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (work_order_id) REFERENCES work_orders(id) ON DELETE CASCADE,
INDEX idx_work_order_id (work_order_id),
INDEX idx_process_time (process_time),
INDEX idx_action_type (action_type),
INDEX idx_processor_name (processor_name)
);
-- 创建系统配置表
@@ -118,10 +181,31 @@ INSERT IGNORE INTO system_settings (key_name, value, description) VALUES
('max_concurrent_users', '100', '最大并发用户数'),
('session_timeout', '3600', '会话超时时间(秒)');
-- 创建索引优化查询性能
CREATE INDEX idx_work_orders_status ON work_orders(status);
CREATE INDEX idx_work_orders_priority ON work_orders(priority);
CREATE INDEX idx_work_orders_created_at ON work_orders(created_at);
CREATE INDEX idx_alerts_level_status ON alerts(level, status);
CREATE INDEX idx_conversations_user_id ON conversations(user_id);
CREATE INDEX idx_knowledge_entries_category_active ON knowledge_entries(category, is_active);
-- 创建分析统计表
CREATE TABLE IF NOT EXISTS analytics (
id INT AUTO_INCREMENT PRIMARY KEY,
date TIMESTAMP NOT NULL,
total_orders INT DEFAULT 0,
resolved_orders INT DEFAULT 0,
avg_resolution_time FLOAT DEFAULT 0.0,
satisfaction_avg FLOAT DEFAULT 0.0,
knowledge_hit_rate FLOAT DEFAULT 0.0,
category_distribution TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_date (date)
);
-- 创建车辆实时数据表(如果不存在)
CREATE TABLE IF NOT EXISTS vehicle_data (
id INT AUTO_INCREMENT PRIMARY KEY,
vehicle_id VARCHAR(50) NOT NULL,
vehicle_vin VARCHAR(17),
data_type VARCHAR(50) NOT NULL,
data_value TEXT NOT NULL,
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
is_active BOOLEAN DEFAULT TRUE,
INDEX idx_vehicle_id (vehicle_id),
INDEX idx_vehicle_vin (vehicle_vin),
INDEX idx_data_type (data_type),
INDEX idx_timestamp (timestamp)
);

View File

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""
TSP助手数据库初始化脚本 - 重构版本
结合项目新特性,提供更高效的数据库初始化和管理功能
TSP助手数据库初始化脚本
"""
import sys
@@ -19,10 +18,12 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from src.config.config import Config
from src.utils.helpers import setup_logging
from src.core.database import db_manager
from src.core.models import Base, WorkOrder, KnowledgeEntry, Conversation, Analytics, Alert, VehicleData
from src.core.models import (
Base, WorkOrder, KnowledgeEntry, Conversation, Analytics, Alert, VehicleData,
WorkOrderSuggestion, WorkOrderProcessHistory
)
class DatabaseInitializer:
"""数据库初始化器 - 重构版本"""
def __init__(self):
self.logger = logging.getLogger(__name__)
@@ -57,11 +58,11 @@ class DatabaseInitializer:
def initialize_database(self, force_reset: bool = False) -> bool:
"""初始化数据库 - 主入口函数"""
print("=" * 80)
print("🚀 TSP智能助手数据库初始化系统")
print("TSP智能助手数据库初始化系统")
print("=" * 80)
print(f"📊 数据库类型: {self.db_version}")
print(f"🔗 连接地址: {self.db_url}")
print(f"初始化时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
print(f"数据库类型: {self.db_version}")
print(f"连接地址: {self.db_url}")
print(f"初始化时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
print("=" * 80)
try:
@@ -97,49 +98,49 @@ class DatabaseInitializer:
self._generate_init_report()
print("\n" + "=" * 80)
print("🎉 数据库初始化完成!")
print("数据库初始化完成!")
print("=" * 80)
return True
except Exception as e:
print(f"\n数据库初始化失败: {e}")
print(f"\n数据库初始化失败: {e}")
self.logger.error(f"数据库初始化失败: {e}", exc_info=True)
return False
def _test_connection(self) -> bool:
"""测试数据库连接"""
print("\n🔌 测试数据库连接...")
print("\n测试数据库连接...")
try:
if db_manager.test_connection():
print("数据库连接成功")
print("数据库连接成功")
return True
else:
print("数据库连接失败")
print("数据库连接失败")
return False
except Exception as e:
print(f"数据库连接测试异常: {e}")
print(f"数据库连接测试异常: {e}")
return False
def _reset_database(self) -> bool:
"""重置数据库(谨慎使用)"""
print("\n⚠️ 重置数据库...")
print("\n重置数据库...")
try:
# 删除所有表
Base.metadata.drop_all(bind=db_manager.engine)
print("数据库表删除成功")
print("数据库表删除成功")
# 重新创建所有表
Base.metadata.create_all(bind=db_manager.engine)
print("数据库表重新创建成功")
print("数据库表重新创建成功")
return True
except Exception as e:
print(f"数据库重置失败: {e}")
print(f"数据库重置失败: {e}")
return False
def _create_tables(self) -> bool:
"""创建数据库表"""
print("\n📋 创建数据库表...")
print("\n创建数据库表...")
try:
# 获取现有表信息
inspector = inspect(db_manager.engine)
@@ -153,18 +154,18 @@ class DatabaseInitializer:
created_tables = set(new_tables) - set(existing_tables)
if created_tables:
print(f"新创建表: {', '.join(created_tables)}")
print(f"新创建表: {', '.join(created_tables)}")
else:
print("所有表已存在")
print("所有表已存在")
return True
except Exception as e:
print(f"创建数据库表失败: {e}")
print(f"创建数据库表失败: {e}")
return False
def _run_migrations(self) -> bool:
"""执行数据库迁移"""
print("\n🔄 执行数据库迁移...")
print("\n执行数据库迁移...")
migrations = [
self._migrate_knowledge_verification_fields,
@@ -173,6 +174,8 @@ class DatabaseInitializer:
self._migrate_conversation_enhancements,
self._migrate_workorder_enhancements,
self._migrate_workorder_suggestions_enhancements,
self._migrate_workorder_dispatch_fields,
self._migrate_workorder_process_history_table,
self._migrate_analytics_enhancements,
self._migrate_system_optimization_fields
]
@@ -184,14 +187,14 @@ class DatabaseInitializer:
success_count += 1
except Exception as e:
self.logger.error(f"迁移失败: {migration.__name__}: {e}")
print(f"⚠️ 迁移 {migration.__name__} 失败: {e}")
print(f"迁移 {migration.__name__} 失败: {e}")
print(f"完成 {success_count}/{len(migrations)} 个迁移")
print(f"完成 {success_count}/{len(migrations)} 个迁移")
return success_count > 0
def _migrate_knowledge_verification_fields(self) -> bool:
"""迁移知识库验证字段"""
print(" 📝 检查知识库验证字段...")
print(" 检查知识库验证字段...")
fields_to_add = [
('is_verified', 'BOOLEAN DEFAULT FALSE'),
@@ -203,7 +206,7 @@ class DatabaseInitializer:
def _migrate_alert_severity_field(self) -> bool:
"""迁移预警严重程度字段"""
print(" 🚨 检查预警严重程度字段...")
print(" 检查预警严重程度字段...")
fields_to_add = [
('severity', 'VARCHAR(20) DEFAULT \'medium\'')
@@ -213,50 +216,50 @@ class DatabaseInitializer:
def _migrate_vehicle_data_table(self) -> bool:
"""迁移车辆数据表"""
print(" 🚗 检查车辆数据表...")
print(" 检查车辆数据表...")
try:
with db_manager.get_session() as session:
# 检查表是否存在
inspector = inspect(db_manager.engine)
if 'vehicle_data' not in inspector.get_table_names():
print(" 创建vehicle_data表...")
print(" 创建vehicle_data表...")
VehicleData.__table__.create(session.bind, checkfirst=True)
print(" vehicle_data表创建成功")
print(" vehicle_data表创建成功")
else:
print(" vehicle_data表已存在")
print(" vehicle_data表已存在")
session.commit()
return True
except Exception as e:
print(f" 车辆数据表迁移失败: {e}")
print(f" 车辆数据表迁移失败: {e}")
return False
def _migrate_conversation_enhancements(self) -> bool:
"""迁移对话增强字段"""
print(" 💬 检查对话增强字段...")
print(" 检查对话增强字段...")
fields_to_add = [
('response_time', 'FLOAT'),
('user_satisfaction', 'INTEGER'),
('ai_confidence', 'FLOAT'),
('context_data', 'TEXT')
('timestamp', 'TIMESTAMP DEFAULT CURRENT_TIMESTAMP'),
('knowledge_used', 'TEXT'),
('response_time', 'FLOAT')
]
return self._add_table_columns('conversations', fields_to_add)
def _migrate_workorder_enhancements(self) -> bool:
"""迁移工单增强字段"""
print(" 📋 检查工单增强字段...")
print(" 检查工单增强字段...")
fields_to_add = [
('ai_suggestion', 'TEXT'),
('human_resolution', 'TEXT'),
('ai_similarity', 'FLOAT'),
('ai_approved', 'BOOLEAN DEFAULT FALSE'),
('resolution', 'TEXT'),
('satisfaction_score', 'FLOAT'),
# 飞书集成字段
('feishu_record_id', 'VARCHAR(100)'),
('sync_status', 'VARCHAR(20) DEFAULT \'pending\''),
# 飞书集成扩展字段
('assignee', 'VARCHAR(100)'),
('solution', 'TEXT'),
('ai_suggestion', 'TEXT'),
# 扩展飞书字段
('source', 'VARCHAR(50)'),
('module', 'VARCHAR(100)'),
('created_by', 'VARCHAR(100)'),
@@ -275,17 +278,69 @@ class DatabaseInitializer:
def _migrate_workorder_suggestions_enhancements(self) -> bool:
"""迁移工单建议表增强字段"""
print(" 💡 检查工单建议表增强字段...")
print(" 检查工单建议表增强字段...")
fields_to_add = [
('ai_similarity', 'FLOAT'),
('approved', 'BOOLEAN DEFAULT FALSE'),
('use_human_resolution', 'BOOLEAN DEFAULT FALSE') # 是否使用人工描述入库
]
return self._add_table_columns('work_order_suggestions', fields_to_add)
def _migrate_workorder_dispatch_fields(self) -> bool:
    """Add the dispatch / permission-management columns to the work_orders table.

    Returns:
        True when every column already exists or was added successfully.
    """
    print(" 检查工单分发和权限管理字段...")
    dispatch_columns = [
        ('assigned_module', 'VARCHAR(50)'),   # target module (TBOX, OTA, ...)
        ('module_owner', 'VARCHAR(100)'),     # business contact for the module
        ('dispatcher', 'VARCHAR(100)'),       # ops staff member who dispatched
        ('dispatch_time', 'DATETIME'),        # when the dispatch happened
        ('region', 'VARCHAR(50)'),            # overseas / domestic
    ]
    return self._add_table_columns('work_orders', dispatch_columns)
def _migrate_workorder_process_history_table(self) -> bool:
    """Ensure the work_order_process_history table exists and looks complete.

    Creates the table when it is missing; otherwise verifies that the
    expected columns are present and reports any that are not.

    Returns:
        True on success (table created or already present), False on error.
    """
    print(" 检查工单处理过程记录表...")
    table_name = 'work_order_process_history'
    try:
        with db_manager.get_session() as session:
            inspector = inspect(db_manager.engine)
            if table_name in inspector.get_table_names():
                print(" work_order_process_history表已存在")
                # Table already there: check that the expected columns exist.
                present = {col['name'] for col in inspector.get_columns(table_name)}
                expected = [
                    'processor_name', 'processor_role', 'processor_region',
                    'process_content', 'action_type', 'previous_status',
                    'new_status', 'assigned_module', 'process_time'
                ]
                missing = [name for name in expected if name not in present]
                if missing:
                    print(f" 缺少字段: {', '.join(missing)}")
                    # Missing columns could be added here, but the table is
                    # normally created complete, so they are only reported.
                else:
                    print(" 所有必需字段已存在")
            else:
                print(" 创建work_order_process_history表...")
                WorkOrderProcessHistory.__table__.create(session.bind, checkfirst=True)
                print(" work_order_process_history表创建成功")
            session.commit()
            return True
    except Exception as e:
        print(f" 工单处理过程记录表迁移失败: {e}")
        return False
def _migrate_analytics_enhancements(self) -> bool:
"""迁移分析增强字段"""
print(" 📊 检查分析增强字段...")
print(" 检查分析增强字段...")
fields_to_add = [
('performance_score', 'FLOAT'),
@@ -298,7 +353,7 @@ class DatabaseInitializer:
def _migrate_system_optimization_fields(self) -> bool:
"""迁移系统优化字段"""
print(" ⚙️ 检查系统优化字段...")
print(" 检查系统优化字段...")
# 为各个表添加系统优化相关字段
tables_and_fields = {
@@ -337,7 +392,7 @@ class DatabaseInitializer:
skipped_count += 1
continue
print(f" 添加字段 {table_name}.{field_name}...")
print(f" 添加字段 {table_name}.{field_name}...")
# 使用单独的会话添加每个字段,避免长时间锁定
with db_manager.get_session() as session:
@@ -345,22 +400,22 @@ class DatabaseInitializer:
session.execute(text(alter_sql))
session.commit()
print(f" 字段 {field_name} 添加成功")
print(f" 字段 {field_name} 添加成功")
added_count += 1
except Exception as field_error:
print(f" ⚠️ 字段 {field_name} 添加失败: {field_error}")
print(f" 字段 {field_name} 添加失败: {field_error}")
# 继续处理其他字段,不中断整个过程
if added_count > 0:
print(f" 📊 成功添加 {added_count} 个字段,跳过 {skipped_count} 个已存在字段")
print(f" 成功添加 {added_count} 个字段,跳过 {skipped_count} 个已存在字段")
else:
print(f" 📊 所有字段都已存在,跳过 {skipped_count} 个字段")
print(f" 所有字段都已存在,跳过 {skipped_count} 个字段")
return True
except Exception as e:
print(f" 添加字段过程失败: {e}")
print(f" 添加字段过程失败: {e}")
return False
def _column_exists(self, table_name: str, column_name: str) -> bool:
@@ -395,14 +450,14 @@ class DatabaseInitializer:
def _insert_initial_data(self) -> bool:
"""插入初始数据"""
print("\n📊 插入初始数据...")
print("\n插入初始数据...")
try:
with db_manager.get_session() as session:
# 检查是否已有数据
existing_count = session.query(KnowledgeEntry).count()
if existing_count > 0:
print(f" 数据库中已有 {existing_count} 条知识库条目,跳过初始数据插入")
print(f" 数据库中已有 {existing_count} 条知识库条目,跳过初始数据插入")
return True
# 插入初始知识库数据
@@ -412,7 +467,7 @@ class DatabaseInitializer:
session.add(entry)
session.commit()
print(f" 成功插入 {len(initial_data)} 条知识库条目")
print(f" 成功插入 {len(initial_data)} 条知识库条目")
# 添加示例车辆数据
self._add_sample_vehicle_data()
@@ -422,7 +477,7 @@ class DatabaseInitializer:
return True
except Exception as e:
print(f" 插入初始数据失败: {e}")
print(f" 插入初始数据失败: {e}")
return False
def _get_initial_knowledge_data(self) -> List[Dict[str, Any]]:
@@ -549,13 +604,13 @@ class DatabaseInitializer:
success = vehicle_manager.add_sample_vehicle_data()
if success:
print(" 示例车辆数据添加成功")
print(" 示例车辆数据添加成功")
else:
print(" 示例车辆数据添加失败")
print(" 示例车辆数据添加失败")
return success
except Exception as e:
print(f" 添加示例车辆数据失败: {e}")
print(f" 添加示例车辆数据失败: {e}")
return False
def _verify_existing_knowledge(self) -> bool:
@@ -568,7 +623,7 @@ class DatabaseInitializer:
).all()
if unverified_entries:
print(f" 📝 发现 {len(unverified_entries)} 条未验证的知识库条目")
print(f" 发现 {len(unverified_entries)} 条未验证的知识库条目")
# 将现有的知识库条目标记为已验证
for entry in unverified_entries:
@@ -581,18 +636,18 @@ class DatabaseInitializer:
entry.relevance_score = 0.7
session.commit()
print(f" 成功验证 {len(unverified_entries)} 条知识库条目")
print(f" 成功验证 {len(unverified_entries)} 条知识库条目")
else:
print(" 所有知识库条目已验证")
print(" 所有知识库条目已验证")
return True
except Exception as e:
print(f" 验证知识库条目失败: {e}")
print(f" 验证知识库条目失败: {e}")
return False
def _verify_database_integrity(self) -> bool:
"""验证数据库完整性"""
print("\n🔍 验证数据库完整性...")
print("\n验证数据库完整性...")
try:
with db_manager.get_session() as session:
@@ -603,7 +658,9 @@ class DatabaseInitializer:
'knowledge_entries': KnowledgeEntry,
'analytics': Analytics,
'alerts': Alert,
'vehicle_data': VehicleData
'vehicle_data': VehicleData,
'work_order_suggestions': WorkOrderSuggestion,
'work_order_process_history': WorkOrderProcessHistory
}
total_records = 0
@@ -611,19 +668,19 @@ class DatabaseInitializer:
try:
count = session.query(model_class).count()
total_records += count
print(f" 📋 {table_name}: {count} 条记录")
print(f" {table_name}: {count} 条记录")
except Exception as e:
print(f" ⚠️ {table_name}: 检查失败 - {e}")
print(f" {table_name}: 检查失败 - {e}")
print(f" 📊 总记录数: {total_records}")
print(f" 总记录数: {total_records}")
# 检查关键字段
self._check_critical_fields()
print(" 数据库完整性验证通过")
print(" 数据库完整性验证通过")
return True
except Exception as e:
print(f" 数据库完整性验证失败: {e}")
print(f" 数据库完整性验证失败: {e}")
return False
def _check_critical_fields(self):
@@ -632,19 +689,23 @@ class DatabaseInitializer:
("knowledge_entries", "is_verified"),
("alerts", "severity"),
("vehicle_data", "vehicle_id"),
("conversations", "timestamp"),
("conversations", "response_time"),
("work_orders", "ai_suggestion")
("work_orders", "ai_suggestion"),
("work_orders", "assigned_module"),
("work_order_process_history", "processor_name"),
("work_order_suggestions", "ai_similarity")
]
for table_name, field_name in critical_checks:
if self._column_exists(table_name, field_name):
print(f" {table_name}.{field_name} 字段存在")
print(f" {table_name}.{field_name} 字段存在")
else:
print(f" ⚠️ {table_name}.{field_name} 字段缺失")
print(f" {table_name}.{field_name} 字段缺失")
def _generate_init_report(self):
"""生成初始化报告"""
print("\n📋 生成初始化报告...")
print("\n生成初始化报告...")
try:
report = {
@@ -662,10 +723,10 @@ class DatabaseInitializer:
with open(report_path, 'w', encoding='utf-8') as f:
json.dump(report, f, indent=2, ensure_ascii=False)
print(f" 初始化报告已保存到: {report_path}")
print(f" 初始化报告已保存到: {report_path}")
except Exception as e:
print(f" ⚠️ 生成初始化报告失败: {e}")
print(f" 生成初始化报告失败: {e}")
def _get_table_count(self) -> int:
"""获取表数量"""
@@ -678,7 +739,7 @@ class DatabaseInitializer:
def check_database_status(self) -> Dict[str, Any]:
"""检查数据库状态"""
print("\n" + "=" * 80)
print("📊 数据库状态检查")
print("数据库状态检查")
print("=" * 80)
try:
@@ -690,7 +751,9 @@ class DatabaseInitializer:
'knowledge_entries': KnowledgeEntry,
'analytics': Analytics,
'alerts': Alert,
'vehicle_data': VehicleData
'vehicle_data': VehicleData,
'work_order_suggestions': WorkOrderSuggestion,
'work_order_process_history': WorkOrderProcessHistory
}
status = {
@@ -706,10 +769,10 @@ class DatabaseInitializer:
count = session.query(model_class).count()
status["tables"][table_name] = count
status["total_records"] += count
print(f"📋 {table_name}: {count} 条记录")
print(f"{table_name}: {count} 条记录")
except Exception as e:
status["tables"][table_name] = f"错误: {e}"
print(f"⚠️ {table_name}: 检查失败 - {e}")
print(f"{table_name}: 检查失败 - {e}")
# 检查车辆数据详情
if 'vehicle_data' in status["tables"] and isinstance(status["tables"]['vehicle_data'], int):
@@ -736,18 +799,18 @@ class DatabaseInitializer:
"unverified": unverified_count
}
print(f"\n📊 总记录数: {status['total_records']}")
print("\n数据库状态检查完成")
print(f"\n总记录数: {status['total_records']}")
print("\n数据库状态检查完成")
return status
except Exception as e:
print(f"数据库状态检查失败: {e}")
print(f"数据库状态检查失败: {e}")
return {"error": str(e)}
def main():
"""主函数"""
print("🚀 TSP智能助手数据库初始化工具 - 重构版本")
print("TSP智能助手数据库初始化工具")
print("=" * 80)
# 创建初始化器
@@ -757,7 +820,7 @@ def main():
force_reset = '--reset' in sys.argv or '--force' in sys.argv
if force_reset:
print("⚠️ 警告:将重置数据库,所有数据将被删除!")
print("警告:将重置数据库,所有数据将被删除!")
try:
confirm = input("确定要继续吗?(y/N): ")
if confirm.lower() != 'y':
@@ -772,32 +835,12 @@ def main():
initializer.check_database_status()
print("\n" + "=" * 80)
print("🎉 数据库初始化成功!")
print("数据库初始化成功!")
print("=" * 80)
print("✅ 已完成的操作:")
print(" - 创建所有数据库表")
print(" - 执行数据库迁移")
print(" - 添加知识库验证字段")
print(" - 创建车辆数据表")
print(" - 插入初始知识库数据")
print(" - 添加示例车辆数据")
print(" - 验证所有知识库条目")
print(" - 生成初始化报告")
print("\n🚀 现在您可以运行以下命令启动系统:")
print(" python start_dashboard.py")
print("\n🧪 或运行功能测试:")
print(" python test_new_features.py")
print("\n📋 新功能包括:")
print(" - 知识库分页显示")
print(" - 知识库验证机制")
print(" - 车辆实时数据管理")
print(" - 文件上传生成知识库")
print(" - 智能对话结合车辆数据")
print(" - 飞书同步功能")
print(" - 系统性能优化")
else:
print("\n" + "=" * 80)
print("数据库初始化失败!")
print("数据库初始化失败!")
print("=" * 80)
if __name__ == "__main__":

File diff suppressed because one or more lines are too long

View File

@@ -1,12 +0,0 @@
apiVersion: 1
providers:
- name: 'default'
orgId: 1
folder: ''
type: file
disableDeletion: false
updateIntervalSeconds: 10
allowUiUpdates: true
options:
path: /etc/grafana/provisioning/dashboards

View File

@@ -1,9 +0,0 @@
apiVersion: 1
datasources:
- name: Prometheus
type: prometheus
access: proxy
url: http://prometheus:9090
isDefault: true
editable: true

View File

@@ -1,45 +0,0 @@
global:
scrape_interval: 15s
evaluation_interval: 15s
rule_files:
# - "first_rules.yml"
# - "second_rules.yml"
scrape_configs:
# Prometheus自身监控
- job_name: 'prometheus'
static_configs:
- targets: ['localhost:9090']
# TSP助手应用监控
- job_name: 'tsp-assistant'
static_configs:
- targets: ['tsp-assistant:5000']
metrics_path: '/api/metrics'
scrape_interval: 30s
scrape_timeout: 10s
# MySQL监控
- job_name: 'mysql'
static_configs:
- targets: ['mysql:3306']
scrape_interval: 30s
# Redis监控
- job_name: 'redis'
static_configs:
- targets: ['redis:6379']
scrape_interval: 30s
# Nginx监控
- job_name: 'nginx'
static_configs:
- targets: ['nginx:80']
scrape_interval: 30s
# Node Exporter系统监控
- job_name: 'node-exporter'
static_configs:
- targets: ['node-exporter:9100']
scrape_interval: 30s

View File

@@ -1,84 +1,80 @@
# 核心依赖
sqlalchemy>=2.0.0
requests>=2.31.0
numpy>=1.24.0
scikit-learn>=1.3.0
sqlalchemy>=2.0.32
requests>=2.32.3
numpy>=1.26.4
scikit-learn>=1.4.2
# 数据库驱动
pymysql>=1.1.0
cryptography>=41.0.0
flask>=2.3.0
flask-cors>=4.0.0
websockets>=11.0.0
pymysql>=1.1.1
cryptography>=43.0.1
flask>=3.0.3
flask-cors>=5.0.0
websockets>=15.0.1
# 中文处理
jieba>=0.42.1
# 系统监控
psutil>=5.9.0
psutil>=5.9.8
# 数据处理
pandas>=2.0.0
openpyxl>=3.1.0
pandas>=2.2.2
openpyxl>=3.1.5
# 向量化
sentence-transformers>=2.2.0
transformers>=4.30.0
torch>=2.0.0
# 向量化(可选,如果不需要可以注释掉以节省空间)
# sentence-transformers>=2.7.1
# transformers>=4.43.2
# torch>=2.4.1
# 日志和配置
python-dotenv>=1.0.0
structlog>=23.0.0
python-dotenv>=1.0.1
structlog>=24.4.0
# 时间处理
python-dateutil>=2.8.0
python-dateutil>=2.9.0
# JSON处理
ujson>=5.8.0
ujson>=5.10.0
# 异步支持
aiohttp>=3.8.0
asyncio>=3.4.3
aiohttp>=3.10.10
# asyncio是Python内置模块不需要安装
# Redis缓存
redis>=4.5.0
redis-py-cluster>=2.1.0
redis>=5.2.0
redis-py-cluster>=2.1.3
# 测试框架
pytest>=7.4.0
pytest-asyncio>=0.21.0
pytest-cov>=4.1.0
pytest>=8.3.3
pytest-asyncio>=0.24.0
pytest-cov>=6.0.0
# 代码质量
black>=23.0.0
flake8>=6.0.0
mypy>=1.5.0
isort>=5.12.0
# 性能监控
prometheus-client>=0.17.0
grafana-api>=1.0.0
black>=24.8.0
flake8>=7.1.1
mypy>=1.11.1
isort>=5.13.2
# 安全
bcrypt>=4.0.0
pyjwt>=2.8.0
bcrypt>=4.2.1
pyjwt>=2.9.0
# 文件处理
python-magic>=0.4.27
pillow>=10.0.0
pillow>=11.0.0
# 网络工具
urllib3>=2.0.0
httpx>=0.24.0
urllib3>=2.2.3
httpx>=0.27.2
# 数据验证
pydantic>=2.0.0
marshmallow>=3.20.0
pydantic>=2.9.2
marshmallow>=3.21.4
# 任务队列
celery>=5.3.0
kombu>=5.3.0
# 任务队列(可选)
# celery>=5.4.0
# kombu>=5.4.1
# 文档生成
sphinx>=7.0.0
sphinx-rtd-theme>=1.3.0
# 文档生成(可选)
# sphinx>=7.5.0
# sphinx-rtd-theme>=2.0.0

108
simple_git_push.bat Normal file
View File

@@ -0,0 +1,108 @@
@echo off
chcp 65001 >nul
setlocal enabledelayedexpansion

echo ========================================
echo 简单Git推送工具
echo ========================================
echo.

:: 1. Show working-tree status.
echo [1] Git状态:
git status --short
echo.

:: 2. Show remotes. NOTE: "git remote -v" exits 0 even when no remote is
::    configured, so probe the origin URL explicitly to detect that case.
echo [2] 远程仓库:
git remote -v
git remote get-url origin >nul 2>&1
if %errorlevel% neq 0 (
    echo 错误: 未配置远程仓库
    pause
    exit /b 1
)
echo.

:: 3. Determine the current branch; fall back to "main" when it cannot be
::    resolved (detached HEAD, or git too old for --show-current).
echo [3] 当前分支:
for /f "tokens=*" %%b in ('git branch --show-current 2^>nul') do set branch=%%b
if "!branch!"=="" (
    echo 警告: 无法获取分支名称尝试使用main
    set branch=main
)
echo 分支: !branch!
echo.

:: 4. Report uncommitted / staged changes (informational only).
echo [4] 检查未提交的更改...
git diff --quiet
set has_uncommitted=%errorlevel%
git diff --cached --quiet
set has_staged=%errorlevel%
if %has_uncommitted% neq 0 (
    echo 有未暂存的更改
)
if %has_staged% neq 0 (
    echo 有已暂存的更改
)
if %has_uncommitted% equ 0 if %has_staged% equ 0 (
    echo 所有更改已提交
)
echo.

:: 5. Push. Do NOT pipe the push through findstr: after a pipeline, cmd's
::    ERRORLEVEL is the exit code of the LAST command in the pipe (findstr),
::    which would mask git's real success/failure status.
echo [5] 开始推送...
echo 命令: git push origin !branch!
echo.
git push origin !branch!
set push_error=!errorlevel!

if !push_error! equ 0 (
    echo.
    echo ========================================
    echo 推送成功!
    echo ========================================
) else (
    echo.
    echo ========================================
    echo 推送失败!错误码: !push_error!
    echo ========================================
    echo.
    echo 尝试设置上游并推送...
    git push -u origin !branch!
    set push_u_error=!errorlevel!
    if !push_u_error! equ 0 (
        echo.
        echo ========================================
        echo 推送成功(已设置上游)!
        echo ========================================
    ) else (
        echo.
        echo ========================================
        echo 推送仍然失败
        echo ========================================
        echo.
        echo 常见问题和解决方案:
        echo.
        echo 1. 认证问题:
        :: Parentheses inside echo lines must be escaped with ^ here --
        :: an unescaped ) inside this ( ... ) block would close it early.
        echo    - 检查SSH密钥: ssh -T git@github.com ^(GitHub^)
        echo    - 检查SSH密钥: ssh -T git@gitee.com ^(Gitee^)
        echo    - 或使用HTTPS + Personal Access Token
        echo.
        echo 2. 远程仓库地址:
        git config --get remote.origin.url
        echo.
        echo 3. 分支冲突:
        echo    - 先拉取: git pull origin !branch! --rebase
        echo    - 解决冲突后推送: git push origin !branch!
        echo.
        echo 4. 检查网络连接和远程仓库权限
        echo.
    )
)
echo.
pause

View File

@@ -212,7 +212,7 @@ class TSPAgentAssistant:
try:
self.is_agent_mode = enabled
logger.info(f"Agent模式: {'启用' if enabled else '禁用'}")
return True
return True
except Exception as e:
logger.error(f"切换Agent模式失败: {e}")
return False
@@ -233,8 +233,8 @@ class TSPAgentAssistant:
"""停止主动监控"""
try:
self.ai_monitoring_active = False
logger.info("主动监控已停止")
return True
logger.info("主动监控已停止")
return True
except Exception as e:
logger.error(f"停止主动监控失败: {e}")
return False
@@ -261,14 +261,14 @@ class TSPAgentAssistant:
recent_executions = self.get_action_history(20)
# 生成分析报告
analysis = {
analysis = {
"tool_performance": tool_performance,
"recent_activity": len(recent_executions),
"success_rate": tool_performance.get("success_rate", 0),
"recommendations": self._generate_recommendations(tool_performance)
}
}
return analysis
return analysis
except Exception as e:
logger.error(f"运行智能分析失败: {e}")
@@ -357,8 +357,8 @@ class TSPAgentAssistant:
try:
logger.info(f"保存知识条目 {i+1}: {entry.get('question', '')[:50]}...")
# 这里应该调用知识库管理器保存
saved_count += 1
logger.info(f"知识条目 {i+1} 保存成功")
saved_count += 1
logger.info(f"知识条目 {i+1} 保存成功")
except Exception as save_error:
logger.error(f"保存知识条目 {i+1} 时出错: {save_error}")
@@ -380,9 +380,9 @@ class TSPAgentAssistant:
with open(file_path, 'r', encoding='utf-8') as f:
return f.read()
elif file_ext == '.pdf':
return "PDF文件需要安装PyPDF2库"
return "PDF文件需要安装PyPDF2库"
elif file_ext in ['.doc', '.docx']:
return "Word文件需要安装python-docx库"
return "Word文件需要安装python-docx库"
else:
return "不支持的文件格式"
except Exception as e:

View File

@@ -41,8 +41,17 @@ class WorkOrder(Base):
has_updated_same_day = Column(String(50), nullable=True) # 是否同日更新
operating_time = Column(String(100), nullable=True) # 操作时间
# 工单分发和权限管理字段
assigned_module = Column(String(50), nullable=True) # 分配的模块TBOX、OTA等
module_owner = Column(String(100), nullable=True) # 业务接口人/模块负责人
dispatcher = Column(String(100), nullable=True) # 分发人(运维人员)
dispatch_time = Column(DateTime, nullable=True) # 分发时间
region = Column(String(50), nullable=True) # 区域overseas/domestic- 用于区分海外/国内
# 关联对话记录
conversations = relationship("Conversation", back_populates="work_order")
# 关联处理过程记录
process_history = relationship("WorkOrderProcessHistory", back_populates="work_order", order_by="WorkOrderProcessHistory.process_time")
class Conversation(Base):
"""对话记录模型"""
@@ -136,3 +145,31 @@ class WorkOrderSuggestion(Base):
use_human_resolution = Column(Boolean, default=False) # 是否使用人工描述入库
created_at = Column(DateTime, default=datetime.now)
updated_at = Column(DateTime, default=datetime.now, onupdate=datetime.now)
class WorkOrderProcessHistory(Base):
    """Per-step processing history for a work order.

    Each row records one action taken on a work order (dispatch, process,
    close, reassign, ...) together with who performed it and the resulting
    status transition.
    """
    __tablename__ = "work_order_process_history"
    id = Column(Integer, primary_key=True)
    work_order_id = Column(Integer, ForeignKey("work_orders.id"), nullable=False)
    # Processor identity
    processor_name = Column(String(100), nullable=False)  # display name of the person handling this step
    processor_role = Column(String(50), nullable=True)  # role of the processor (ops staff, business owner, ...)
    processor_region = Column(String(50), nullable=True)  # processor region: overseas / domestic
    # What was done
    process_content = Column(Text, nullable=False)  # free-text description of the action taken
    action_type = Column(String(50), nullable=False)  # action kind: dispatch, process, close, reassign, ...
    # Outcome of the step
    previous_status = Column(String(50), nullable=True)  # work-order status before this step
    new_status = Column(String(50), nullable=True)  # work-order status after this step
    assigned_module = Column(String(50), nullable=True)  # target module when the action is a dispatch
    # Timestamps
    process_time = Column(DateTime, default=datetime.now, nullable=False)  # when the step happened
    created_at = Column(DateTime, default=datetime.now)
    # Back-reference to the owning work order
    work_order = relationship("WorkOrder", back_populates="process_history")

View File

@@ -0,0 +1,231 @@
# -*- coding: utf-8 -*-
"""
工单权限管理模块
实现基于角色的访问控制RBAC和工单分发流程
"""
import logging
from typing import List, Dict, Optional, Set
from enum import Enum
logger = logging.getLogger(__name__)
class UserRole(Enum):
    """User roles for work-order access control (RBAC)."""
    # Regional operations staff (overseas / domestic)
    OVERSEAS_OPS = "overseas_ops"  # overseas regional operations
    DOMESTIC_OPS = "domestic_ops"  # domestic regional operations
    # Business-side module owners (one contact per module)
    TBOX_OWNER = "tbox_owner"  # TBOX module owner
    OTA_OWNER = "ota_owner"  # OTA module owner
    DMC_OWNER = "dmc_owner"  # DMC module owner
    MES_OWNER = "mes_owner"  # MES module owner
    APP_OWNER = "app_owner"  # APP module owner
    PKI_OWNER = "pki_owner"  # PKI module owner
    TSP_OWNER = "tsp_owner"  # TSP module owner
    # System roles
    ADMIN = "admin"  # system administrator
    VIEWER = "viewer"  # read-only user
class WorkOrderModule(Enum):
    """Business modules a work order can be dispatched to."""
    TBOX = "TBOX"
    OTA = "OTA"
    DMC = "DMC"
    MES = "MES"
    APP = "APP"
    PKI = "PKI"
    TSP = "TSP"
    LOCAL_OPS = "local_ops"  # handled directly by regional operations staff
    UNASSIGNED = "unassigned"  # not yet dispatched to any module
class WorkOrderStatus:
    """String constants for the work-order lifecycle states."""
    PENDING = "pending"  # waiting to be handled
    ASSIGNED = "assigned"  # dispatched to a module
    IN_PROGRESS = "in_progress"  # being worked on
    RESOLVED = "resolved"  # a solution has been found
    CLOSED = "closed"  # closed
class WorkOrderPermissionManager:
    """Role-based access control for work orders.

    All helpers are static: they answer, for a given role, what that role
    may see, access, dispatch, or update. Regional ops staff and admins
    are unrestricted; business owners are confined to their own module.
    """

    # Every module; granted to regional ops staff and administrators.
    ALL_MODULES = {
        WorkOrderModule.TBOX, WorkOrderModule.OTA, WorkOrderModule.DMC,
        WorkOrderModule.MES, WorkOrderModule.APP, WorkOrderModule.PKI,
        WorkOrderModule.TSP, WorkOrderModule.LOCAL_OPS
    }

    # Modules each role is allowed to touch.
    ROLE_MODULE_MAP = {
        UserRole.TBOX_OWNER: {WorkOrderModule.TBOX},
        UserRole.OTA_OWNER: {WorkOrderModule.OTA},
        UserRole.DMC_OWNER: {WorkOrderModule.DMC},
        UserRole.MES_OWNER: {WorkOrderModule.MES},
        UserRole.APP_OWNER: {WorkOrderModule.APP},
        UserRole.PKI_OWNER: {WorkOrderModule.PKI},
        UserRole.TSP_OWNER: {WorkOrderModule.TSP},
        UserRole.OVERSEAS_OPS: ALL_MODULES,  # unrestricted access
        UserRole.DOMESTIC_OPS: ALL_MODULES,  # unrestricted access
        UserRole.ADMIN: ALL_MODULES,         # administrators see everything
        UserRole.VIEWER: set(),              # read-only; governed elsewhere
    }

    @staticmethod
    def can_view_all_workorders(role: UserRole) -> bool:
        """Return True for roles that may see every work order (ops + admin)."""
        return role in (UserRole.OVERSEAS_OPS, UserRole.DOMESTIC_OPS, UserRole.ADMIN)

    @staticmethod
    def get_accessible_modules(role: UserRole) -> Set[WorkOrderModule]:
        """Return the set of modules the given role may access."""
        return WorkOrderPermissionManager.ROLE_MODULE_MAP.get(role, set())

    @staticmethod
    def can_access_module(role: UserRole, module: WorkOrderModule) -> bool:
        """Return True when *role* may access *module*."""
        # Unrestricted roles short-circuit the per-module check.
        if WorkOrderPermissionManager.can_view_all_workorders(role):
            return True
        return module in WorkOrderPermissionManager.get_accessible_modules(role)

    @staticmethod
    def can_dispatch_workorder(role: UserRole) -> bool:
        """Only regional ops staff and administrators may dispatch work orders."""
        return role in (UserRole.OVERSEAS_OPS, UserRole.DOMESTIC_OPS, UserRole.ADMIN)

    @staticmethod
    def can_update_workorder(role: UserRole, workorder_module: Optional[WorkOrderModule],
                             assigned_to_module: Optional[WorkOrderModule]) -> bool:
        """Return True when *role* may update a work order.

        A business owner may update a work order only when it belongs to a
        module the role owns AND that module is the one it is assigned to.
        """
        if WorkOrderPermissionManager.can_view_all_workorders(role):
            return True
        if not workorder_module or not assigned_to_module:
            return False
        if workorder_module != assigned_to_module:
            return False
        return workorder_module in WorkOrderPermissionManager.get_accessible_modules(role)

    @staticmethod
    def filter_workorders_by_permission(role: UserRole, workorders: List[Dict]) -> List[Dict]:
        """Return only the work orders that *role* is allowed to see."""
        if WorkOrderPermissionManager.can_view_all_workorders(role):
            # Ops staff and admins see the full list unchanged.
            return workorders

        allowed = WorkOrderPermissionManager.get_accessible_modules(role)
        visible = []
        for order in workorders:
            raw_module = order.get("module") or order.get("assigned_module")
            if not raw_module:
                # Unassigned orders stay hidden from business owners.
                continue
            try:
                if WorkOrderModule(raw_module) in allowed:
                    visible.append(order)
            except ValueError:
                # Module value outside the enum: treat as not visible.
                continue
        return visible
class WorkOrderDispatchManager:
    """Dispatching of work orders to business modules."""

    # Module -> business contact person (candidate for dynamic configuration).
    MODULE_OWNER_MAP = {
        WorkOrderModule.TBOX: "TBOX业务接口人",
        WorkOrderModule.OTA: "OTA业务接口人",
        WorkOrderModule.DMC: "DMC业务接口人",
        WorkOrderModule.MES: "MES业务接口人",
        WorkOrderModule.APP: "APP业务接口人",
        WorkOrderModule.PKI: "PKI业务接口人",
        WorkOrderModule.TSP: "TSP业务接口人",
    }

    @staticmethod
    def get_module_owner(module: WorkOrderModule) -> str:
        """Return the business contact for *module* ("未指定" when unknown)."""
        return WorkOrderDispatchManager.MODULE_OWNER_MAP.get(module, "未指定")

    @staticmethod
    def dispatch_workorder(workorder_id: int, target_module: WorkOrderModule,
                           dispatcher_role: UserRole, dispatcher_name: str) -> Dict:
        """Dispatch a work order to the given module.

        Args:
            workorder_id: id of the work order being dispatched.
            target_module: module that should take over the order.
            dispatcher_role: role of the dispatcher (must be ops or admin).
            dispatcher_name: display name of the dispatcher.

        Returns:
            A result dict; on success it carries the assignment details.
        """
        # Refuse dispatch for roles without the dispatch permission.
        if not WorkOrderPermissionManager.can_dispatch_workorder(dispatcher_role):
            return {
                "success": False,
                "error": "无权进行工单分发,只有属地运维和管理员可以分发工单"
            }

        owner = WorkOrderDispatchManager.get_module_owner(target_module)

        # NOTE(review): the database update for the work order is not
        # performed here yet -- the caller receives the intended result only.
        return {
            "success": True,
            "message": f"工单已分发到{target_module.value}模块",
            "assigned_module": target_module.value,
            "module_owner": owner,
            "dispatcher": dispatcher_name,
            "dispatcher_role": dispatcher_role.value
        }

    @staticmethod
    def suggest_module(description: str, title: str = "") -> Optional[WorkOrderModule]:
        """Suggest a module for a work order by keyword matching.

        Args:
            description: work-order description text.
            title: optional work-order title.

        Returns:
            The first module whose keywords appear in the combined text,
            or ``WorkOrderModule.UNASSIGNED`` when nothing matches.
        """
        # Simple substring matching; an AI-based classifier could replace this.
        haystack = (title + " " + description).lower()

        keyword_map = {
            WorkOrderModule.TBOX: ["tbox", "telematics", "车载", "车联网"],
            WorkOrderModule.OTA: ["ota", "over-the-air", "升级", "update"],
            WorkOrderModule.DMC: ["dmc", "device management", "设备管理"],
            WorkOrderModule.MES: ["mes", "manufacturing", "制造"],
            WorkOrderModule.APP: ["app", "application", "应用", "remote control"],
            WorkOrderModule.PKI: ["pki", "certificate", "证书"],
            WorkOrderModule.TSP: ["tsp", "service", "服务"],
        }
        for module, keywords in keyword_map.items():
            if any(keyword in haystack for keyword in keywords):
                return module
        return WorkOrderModule.UNASSIGNED

View File

@@ -23,7 +23,7 @@ class AISuggestionService:
self.llm_config = get_config().llm
logger.info(f"使用LLM配置: {self.llm_config.provider} - {self.llm_config.model}")
def generate_suggestion(self, tr_description: str, process_history: Optional[str] = None, vin: Optional[str] = None) -> str:
def generate_suggestion(self, tr_description: str, process_history: Optional[str] = None, vin: Optional[str] = None, existing_ai_suggestion: Optional[str] = None) -> str:
"""
生成AI建议 - 参考处理过程记录生成建议
@@ -31,6 +31,7 @@ class AISuggestionService:
tr_description: TR描述
process_history: 处理过程记录(可选,用于了解当前问题状态)
vin: 车架号(可选)
existing_ai_suggestion: 现有的AI建议可选用于判断是否是首次建议
Returns:
AI建议文本
@@ -41,6 +42,11 @@ class AISuggestionService:
chat_manager = RealtimeChatManager()
# 判断是否是首次建议通过检查现有AI建议
is_first_suggestion = True
if existing_ai_suggestion and existing_ai_suggestion.strip():
is_first_suggestion = False
# 构建上下文信息
context_info = ""
if process_history and process_history.strip():
@@ -49,17 +55,29 @@ class AISuggestionService:
已处理的步骤:
{process_history}"""
# 根据是否为首次建议,设置不同的提示词
if is_first_suggestion:
# 首次建议:只给出一般性的排查步骤,不要提进站抓取日志
suggestion_instruction = """要求:
1. 首次给客户建议,只提供远程可操作的一般性排查步骤
2. 如检查网络、重启系统、确认配置等常见操作
3. 绝对不要提到"进站""抓取日志"等需要线下操作的内容
4. 语言简洁精炼,用逗号连接,不要用序号或分行"""
else:
# 后续建议:如果已有处理记录但未解决,可以考虑更深入的方案
suggestion_instruction = """要求:
1. 基于已有处理步骤,给出下一步的排查建议
2. 如果远程操作都无法解决,可以考虑更深入的诊断方案
3. 语言简洁精炼,用逗号连接,不要用序号或分行"""
# 构建用户消息 - 要求生成简洁的简短建议
user_message = f"""请为以下问题提供精炼的技术支持操作建议:
格式要求:
1. 用逗号连接,一句话表达,不要用序号或分行
2. 现状+步骤,语言精炼
3. 总长度控制在150字以内
1. 现状+步骤,语言精炼
2. 总长度控制在150字以内
根据问题复杂程度选择结尾:
- 简单问题:给出具体操作步骤即可,不需要提日志分析
- 复杂问题:如远程操作无法解决,结尾才使用"建议邀请用户进站抓取日志分析"
{suggestion_instruction}
问题描述:{tr_description}{context_info}"""
@@ -76,13 +94,13 @@ class AISuggestionService:
logger.info(f"AI生成原始内容: {content[:100]}...")
# 二次处理:替换默认建议(在清理前先替换)
content = self._post_process_suggestion(content)
content = self._post_process_suggestion(content, is_first_suggestion)
# 清理并限制长度
cleaned = self._clean_response(content)
# 再次检查,确保替换生效
cleaned = self._post_process_suggestion(cleaned)
cleaned = self._post_process_suggestion(cleaned, is_first_suggestion)
# 记录清理后的内容
logger.info(f"AI建议清理后: {cleaned[:100]}...")
@@ -178,12 +196,13 @@ class AISuggestionService:
return cleaned
def _post_process_suggestion(self, content: str) -> str:
def _post_process_suggestion(self, content: str, is_first_suggestion: bool = True) -> str:
"""
二次处理建议内容:替换默认建议文案
Args:
content: 清理后的内容
is_first_suggestion: 是否是首次建议
Returns:
处理后的内容
@@ -191,22 +210,38 @@ class AISuggestionService:
if not content or not content.strip():
return content
# 替换各种形式的"联系售后技术支持"为"邀请用户进站抓取日志分析"
replacements = [
("建议联系售后技术支持进一步排查", "建议邀请用户进站抓取日志分析"),
("联系售后技术支持进行进一步排查", "邀请用户进站抓取日志分析"),
("建议联系售后技术支持", "建议邀请用户进站抓取日志分析"),
("联系售后技术支持", "邀请用户进站抓取日志分析"),
("如问题仍未解决,建议联系售后技术支持进行进一步排查", "如问题仍未解决,建议邀请用户进站抓取日志分析"),
("若仍无效,建议联系售后技术支持进一步排查", "若仍无效,建议邀请用户进站抓取日志分析"),
("仍无效,建议联系售后技术支持", "仍无效,建议邀请用户进站抓取日志分析"),
]
result = content
for old_text, new_text in replacements:
if old_text in result:
result = result.replace(old_text, new_text)
logger.info(f"✓ 替换建议文案: '{old_text}' -> '{new_text}'")
# 如果是首次建议,移除所有"进站"、"抓取日志"相关的内容
if is_first_suggestion:
# 移除进站相关的文案
station_keywords = [
"进站", "抓取日志", "邀请用户进站", "建议邀请用户进站",
"建议进站", "需要进站", "前往服务站", "联系售后", "售后技术支持"
]
for keyword in station_keywords:
if keyword in result:
# 找到包含关键词的句子并移除
lines = result.split('')
new_lines = [line for line in lines if keyword not in line]
result = ''.join(new_lines)
logger.info(f"首次建议,移除包含'{keyword}'的内容")
else:
# 非首次建议:替换"联系售后技术支持"为"邀请用户进站抓取日志分析"
replacements = [
("建议联系售后技术支持进一步排查", "建议邀请用户进站抓取日志分析"),
("联系售后技术支持进行进一步排查", "邀请用户进站抓取日志分析"),
("建议联系售后技术支持", "建议邀请用户进站抓取日志分析"),
("联系售后技术支持", "邀请用户进站抓取日志分析"),
("如问题仍未解决,建议联系售后技术支持进行进一步排查", "如问题仍未解决,建议邀请用户进站抓取日志分析"),
("若仍无效,建议联系售后技术支持进一步排查", "若仍无效,建议邀请用户进站抓取日志分析"),
("仍无效,建议联系售后技术支持", "仍无效,建议邀请用户进站抓取日志分析"),
]
for old_text, new_text in replacements:
if old_text in result:
result = result.replace(old_text, new_text)
logger.info(f"✓ 替换建议文案: '{old_text}' -> '{new_text}'")
# 如果没有任何替换,记录一下
if result == content:
@@ -342,7 +377,7 @@ class AISuggestionService:
logger.info(f"记录 {record.get('record_id', i)} - 现有AI建议前100字符: {existing_ai_suggestion[:100]}")
if tr_description:
ai_suggestion = self.generate_suggestion(tr_description, process_history, vin)
ai_suggestion = self.generate_suggestion(tr_description, process_history, vin, existing_ai_suggestion)
# 处理同一天多次更新的情况
new_suggestion = self._format_ai_suggestion_with_numbering(
time_str, ai_suggestion, existing_ai_suggestion

View File

@@ -53,13 +53,13 @@ class WorkOrderSyncService:
self.field_mapping = {
# 核心字段
"TR Number": "order_id",
"TR Description": "description",
"TR Description": "description", # 问题描述
"Type of problem": "category",
"TR Level": "priority",
"TR Status": "status",
"Source": "source",
"Date creation": "created_at",
"处理过程": "solution",
"处理过程": "resolution", # 处理过程历史记录存储完整历史到resolution字段
"TR tracking": "resolution",
# 扩展字段
@@ -194,18 +194,28 @@ class WorkOrderSyncService:
workorder_data = self._convert_feishu_to_local(parsed_fields)
workorder_data["feishu_record_id"] = feishu_id
# 过滤掉WorkOrder模型不支持的字段防止dict参数错误
valid_fields = {}
for key, value in workorder_data.items():
if hasattr(WorkOrder, key):
# 确保值不是dict、list等复杂类型
if isinstance(value, (dict, list)):
logger.warning(f"字段 '{key}' 包含复杂类型 {type(value).__name__},跳过")
continue
valid_fields[key] = value
if existing_workorder:
# 更新现有记录
for key, value in workorder_data.items():
for key, value in valid_fields.items():
if key != "feishu_record_id":
setattr(existing_workorder, key, value)
existing_workorder.updated_at = datetime.now()
updated_count += 1
else:
# 创建新记录
workorder_data["created_at"] = datetime.now()
workorder_data["updated_at"] = datetime.now()
new_workorder = WorkOrder(**workorder_data)
valid_fields["created_at"] = datetime.now()
valid_fields["updated_at"] = datetime.now()
new_workorder = WorkOrder(**valid_fields)
session.add(new_workorder)
created_count += 1
@@ -337,21 +347,17 @@ class WorkOrderSyncService:
"""创建新工单"""
try:
with db_manager.get_session() as session:
workorder = WorkOrder(
order_id=local_data.get("order_id"),
title=local_data.get("title"),
description=local_data.get("description"),
category=local_data.get("category"),
priority=local_data.get("priority"),
status=local_data.get("status"),
created_at=local_data.get("created_at"),
updated_at=local_data.get("updated_at"),
resolution=local_data.get("solution"),
feishu_record_id=local_data.get("feishu_record_id"),
assignee=local_data.get("assignee"),
solution=local_data.get("solution"),
ai_suggestion=local_data.get("ai_suggestion")
)
# 只使用WorkOrder模型支持的字段
valid_data = {}
for key, value in local_data.items():
if hasattr(WorkOrder, key):
# 确保值不是dict、list等复杂类型
if isinstance(value, (dict, list)):
logger.warning(f"字段 '{key}' 包含复杂类型 {type(value).__name__},跳过")
continue
valid_data[key] = value
workorder = WorkOrder(**valid_data)
session.add(workorder)
session.commit()
session.refresh(workorder)
@@ -432,15 +438,38 @@ class WorkOrderSyncService:
logger.warning(f"时间字段转换失败: {e}, 使用当前时间")
local_data[local_field] = datetime.now()
# 生成标题
tr_number = feishu_fields.get("TR Number", "")
problem_type = feishu_fields.get("Type of problem", "")
if tr_number and problem_type:
local_data["title"] = f"{tr_number} - {problem_type}"
elif tr_number:
local_data["title"] = f"{tr_number} - TR工单"
# 生成标题使用TR Description作为标题
tr_description = feishu_fields.get("TR Description", "")
if tr_description:
# 标题直接使用问题描述,如果太长则截断
if len(tr_description) > 200:
local_data["title"] = tr_description[:197] + "..."
else:
local_data["title"] = tr_description
else:
local_data["title"] = "TR工单"
# 如果没有描述使用TR Number
tr_number = feishu_fields.get("TR Number", "")
if tr_number:
local_data["title"] = f"{tr_number} - TR工单"
else:
local_data["title"] = "TR工单"
# 处理"处理过程"字段提取最新一条作为solution
# "处理过程"字段已映射到resolution这里需要
# 1. resolution存储完整的"处理过程"历史
# 2. solution存储"处理过程"的最新一条
process_history = local_data.get("resolution", "")
if process_history and isinstance(process_history, str):
# 按换行分割,获取最后一行(最新一条)
process_lines = [line.strip() for line in process_history.split('\n') if line.strip()]
if process_lines:
# 最新一条作为solution
local_data["solution"] = process_lines[-1]
# 完整历史保留在resolution已在字段映射中设置
else:
local_data["solution"] = ""
else:
local_data["solution"] = ""
# 设置默认值
if "status" not in local_data:

View File

@@ -3,43 +3,59 @@
"""
语义相似度计算服务
使用sentence-transformers进行更准确的语义相似度计算
使用LLM API进行更准确的语义相似度计算提高理解力并节约服务端资源
"""
import logging
import numpy as np
import re
from typing import List, Tuple, Optional
from sentence_transformers import SentenceTransformer
import torch
logger = logging.getLogger(__name__)
class SemanticSimilarityCalculator:
"""语义相似度计算器"""
"""语义相似度计算器 - 使用LLM API"""
def __init__(self, model_name: str = "all-MiniLM-L6-v2"):
def __init__(self, use_llm: bool = True):
"""
初始化语义相似度计算器
Args:
model_name: 使用的预训练模型名称
- all-MiniLM-L6-v2: 英文模型,速度快,推荐用于生产环境
- paraphrase-multilingual-MiniLM-L12-v2: 多语言模型,支持中文
- paraphrase-multilingual-mpnet-base-v2: 多语言模型,精度高
use_llm: 是否使用LLM API计算相似度默认True推荐
- True: 使用LLM API理解力更强无需加载本地模型
- False: 使用本地模型需要下载HuggingFace模型
"""
self.model_name = model_name
self.use_llm = use_llm
self.model = None
self._load_model()
self.llm_client = None
if use_llm:
self._init_llm_client()
else:
self._load_model()
def _init_llm_client(self):
"""初始化LLM客户端"""
try:
from ..core.llm_client import QwenClient
self.llm_client = QwenClient()
logger.info("LLM客户端初始化成功将使用LLM API计算语义相似度")
except Exception as e:
logger.error(f"初始化LLM客户端失败: {e}")
self.llm_client = None
# 回退到本地模型
self.use_llm = False
self._load_model()
def _load_model(self):
"""加载预训练模型"""
"""加载预训练模型仅在use_llm=False时使用"""
try:
logger.info(f"正在加载语义相似度模型: {self.model_name}")
self.model = SentenceTransformer(self.model_name)
logger.info("语义相似度模型加载成功")
logger.info(f"正在加载本地语义相似度模型: all-MiniLM-L6-v2")
self.model = SentenceTransformer("all-MiniLM-L6-v2")
logger.info("本地语义相似度模型加载成功")
except Exception as e:
logger.error(f"加载语义相似度模型失败: {e}")
# 回退到简单模型
logger.error(f"加载本地语义相似度模型失败: {e}")
self.model = None
def calculate_similarity(self, text1: str, text2: str, fast_mode: bool = True) -> float:
@@ -49,7 +65,7 @@ class SemanticSimilarityCalculator:
Args:
text1: 第一个文本
text2: 第二个文本
fast_mode: 是否使用快速模式(结合传统方法
fast_mode: 是否使用快速模式(仅在使用本地模型时有效
Returns:
相似度分数 (0-1之间)
@@ -58,27 +74,22 @@ class SemanticSimilarityCalculator:
return 0.0
try:
# 快速模式:先使用传统方法快速筛选
if fast_mode:
tfidf_sim = self._calculate_tfidf_similarity(text1, text2)
# 优先使用LLM API计算相似度
if self.use_llm and self.llm_client:
return self._calculate_llm_similarity(text1, text2)
# 如果传统方法相似度很高或很低,直接返回
if tfidf_sim >= 0.9:
return tfidf_sim
elif tfidf_sim <= 0.3:
return tfidf_sim
# 中等相似度时,使用语义方法进行精确计算
if self.model is not None:
# 回退到本地模型或TF-IDF
if self.model is not None:
if fast_mode:
# 快速模式先使用TF-IDF快速筛选
tfidf_sim = self._calculate_tfidf_similarity(text1, text2)
if tfidf_sim >= 0.9 or tfidf_sim <= 0.3:
return tfidf_sim
# 中等相似度时,使用语义方法进行精确计算
semantic_sim = self._calculate_semantic_similarity(text1, text2)
# 结合两种方法的结果
return (tfidf_sim * 0.3 + semantic_sim * 0.7)
else:
return tfidf_sim
# 完整模式:直接使用语义相似度
if self.model is not None:
return self._calculate_semantic_similarity(text1, text2)
return self._calculate_semantic_similarity(text1, text2)
else:
return self._calculate_tfidf_similarity(text1, text2)
@@ -86,6 +97,80 @@ class SemanticSimilarityCalculator:
logger.error(f"计算语义相似度失败: {e}")
return self._calculate_tfidf_similarity(text1, text2)
def _calculate_llm_similarity(self, text1: str, text2: str) -> float:
    """Score the semantic similarity of two texts via the LLM API.

    Returns a float in [0, 1].  Falls back to TF-IDF similarity whenever
    the LLM call reports an error or raises.
    """
    try:
        # Prompt the LLM to grade similarity on a 0-1 scale.  The prompt
        # text is a runtime string (Chinese) — do not alter it.
        prompt = f"""请比较以下两个文本的语义相似度并给出0-1之间的分数保留2位小数其中
- 1.0 表示完全相同
- 0.8-0.9 表示非常相似
- 0.6-0.7 表示较为相似
- 0.4-0.5 表示部分相似
- 0.0-0.3 表示差异很大
文本1: {text1}
文本2: {text2}
请只返回0-1之间的数字保留2位小数不要包含其他文字。例如0.85"""

        messages = [
            {"role": "system", "content": "你是一个专业的文本相似度评估专家,请准确评估两个文本的语义相似度。"},
            {"role": "user", "content": prompt}
        ]

        result = self.llm_client.chat_completion(
            messages=messages,
            temperature=0.1,  # low temperature for stable, repeatable scores
            max_tokens=50
        )

        if "error" in result:
            logger.error(f"LLM API调用失败: {result['error']}")
            # Fall back to TF-IDF when the API reports an error.
            return self._calculate_tfidf_similarity(text1, text2)

        # Pull the numeric score out of the chat-completion payload.
        response_content = result.get("choices", [{}])[0].get("message", {}).get("content", "")
        similarity = self._extract_similarity_from_response(response_content)

        logger.debug(f"LLM计算语义相似度: {similarity:.4f}")
        return similarity

    except Exception as e:
        logger.error(f"LLM语义相似度计算失败: {e}")
        # Fall back to TF-IDF on any unexpected failure.
        return self._calculate_tfidf_similarity(text1, text2)
def _extract_similarity_from_response(self, response: str) -> float:
    """Parse the similarity score out of an LLM response string.

    Tries progressively looser numeric patterns (two-decimal float,
    any float, bare integer).  Values above 1 are treated as
    percentages and divided by 100; the result is clamped to [0, 1].
    Returns 0.5 when no number can be recovered or parsing fails.
    """
    try:
        # Ordered from most to least specific so "0.85" wins over "85".
        patterns = (
            r'(\d+\.\d{1,2})',
            r'(\d+\.\d+)',
            r'(\d+)',
        )
        for pattern in patterns:
            match = re.search(pattern, response)
            if match is None:
                continue
            score = float(match.group(1))
            if score > 1:
                # Likely a percentage form (e.g. "85") — normalise to 0-1.
                score = score / 100.0
            return max(0.0, min(1.0, score))

        # No numeric token anywhere in the response.
        logger.warning(f"无法从响应中提取相似度分数: {response}")
        return 0.5
    except Exception as e:
        logger.error(f"提取相似度分数失败: {e}, 响应: {response}")
        return 0.5
def _calculate_semantic_similarity(self, text1: str, text2: str) -> float:
"""使用sentence-transformers计算语义相似度"""
try:
@@ -159,6 +244,11 @@ class SemanticSimilarityCalculator:
return []
try:
# 优先使用LLM API
if self.use_llm and self.llm_client:
return [self._calculate_llm_similarity(t1, t2) for t1, t2 in text_pairs]
# 回退到本地模型或TF-IDF
if self.model is not None:
return self._batch_semantic_similarity(text_pairs)
else:
@@ -214,17 +304,24 @@ class SemanticSimilarityCalculator:
return "语义差异较大,建议重新生成"
def is_model_available(self) -> bool:
"""检查模型是否可用"""
return self.model is not None
"""检查模型是否可用LLM或本地模型"""
if self.use_llm:
return self.llm_client is not None
else:
return self.model is not None
# 全局实例
_similarity_calculator = None
def get_similarity_calculator() -> SemanticSimilarityCalculator:
"""获取全局相似度计算器实例"""
def get_similarity_calculator(use_llm: bool = True) -> SemanticSimilarityCalculator:
"""获取全局相似度计算器实例
Args:
use_llm: 是否使用LLM API默认True推荐
"""
global _similarity_calculator
if _similarity_calculator is None:
_similarity_calculator = SemanticSimilarityCalculator()
_similarity_calculator = SemanticSimilarityCalculator(use_llm=use_llm)
return _similarity_calculator
def calculate_semantic_similarity(text1: str, text2: str, fast_mode: bool = True) -> float:

Binary file not shown.

View File

@@ -10,6 +10,7 @@ import logging
import uuid
import time
from datetime import datetime
from typing import Optional
from flask import Blueprint, request, jsonify, send_file
from werkzeug.utils import secure_filename
from sqlalchemy import text
@@ -40,12 +41,83 @@ class SimpleAIAccuracyConfig:
from src.main import TSPAssistant
from src.core.database import db_manager
from src.core.models import WorkOrder, Conversation, WorkOrderSuggestion, KnowledgeEntry
from src.core.models import WorkOrder, Conversation, WorkOrderSuggestion, KnowledgeEntry, WorkOrderProcessHistory
from src.core.query_optimizer import query_optimizer
from src.web.service_manager import service_manager
from src.core.workorder_permissions import (
WorkOrderPermissionManager, WorkOrderDispatchManager,
UserRole, WorkOrderModule
)
workorders_bp = Blueprint('workorders', __name__, url_prefix='/api/workorders')
def get_current_user_role() -> UserRole:
    """Resolve the current user's role (temporary shim until real auth).

    Reads the ``X-User-Role`` request header and maps it onto the
    ``UserRole`` enum.  Until an authentication system is integrated,
    both the default and the invalid-value fallback are ADMIN so that
    viewers are not locked out of work-order data.
    """
    # TODO(review): replace the header lookup with session/token auth.
    header_value = request.headers.get('X-User-Role', 'admin')
    try:
        role = UserRole(header_value)
    except ValueError:
        # Unknown role string — fall back to ADMIN (temporary behaviour).
        role = UserRole.ADMIN
    return role
def get_current_user_name() -> str:
    """Resolve the current user's name (temporary shim until real auth).

    Pulled from the ``X-User-Name`` header; defaults to ``anonymous``.
    """
    # TODO(review): read the user from the auth session/token instead.
    incoming_headers = request.headers
    return incoming_headers.get('X-User-Name', 'anonymous')
def add_process_history(
    workorder_id: int,
    processor_name: str,
    process_content: str,
    action_type: str,
    processor_role: Optional[str] = None,
    processor_region: Optional[str] = None,
    previous_status: Optional[str] = None,
    new_status: Optional[str] = None,
    assigned_module: Optional[str] = None
) -> WorkOrderProcessHistory:
    """Persist one processing-history record for a work order.

    Args:
        workorder_id: ID of the work order the record belongs to.
        processor_name: Name of the person performing the action.
        process_content: Free-text description of what was done.
        action_type: Action tag (e.g. dispatch / process / close / reassign).
        processor_role: Role of the processor, if known.
        processor_region: Region of the processor, if known.
        previous_status: Work-order status before the action.
        new_status: Work-order status after the action.
        assigned_module: Module the order was assigned to, if relevant.

    Returns:
        The persisted (and refreshed) WorkOrderProcessHistory row.

    Raises:
        Re-raises any database error after logging it.
    """
    record_fields = dict(
        work_order_id=workorder_id,
        processor_name=processor_name,
        processor_role=processor_role,
        processor_region=processor_region,
        process_content=process_content,
        action_type=action_type,
        previous_status=previous_status,
        new_status=new_status,
        assigned_module=assigned_module,
        process_time=datetime.now(),
    )
    try:
        with db_manager.get_session() as session:
            record = WorkOrderProcessHistory(**record_fields)
            session.add(record)
            session.commit()
            # Refresh so auto-generated columns (id, defaults) are loaded.
            session.refresh(record)
            logger.info(f"工单 {workorder_id} 添加处理记录: {action_type} by {processor_name}")
            return record
    except Exception as e:
        logger.error(f"添加处理记录失败: {e}")
        raise
# 移除get_assistant函数使用service_manager
def _ensure_workorder_template_file() -> str:
@@ -96,13 +168,17 @@ def _ensure_workorder_template_file() -> str:
@workorders_bp.route('')
def get_workorders():
"""获取工单列表(分页)"""
"""获取工单列表(分页,带权限过滤"""
try:
# 获取当前用户角色和权限
current_role = get_current_user_role()
# 获取分页参数
page = request.args.get('page', 1, type=int)
per_page = request.args.get('per_page', 10, type=int)
status_filter = request.args.get('status', '')
priority_filter = request.args.get('priority', '')
module_filter = request.args.get('module', '') # 模块过滤
# 从数据库获取分页数据
from src.core.database import db_manager
@@ -112,11 +188,31 @@ def get_workorders():
# 构建查询
query = session.query(WorkOrder)
# 权限过滤:业务方只能看到自己模块的工单
if not WorkOrderPermissionManager.can_view_all_workorders(current_role):
# 获取用户可访问的模块
accessible_modules = WorkOrderPermissionManager.get_accessible_modules(current_role)
if accessible_modules:
# 构建模块列表过滤条件
module_names = [m.value for m in accessible_modules]
query = query.filter(WorkOrder.assigned_module.in_(module_names))
else:
# 如果没有可访问的模块,返回空列表
return jsonify({
"workorders": [],
"total": 0,
"page": page,
"per_page": per_page,
"total_pages": 0 # 统一使用total_pages字段
})
# 应用过滤器
if status_filter:
query = query.filter(WorkOrder.status == status_filter)
if priority_filter:
query = query.filter(WorkOrder.priority == priority_filter)
if module_filter:
query = query.filter(WorkOrder.assigned_module == module_filter)
# 按创建时间倒序排列
query = query.order_by(WorkOrder.created_at.desc())
@@ -135,6 +231,11 @@ def get_workorders():
'order_id': workorder.order_id,
'title': workorder.title,
'description': workorder.description,
'assigned_module': workorder.assigned_module,
'module_owner': workorder.module_owner,
'dispatcher': workorder.dispatcher,
'dispatch_time': workorder.dispatch_time.isoformat() if workorder.dispatch_time else None,
'region': workorder.region,
'category': workorder.category,
'priority': workorder.priority,
'status': workorder.status,
@@ -163,7 +264,7 @@ def get_workorders():
@workorders_bp.route('', methods=['POST'])
def create_workorder():
"""创建工单"""
"""创建工单(初始状态为待分发)"""
try:
data = request.get_json()
result = service_manager.get_assistant().create_work_order(
@@ -173,6 +274,35 @@ def create_workorder():
priority=data['priority']
)
# 获取当前用户信息(用于记录创建人)
current_user = get_current_user_name()
current_role = get_current_user_role()
# 创建工单后,设置为待分发状态(未分配模块)
if result and 'id' in result:
workorder_id = result.get('id')
with db_manager.get_session() as session:
workorder = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
if workorder:
# 初始状态为待分发
workorder.assigned_module = WorkOrderModule.UNASSIGNED.value
workorder.status = "pending" # 待处理/待分发
workorder.created_by = current_user # 记录创建人
session.commit()
# 记录创建工单的处理历史
processor_region = "overseas" if current_role == UserRole.OVERSEAS_OPS else "domestic"
add_process_history(
workorder_id=workorder_id,
processor_name=current_user,
process_content=f"工单已创建:{data.get('title', '')[:50]}",
action_type="create",
processor_role=current_role.value,
processor_region=processor_region,
previous_status=None,
new_status="pending"
)
# 清除工单相关缓存
from src.core.cache_manager import cache_manager
cache_manager.clear() # 清除所有缓存
@@ -183,12 +313,32 @@ def create_workorder():
@workorders_bp.route('/<int:workorder_id>')
def get_workorder_details(workorder_id):
"""获取工单详情(含数据库对话记录)"""
"""获取工单详情(含数据库对话记录,带权限检查"""
try:
# 获取当前用户角色和权限
current_role = get_current_user_role()
with db_manager.get_session() as session:
w = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
if not w:
return jsonify({"error": "工单不存在"}), 404
# 权限检查:业务方只能访问自己模块的工单
if not WorkOrderPermissionManager.can_view_all_workorders(current_role):
# 检查是否有权限访问该工单
assigned_module_str = w.assigned_module
if not assigned_module_str or assigned_module_str == WorkOrderModule.UNASSIGNED.value:
# 未分配的工单,业务方不能访问
return jsonify({"error": "无权访问该工单"}), 403
try:
assigned_module = WorkOrderModule(assigned_module_str)
accessible_modules = WorkOrderPermissionManager.get_accessible_modules(current_role)
if assigned_module not in accessible_modules:
return jsonify({"error": "无权访问该工单"}), 403
except ValueError:
# 如果模块值无效,业务方不能访问
return jsonify({"error": "无权访问该工单"}), 403
convs = session.query(Conversation).filter(Conversation.work_order_id == w.id).order_by(Conversation.timestamp.asc()).all()
conv_list = []
for c in convs:
@@ -198,6 +348,27 @@ def get_workorder_details(workorder_id):
"assistant_response": c.assistant_response,
"timestamp": c.timestamp.isoformat() if c.timestamp else None
})
# 获取处理过程记录
process_history_list = session.query(WorkOrderProcessHistory).filter(
WorkOrderProcessHistory.work_order_id == w.id
).order_by(WorkOrderProcessHistory.process_time.asc()).all()
process_history_data = []
for ph in process_history_list:
process_history_data.append({
"id": ph.id,
"processor_name": ph.processor_name,
"processor_role": ph.processor_role,
"processor_region": ph.processor_region,
"process_content": ph.process_content,
"action_type": ph.action_type,
"previous_status": ph.previous_status,
"new_status": ph.new_status,
"assigned_module": ph.assigned_module,
"process_time": ph.process_time.isoformat() if ph.process_time else None
})
# 在会话内构建工单数据
workorder = {
"id": w.id,
@@ -211,7 +382,13 @@ def get_workorder_details(workorder_id):
"updated_at": w.updated_at.isoformat() if w.updated_at else None,
"resolution": w.resolution,
"satisfaction_score": w.satisfaction_score,
"conversations": conv_list
"assigned_module": w.assigned_module,
"module_owner": w.module_owner,
"dispatcher": w.dispatcher,
"dispatch_time": w.dispatch_time.isoformat() if w.dispatch_time else None,
"region": w.region,
"conversations": conv_list,
"process_history": process_history_data # 处理过程记录
}
return jsonify(workorder)
except Exception as e:
@@ -219,25 +396,68 @@ def get_workorder_details(workorder_id):
@workorders_bp.route('/<int:workorder_id>', methods=['PUT'])
def update_workorder(workorder_id):
"""更新工单(写入数据库)"""
"""更新工单(写入数据库,自动记录处理历史"""
try:
# 获取当前用户信息
current_user = get_current_user_name()
current_role = get_current_user_role()
data = request.get_json()
if not data.get('title') or not data.get('description'):
return jsonify({"error": "标题和描述不能为空"}), 400
with db_manager.get_session() as session:
w = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
if not w:
return jsonify({"error": "工单不存在"}), 404
# 记录更新前的状态
previous_status = w.status
previous_priority = w.priority
# 更新工单信息
w.title = data.get('title', w.title)
w.description = data.get('description', w.description)
w.category = data.get('category', w.category)
w.priority = data.get('priority', w.priority)
w.status = data.get('status', w.status)
new_status = data.get('status', w.status)
w.status = new_status
w.resolution = data.get('resolution', w.resolution)
w.satisfaction_score = data.get('satisfaction_score', w.satisfaction_score)
w.updated_at = datetime.now()
session.commit()
# 如果状态或优先级发生变化,记录处理历史
has_status_change = previous_status != new_status
has_priority_change = previous_priority != data.get('priority', w.priority)
if has_status_change or has_priority_change:
# 构建处理内容
change_items = []
if has_status_change:
change_items.append(f"状态变更:{previous_status}{new_status}")
if has_priority_change:
change_items.append(f"优先级变更:{previous_priority}{data.get('priority', w.priority)}")
process_content = "".join(change_items)
if data.get('resolution'):
process_content += f";解决方案:{data.get('resolution', '')[:100]}"
# 判断区域
processor_region = "overseas" if current_role == UserRole.OVERSEAS_OPS else "domestic"
add_process_history(
workorder_id=workorder_id,
processor_name=current_user,
process_content=process_content or "更新工单信息",
action_type="update",
processor_role=current_role.value,
processor_region=processor_region,
previous_status=previous_status,
new_status=new_status
)
# 清除工单相关缓存
from src.core.cache_manager import cache_manager
cache_manager.clear() # 清除所有缓存
@@ -593,3 +813,232 @@ def download_import_template_file():
except Exception as e:
logger.error(f"下载模板文件失败: {e}")
return jsonify({"error": f"下载失败: {str(e)}"}), 500
@workorders_bp.route('/<int:workorder_id>/dispatch', methods=['POST'])
def dispatch_workorder(workorder_id):
    """Dispatch a work order: ops assigns it to a business module.

    POST body: ``{"target_module": "<WorkOrderModule value>"}``.
    Only roles allowed by ``can_dispatch_workorder`` (local ops / admin)
    may call this.  On success the work order's assignment fields and
    status are updated, region is inferred from the source field when
    unset, and a dispatch entry is appended to the process history.
    """
    try:
        # Caller identity (header-based shim until real auth is wired in).
        current_role = get_current_user_role()
        current_user = get_current_user_name()

        # Permission gate: only local ops and admins may dispatch.
        if not WorkOrderPermissionManager.can_dispatch_workorder(current_role):
            return jsonify({
                "success": False,
                "error": "无权进行工单分发,只有属地运维和管理员可以分发工单"
            }), 403

        # Request payload: the target module is mandatory.
        data = request.get_json() or {}
        target_module_str = data.get('target_module', '')
        if not target_module_str:
            return jsonify({"success": False, "error": "请指定目标模块"}), 400

        # Validate the module against the WorkOrderModule enum.
        try:
            target_module = WorkOrderModule(target_module_str)
        except ValueError:
            return jsonify({"success": False, "error": f"无效的模块: {target_module_str}"}), 400

        # Load the work order.
        with db_manager.get_session() as session:
            workorder = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
            if not workorder:
                return jsonify({"success": False, "error": "工单不存在"}), 404

            # Resolve the business owner for the target module.
            module_owner = WorkOrderDispatchManager.get_module_owner(target_module)

            # Remember the pre-dispatch status for the history record.
            previous_status = workorder.status

            # Update assignment metadata on the work order.
            workorder.assigned_module = target_module.value
            workorder.module_owner = module_owner
            workorder.dispatcher = current_user
            workorder.dispatch_time = datetime.now()
            workorder.status = "assigned"  # mark as assigned

            # Auto-derive region when unset (simplified heuristic based on
            # the work order's source; adjust to real requirements as needed).
            if not workorder.region:
                # Keyword sniffing on source decides overseas vs domestic.
                source = workorder.source or ""
                if any(keyword in source.lower() for keyword in ["overseas", "abroad", "海外"]):
                    workorder.region = "overseas"
                else:
                    workorder.region = "domestic"

            session.commit()

            # Record the dispatch in the process history (separate session).
            processor_region = "overseas" if current_role == UserRole.OVERSEAS_OPS else "domestic"
            add_process_history(
                workorder_id=workorder_id,
                processor_name=current_user,
                process_content=f"工单已分发到{target_module.value}模块,业务接口人:{module_owner}",
                action_type="dispatch",
                processor_role=current_role.value,
                processor_region=processor_region,
                previous_status=previous_status,
                new_status="assigned",
                assigned_module=target_module.value
            )

            logger.info(f"工单 {workorder_id} 已分发到 {target_module.value} 模块,分发人: {current_user}")

            return jsonify({
                "success": True,
                "message": f"工单已成功分发到{target_module.value}模块",
                "workorder": {
                    "id": workorder.id,
                    "assigned_module": workorder.assigned_module,
                    "module_owner": workorder.module_owner,
                    "dispatcher": workorder.dispatcher,
                    "dispatch_time": workorder.dispatch_time.isoformat() if workorder.dispatch_time else None,
                    "status": workorder.status
                }
            })

    except Exception as e:
        logger.error(f"工单分发失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
@workorders_bp.route('/<int:workorder_id>/suggest-module', methods=['POST'])
def suggest_workorder_module(workorder_id):
    """Ask the AI dispatcher which module this work order belongs to."""
    try:
        with db_manager.get_session() as session:
            order = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
            if order is None:
                return jsonify({"success": False, "error": "工单不存在"}), 404

            # Classify from the order's title + description text.
            suggestion = WorkOrderDispatchManager.suggest_module(
                description=order.description or "",
                title=order.title or ""
            )
            owner = (
                WorkOrderDispatchManager.get_module_owner(suggestion)
                if suggestion else None
            )
            return jsonify({
                "success": True,
                "suggested_module": suggestion.value if suggestion else None,
                "module_owner": owner
            })
    except Exception as e:
        logger.error(f"模块建议失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
@workorders_bp.route('/modules', methods=['GET'])
def get_available_modules():
    """List every dispatchable work-order module with its owner."""
    try:
        modules = []
        for module in WorkOrderModule:
            if module == WorkOrderModule.UNASSIGNED:
                continue  # placeholder value, never a dispatch target
            modules.append({
                "value": module.value,
                "name": module.name,
                "owner": WorkOrderDispatchManager.get_module_owner(module),
            })
        return jsonify({"success": True, "modules": modules})
    except Exception as e:
        return jsonify({"success": False, "error": str(e)}), 500
@workorders_bp.route('/<int:workorder_id>/process-history', methods=['GET'])
def get_workorder_process_history(workorder_id):
    """Return the full processing timeline of one work order, oldest first."""
    try:
        with db_manager.get_session() as session:
            # 404 when the parent work order does not exist.
            exists = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
            if exists is None:
                return jsonify({"error": "工单不存在"}), 404

            entries = (
                session.query(WorkOrderProcessHistory)
                .filter(WorkOrderProcessHistory.work_order_id == workorder_id)
                .order_by(WorkOrderProcessHistory.process_time.asc())
                .all()
            )

            history_data = [
                {
                    "id": entry.id,
                    "processor_name": entry.processor_name,
                    "processor_role": entry.processor_role,
                    "processor_region": entry.processor_region,
                    "process_content": entry.process_content,
                    "action_type": entry.action_type,
                    "previous_status": entry.previous_status,
                    "new_status": entry.new_status,
                    "assigned_module": entry.assigned_module,
                    "process_time": entry.process_time.isoformat() if entry.process_time else None,
                }
                for entry in entries
            ]

            return jsonify({
                "success": True,
                "workorder_id": workorder_id,
                "process_history": history_data,
                "total": len(history_data)
            })
    except Exception as e:
        logger.error(f"获取处理历史失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
@workorders_bp.route('/<int:workorder_id>/process-history', methods=['POST'])
def add_workorder_process_history(workorder_id):
    """Manually append a process-history record to a work order.

    POST body requires ``process_content``; ``action_type``,
    ``processor_*``, status and module fields are optional and default
    to the caller's identity and the work order's current values.
    """
    try:
        # Caller identity (header-based shim until real auth is wired in).
        current_user = get_current_user_name()
        current_role = get_current_user_role()

        data = request.get_json() or {}
        process_content = data.get('process_content', '').strip()
        if not process_content:
            return jsonify({"success": False, "error": "处理内容不能为空"}), 400

        with db_manager.get_session() as session:
            workorder = session.query(WorkOrder).filter(WorkOrder.id == workorder_id).first()
            if not workorder:
                return jsonify({"success": False, "error": "工单不存在"}), 404

            # Optional parameters with sensible defaults.
            action_type = data.get('action_type', 'process')  # default action type
            processor_role = data.get('processor_role', current_role.value)
            processor_region = data.get('processor_region')
            if not processor_region:
                # Derive the region from the caller's role.
                processor_region = "overseas" if current_role == UserRole.OVERSEAS_OPS else "domestic"

            previous_status = data.get('previous_status', workorder.status)
            new_status = data.get('new_status', workorder.status)
            assigned_module = data.get('assigned_module', workorder.assigned_module)

            # Persist the history record (helper opens its own DB session).
            history = add_process_history(
                workorder_id=workorder_id,
                processor_name=data.get('processor_name', current_user),
                process_content=process_content,
                action_type=action_type,
                processor_role=processor_role,
                processor_region=processor_region,
                previous_status=previous_status,
                new_status=new_status,
                assigned_module=assigned_module
            )

            return jsonify({
                "success": True,
                "message": "处理记录已添加",
                "history": {
                    "id": history.id,
                    "processor_name": history.processor_name,
                    "processor_role": history.processor_role,
                    "process_content": history.process_content,
                    "action_type": history.action_type,
                    "process_time": history.process_time.isoformat() if history.process_time else None
                }
            })

    except Exception as e:
        logger.error(f"添加处理记录失败: {e}")
        return jsonify({"success": False, "error": str(e)}), 500

File diff suppressed because it is too large Load Diff

View File

@@ -213,18 +213,22 @@ class WebSocketServer:
async def handle_client(self, websocket: WebSocketServerProtocol, path: str):
"""处理客户端连接"""
# 检查连接头
headers = websocket.request_headers
connection = headers.get("Connection", "").lower()
# 检查连接头(如果可用)
try:
if hasattr(websocket, 'request_headers'):
headers = websocket.request_headers
connection = headers.get("Connection", "").lower()
# 处理不同的连接头格式
if "upgrade" not in connection and "keep-alive" in connection:
logger.warning(f"收到非标准连接头: {connection}")
# 对于keep-alive连接头我们仍然接受连接
elif "upgrade" not in connection:
logger.warning(f"连接头不包含upgrade: {connection}")
await websocket.close(code=1002, reason="Invalid connection header")
return
# 处理不同的连接头格式
if "upgrade" not in connection and "keep-alive" in connection:
logger.warning(f"收到非标准连接头: {connection}")
# 对于keep-alive连接头我们仍然接受连接
elif "upgrade" not in connection:
logger.warning(f"连接头不包含upgrade: {connection}")
# 在websockets 15.x中连接已经在serve时验证所以这里只记录警告
except AttributeError:
# websockets 15.x版本可能没有request_headers属性跳过检查
pass
await self.register_client(websocket)
@@ -243,19 +247,15 @@ class WebSocketServer:
logger.info(f"启动WebSocket服务器: ws://{self.host}:{self.port}")
# 添加CORS支持
async def handle_client_with_cors(websocket: WebSocketServerProtocol, path: str):
# 设置CORS
if websocket.request_headers.get("Origin"):
# 允许跨域连接
pass
await self.handle_client(websocket, path)
async def handle_client_with_cors(websocket: WebSocketServerProtocol, path: str = None):
# CORS处理websockets库默认允许所有来源连接
# 如果需要限制可以在serve时使用additional_headers参数
await self.handle_client(websocket, path or "")
async with websockets.serve(
handle_client_with_cors,
self.host,
self.port,
# 添加额外的服务器选项
process_request=self._process_request
self.port
):
await asyncio.Future() # 保持服务器运行

View File

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
启动TSP智能助手综合管理平台
TSP智能助手综合管理平台
"""
import sys

79
test_git_push.bat Normal file
View File

@@ -0,0 +1,79 @@
@echo off
chcp 65001 >nul
setlocal enabledelayedexpansion

:: Git push smoke test:
::   [1] verify we are inside a git repository
::   [2] resolve the current branch (fallback: main)
::   [3] show configured remotes
::   [4] push; if the remote branch is missing, retry with -u to set upstream
::
:: FIX: inside parenthesized blocks, %errorlevel% is expanded when the whole
:: if/else statement is parsed, so the inner check after `git push -u` was
:: reading the STALE errorlevel of the first (failed) push and always
:: reported failure.  With delayed expansion enabled, !errorlevel! reads the
:: real exit code at execution time.

echo ========================================
echo Git推送测试脚本
echo ========================================
echo.

:: [1] Repo check: `git status` exits non-zero (128) outside a repository.
echo [1] 检查Git状态...
git status --porcelain >nul 2>&1
if %errorlevel% neq 0 (
    echo Git未初始化
    pause
    exit /b 1
)
echo Git状态正常
echo.

:: [2] Branch resolution via `git branch --show-current`
:: (prints nothing on a detached HEAD, hence the fallback).
echo [2] 获取当前分支...
set current_branch=
for /f "tokens=*" %%b in ('git branch --show-current 2^>nul') do set current_branch=%%b
if "!current_branch!"=="" (
    echo 无法获取当前分支使用默认分支main
    set current_branch=main
) else (
    echo 当前分支: !current_branch!
)
echo.

:: [3] List remotes.  NOTE(review): `git remote -v` exits 0 even with no
:: remotes configured, so this only catches hard git failures.
echo [3] 检查远程仓库...
git remote -v
if %errorlevel% neq 0 (
    echo 未配置远程仓库
    pause
    exit /b 1
)
echo.

:: [4] Push the resolved branch; on failure, retry creating the upstream.
echo [4] 尝试推送到远程...
echo 分支: !current_branch!
echo.
git push origin !current_branch! 2>&1
if %errorlevel% equ 0 (
    echo.
    echo 推送成功!
) else (
    echo.
    echo 推送失败
    echo.
    echo 可能的原因:
    echo 1. 远程分支不存在,尝试设置上游...
    git push -u origin !current_branch! 2>&1
    :: Must use !errorlevel! here: we are inside a parenthesized block.
    if !errorlevel! equ 0 (
        echo 推送成功(已设置上游)!
    ) else (
        echo 推送仍然失败
        echo.
        echo 请检查:
        echo - 网络连接
        echo - 认证配置SSH密钥或Token
        echo - 远程仓库地址
        pause
        exit /b 1
    )
)
echo.
echo ========================================
echo 测试完成
echo ========================================
pause

Binary file not shown.