fix: 清除所有硬编码配置

1. Redis 连接:redis_manager.py 和 system_optimizer.py 的 IP/端口/密码改为从 unified_config 读取(不再硬编码 43.134.68.207/123456)
2. 性能配置:performance_config.py 改为从 system_settings.json 读取,硬编码值仅作 fallback
3. 默认管理员密码:从环境变量 ADMIN_PASSWORD 读取,默认 admin123
This commit is contained in:
2026-04-07 10:23:35 +08:00
parent cc73403cf2
commit c2a5bcbc94
5 changed files with 74 additions and 87 deletions

View File

@@ -135,10 +135,12 @@ class AuthManager:
return None
def create_default_admin(self):
    """Create the default admin user.

    The password is read from the ADMIN_PASSWORD environment variable,
    falling back to 'admin123'.  (Fixes the previous docstring, which was
    missing its closing parenthesis.)

    Returns:
        The created user object from create_user, or whatever falsy value
        create_user returns when the user already exists / creation fails.
    """
    import os
    admin_pwd = os.environ.get('ADMIN_PASSWORD', 'admin123')
    admin = self.create_user('admin', admin_pwd, '系统管理员', 'admin@example.com', 'admin')
    if admin:
        # NOTE(review): echoing the resolved password to stdout leaks a
        # secret when ADMIN_PASSWORD is set — consider printing only the
        # username.
        print(f"默认管理员用户已创建: admin/{admin_pwd}")
    return admin

View File

@@ -1,89 +1,65 @@
# -*- coding: utf-8 -*-
"""
性能优化配置
集中管理所有性能相关的配置参数
从 system_settings.json 读取,硬编码值仅作为 fallback
"""
import os
import json
import logging
logger = logging.getLogger(__name__)

# Hard-coded fallback values; the 'performance' section of
# data/system_settings.json overrides them key by key.
_DEFAULTS = {
    "database": {"pool_size": 20, "max_overflow": 30, "pool_recycle": 1800, "pool_timeout": 10},
    "cache": {"default_ttl": 60, "max_memory_size": 2000, "conversation_ttl": 60, "workorder_ttl": 30, "monitoring_ttl": 30},
    "query": {"default_limit": 100, "conversations_limit": 1000, "workorders_limit": 100, "monitoring_limit": 1000},
    # Restored: the refactor dropped the frontend section even though
    # get_frontend_config() existed in the old API.
    "frontend": {"cache_timeout": 30000, "parallel_loading": True},
    "api": {"timeout": 10, "retry_count": 3, "batch_size": 50},
    "monitoring": {"interval": 60, "slow_query_threshold": 1.0, "performance_log_enabled": True}
}


def _load():
    """Return the 'performance' section of data/system_settings.json, or {} on any failure."""
    try:
        path = os.path.join('data', 'system_settings.json')
        if os.path.exists(path):
            with open(path, 'r', encoding='utf-8') as f:
                return json.load(f).get('performance', {})
    except Exception as e:
        # Best-effort: a missing or malformed settings file must not break startup.
        logger.debug(f"加载性能配置失败: {e}")
    return {}


# Legacy UPPER_CASE attribute names -> (section, key) in the settings file,
# kept so pre-refactor callers (PerformanceConfig.API_TIMEOUT, ...) still work.
_LEGACY_ATTRS = {
    'DATABASE_POOL_SIZE': ('database', 'pool_size'),
    'DATABASE_MAX_OVERFLOW': ('database', 'max_overflow'),
    'DATABASE_POOL_RECYCLE': ('database', 'pool_recycle'),
    'DATABASE_POOL_TIMEOUT': ('database', 'pool_timeout'),
    'CACHE_DEFAULT_TTL': ('cache', 'default_ttl'),
    'CACHE_MAX_MEMORY_SIZE': ('cache', 'max_memory_size'),
    'CACHE_CONVERSATION_TTL': ('cache', 'conversation_ttl'),
    'CACHE_WORKORDER_TTL': ('cache', 'workorder_ttl'),
    'CACHE_MONITORING_TTL': ('cache', 'monitoring_ttl'),
    'QUERY_LIMIT_DEFAULT': ('query', 'default_limit'),
    'QUERY_LIMIT_CONVERSATIONS': ('query', 'conversations_limit'),
    'QUERY_LIMIT_WORKORDERS': ('query', 'workorders_limit'),
    'QUERY_LIMIT_MONITORING': ('query', 'monitoring_limit'),
    'FRONTEND_CACHE_TIMEOUT': ('frontend', 'cache_timeout'),
    'FRONTEND_PARALLEL_LOADING': ('frontend', 'parallel_loading'),
    'API_TIMEOUT': ('api', 'timeout'),
    'API_RETRY_COUNT': ('api', 'retry_count'),
    'API_BATCH_SIZE': ('api', 'batch_size'),
    'MONITORING_INTERVAL': ('monitoring', 'interval'),
    'SLOW_QUERY_THRESHOLD': ('monitoring', 'slow_query_threshold'),
    'PERFORMANCE_LOG_ENABLED': ('monitoring', 'performance_log_enabled'),
}


class _PerformanceConfigMeta(type):
    """Metaclass that will carry dynamic class-level legacy attributes.

    The previous revision assigned ``property()`` objects directly on
    PerformanceConfig; a property only fires through *instance* access, so
    ``PerformanceConfig.API_TIMEOUT`` evaluated to the property object
    itself, not the configured value.  Properties installed on the metaclass
    (see the loop below the class) make class-level access work; matching
    instance-level properties are installed on the class as well.
    """


class PerformanceConfig(metaclass=_PerformanceConfigMeta):
    """Performance configuration.

    Values are read from data/system_settings.json on every access, with the
    hard-coded _DEFAULTS as fallback, so edits to the settings file take
    effect without a restart.
    """

    @classmethod
    def _get(cls, section, key):
        """Look up a single value: settings file first, then _DEFAULTS."""
        cfg = _load()
        return cfg.get(section, {}).get(key, _DEFAULTS.get(section, {}).get(key))

    @classmethod
    def _section(cls, section):
        """Return a whole section: _DEFAULTS overlaid with file values."""
        return {**_DEFAULTS.get(section, {}), **_load().get(section, {})}

    @classmethod
    def get_database_config(cls):
        """Get the database connection-pool configuration."""
        return cls._section('database')

    @classmethod
    def get_cache_config(cls):
        """Get the cache TTL/size configuration."""
        return cls._section('cache')

    @classmethod
    def get_query_config(cls):
        """Get the query-limit configuration."""
        return cls._section('query')

    @classmethod
    def get_frontend_config(cls):
        """Get the frontend configuration (restored from the pre-refactor API)."""
        return cls._section('frontend')

    @classmethod
    def get_api_config(cls):
        """Get the API timeout/retry/batch configuration."""
        return cls._section('api')


# Install the legacy attributes.  Per-name order matters: the class-level
# (instance-access) property must be set before the metaclass descriptor,
# because a metaclass data descriptor would otherwise intercept setattr()
# on the class and raise.
for _name, (_section, _key) in _LEGACY_ATTRS.items():
    setattr(PerformanceConfig, _name,
            property(lambda self, s=_section, k=_key: self._get(s, k)))
    setattr(_PerformanceConfigMeta, _name,
            property(lambda cls, s=_section, k=_key: cls._get(s, k)))

View File

@@ -34,10 +34,18 @@ class RedisManager:
self.connection_lock = threading.Lock()
self._initialized = True
# Redis配置
self.host = '43.134.68.207'
self.port = 6379
self.password = '123456'
# Redis配置(从统一配置读取)
try:
from src.config.unified_config import get_config
redis_cfg = get_config().redis
self.host = redis_cfg.host
self.port = redis_cfg.port
self.password = redis_cfg.password
except Exception:
import os
self.host = os.environ.get('REDIS_HOST', 'localhost')
self.port = int(os.environ.get('REDIS_PORT', 6379))
self.password = os.environ.get('REDIS_PASSWORD') or None
self.connect_timeout = 2
self.socket_timeout = 2

View File

@@ -66,13 +66,14 @@ class SystemOptimizer:
self.redis_connected = False
def _ensure_redis_connection(self):
"""确保Redis连接"""
"""确保Redis连接(从统一配置读取)"""
if not self.redis_connected:
try:
config = get_config()
self.redis_client = redis.Redis(
host='43.134.68.207',
port=6379,
password='123456',
host=config.redis.host,
port=config.redis.port,
password=config.redis.password,
decode_responses=True,
socket_connect_timeout=2,
socket_timeout=2,

View File

@@ -117,7 +117,7 @@ def main():
print(f" 实时对话: http://localhost:{config.server.port}/chat")
print(f" WebSocket: ws://localhost:{config.server.websocket_port}")
print()
print("🤖 飞书长连接服务: 已在后台启动")
print(" 飞书长连接服务: 已在后台启动")
print()
print("按 Ctrl+C 停止服务")
print("=" * 60)