75 lines
2.3 KiB
Python
75 lines
2.3 KiB
Python
from data_analysis_agent import DataAnalysisAgent
|
|
from config.llm_config import LLMConfig
|
|
|
|
import sys
|
|
import os
|
|
from datetime import datetime
|
|
|
|
from utils.create_session_dir import create_session_output_dir
|
|
|
|
class DualLogger:
    """Tee-style stream that mirrors every write to the terminal and a log file.

    Intended to replace ``sys.stdout`` so everything printed also lands in
    ``<log_dir>/<filename>``. Messages containing the generated-code marker
    are shown on the terminal only and kept out of the log file.
    """

    def __init__(self, log_dir, filename="log.txt"):
        """Open the log file in append mode under *log_dir*.

        Args:
            log_dir: Directory receiving the log file; assumed to exist.
            filename: Name of the log file (default ``"log.txt"``).
        """
        self.terminal = sys.stdout
        log_path = os.path.join(log_dir, filename)
        self.log = open(log_path, "a", encoding="utf-8")

    def write(self, message):
        """Write *message* to the terminal, and to the log unless filtered."""
        self.terminal.write(message)
        # Keep generated code blocks out of the persistent log file.
        if "🔧 执行代码:" in message:
            return
        self.log.write(message)
        # Flush per write so the log survives a crash mid-run.
        self.log.flush()

    def flush(self):
        """Flush both underlying streams (required by the file-like protocol)."""
        self.terminal.flush()
        self.log.flush()

    def close(self):
        """Close the log file handle.

        Fix: the handle opened in ``__init__`` previously had no close path
        at all, leaking the file descriptor for the process lifetime.
        Safe to call more than once.
        """
        if not self.log.closed:
            self.log.close()
|
|
|
|
def setup_logging(log_dir):
    """Redirect stdout through a :class:`DualLogger` writing into *log_dir*.

    Side effect: replaces ``sys.stdout`` globally for the whole process.

    Args:
        log_dir: Existing directory that will receive ``log.txt``.

    Returns:
        The installed ``DualLogger`` instance, so the caller can later close
        the log file or restore ``sys.stdout``. (Fix: previously returned
        nothing, leaving no handle to the redirected stream.)
    """
    logger = DualLogger(log_dir)
    sys.stdout = logger
    # Optional: also route stderr through the same logger.
    # sys.stderr = logger
    print(f"\n{'='*20} Run Started at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} {'='*20}\n")
    print(f"📄 日志文件已保存至: {os.path.join(log_dir, 'log.txt')}")
    return logger
|
|
|
|
|
|
def main():
    """Entry point: run the data-analysis agent over the ticket CSV.

    Creates a per-session output directory, tees stdout into a log file
    there, then asks the agent to analyze the data and prints where the
    generated report was written.
    """
    llm_config = LLMConfig()
    files = ["./UB IOV Support_TR.csv"]

    # Deliberately open-ended requirement; the agent plans the analysis itself.
    analysis_requirement = "我想了解这份工单数据的健康度。请帮我进行全面分析,并找出核心问题点和改进建议。"

    # Create the session directory first so the log file has somewhere to go.
    base_output_dir = "outputs"
    session_output_dir = create_session_output_dir(base_output_dir, analysis_requirement)

    # Install dual (terminal + file) logging for this session.
    setup_logging(session_output_dir)

    # force_max_rounds=False lets the agent stop early once it judges the
    # analysis complete, instead of exhausting all 20 rounds.
    agent = DataAnalysisAgent(llm_config, max_rounds=20, force_max_rounds=False)

    # Pass a path here if a specific reference template file is available.
    template_path = None

    report = agent.analyze(
        user_input=analysis_requirement,
        files=files,
        template_path=template_path,
        session_output_dir=session_output_dir
    )

    print("\n" + "="*60)
    # Fix: was an f-string with no placeholders (pointless `f` prefix).
    print("✅ 分析任务圆满完成!")
    print(f"📊 报告路径: {report.get('report_file_path')}")
    print("="*60)
|
|
|
|
|
|
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()
|