""" 补货建议 Agent 重构版本:使用 part_ratio + SQL Agent + LangGraph """ import logging import time import uuid from typing import Optional, List from datetime import date, datetime from decimal import Decimal from langgraph.graph import StateGraph, END from .state import AgentState from .nodes import ( fetch_part_ratio_node, sql_agent_node, allocate_budget_node, should_retry_sql, ) from ..models import ReplenishmentTask, TaskStatus, TaskExecutionLog, LogStatus, ReplenishmentPartSummary from ..services import ResultWriter logger = logging.getLogger(__name__) class ReplenishmentAgent: """补货建议 Agent""" def __init__(self): self._graph = None self._result_writer = ResultWriter() @property def graph(self) -> StateGraph: """获取工作流图""" if self._graph is None: self._graph = self._build_graph() return self._graph def _build_graph(self) -> StateGraph: """ 构建 LangGraph 工作流 工作流结构: fetch_part_ratio → sql_agent → allocate_budget → END """ workflow = StateGraph(AgentState) # 添加核心节点 workflow.add_node("fetch_part_ratio", fetch_part_ratio_node) workflow.add_node("sql_agent", sql_agent_node) workflow.add_node("allocate_budget", allocate_budget_node) # 设置入口 workflow.set_entry_point("fetch_part_ratio") # 添加边 workflow.add_edge("fetch_part_ratio", "sql_agent") # SQL Agent 条件边(支持重试) workflow.add_conditional_edges( "sql_agent", should_retry_sql, { "retry": "sql_agent", "continue": "allocate_budget", } ) # allocate_budget → END workflow.add_edge("allocate_budget", END) return workflow.compile() def run( self, group_id: int, dealer_grouping_id: int, dealer_grouping_name: str, brand_grouping_id: Optional[int] = None, brand_grouping_name: str = "", statistics_date: Optional[str] = None, ) -> AgentState: """ 执行补货建议生成 Args: group_id: 集团ID dealer_grouping_id: 商家组合ID dealer_grouping_name: 商家组合名称 brand_grouping_id: 品牌组合ID brand_grouping_name: 品牌组合名称 statistics_date: 统计日期 """ task_no = f"AI-{uuid.uuid4().hex[:12].upper()}" if statistics_date is None: statistics_date = date.today().strftime("%Y-%m-%d") logger.info( f"开始执行补货建议: task_no={task_no}, " f"dealer_grouping={dealer_grouping_name}" ) # 初始化状态 initial_state: AgentState = { "task_no": task_no, "group_id": group_id, "brand_grouping_id": brand_grouping_id, "brand_grouping_name": brand_grouping_name, "dealer_grouping_id": dealer_grouping_id, "dealer_grouping_name": dealer_grouping_name, "statistics_date": statistics_date, "part_ratios": [], "sql_queries": [], "sql_results": [], "sql_retry_count": 0, "sql_execution_logs": [], "base_ratio": Decimal("1.1"), "allocated_details": [], "details": [], "llm_suggestions": [], "part_results": [], "report": None, "llm_provider": "", "llm_model": "", "llm_prompt_tokens": 0, "llm_completion_tokens": 0, "status": "running", "error_message": "", "start_time": time.time(), "end_time": 0, "current_node": "", "next_node": "fetch_part_ratio", } # 创建任务记录 task = ReplenishmentTask( task_no=task_no, group_id=group_id, dealer_grouping_id=dealer_grouping_id, dealer_grouping_name=dealer_grouping_name, brand_grouping_id=brand_grouping_id, statistics_date=statistics_date, status=TaskStatus.RUNNING, ) self._result_writer.save_task(task) try: # 执行工作流 final_state = self.graph.invoke(initial_state) # 更新任务状态 execution_time = int((final_state.get("end_time", time.time()) - final_state["start_time"]) * 1000) actual_amount = sum(d.suggest_amount for d in final_state.get("details", [])) task.status = TaskStatus.SUCCESS task.actual_amount = actual_amount task.part_count = len(final_state.get("details", [])) task.shop_count = len(set(d.shop_id for d in 
final_state.get("details", []))) task.base_ratio = final_state.get("base_ratio", Decimal("0")) task.llm_provider = final_state.get("llm_provider", "") task.llm_model = final_state.get("llm_model", "") task.llm_prompt_tokens = final_state.get("llm_prompt_tokens", 0) task.llm_completion_tokens = final_state.get("llm_completion_tokens", 0) task.llm_total_tokens = task.llm_prompt_tokens + task.llm_completion_tokens task.llm_analysis_summary = final_state.get("llm_analysis_summary", "") task.execution_time_ms = execution_time self._result_writer.update_task(task) # 保存执行日志 if final_state.get("sql_execution_logs"): self._save_execution_logs( task_no=task_no, group_id=group_id, brand_grouping_id=brand_grouping_id, brand_grouping_name=brand_grouping_name, dealer_grouping_id=dealer_grouping_id, dealer_grouping_name=dealer_grouping_name, logs=final_state["sql_execution_logs"], ) # 配件汇总已在 allocate_budget_node 中保存,此处跳过避免重复 # if final_state.get("part_results"): # self._save_part_summaries( # task_no=task_no, # group_id=group_id, # dealer_grouping_id=dealer_grouping_id, # statistics_date=statistics_date, # part_results=final_state["part_results"], # ) logger.info( f"补货建议执行完成: task_no={task_no}, " f"parts={task.part_count}, amount={actual_amount}, " f"time={execution_time}ms" ) return final_state except Exception as e: logger.error(f"补货建议执行失败: task_no={task_no}, error={e}") task.status = TaskStatus.FAILED task.error_message = str(e) task.execution_time_ms = int((time.time() - initial_state["start_time"]) * 1000) self._result_writer.update_task(task) raise finally: self._result_writer.close() def _save_execution_logs( self, task_no: str, group_id: int, brand_grouping_id: Optional[int], brand_grouping_name: str, dealer_grouping_id: int, dealer_grouping_name: str, logs: List[dict], ): """保存执行日志""" for log_data in logs: log = TaskExecutionLog( task_no=task_no, group_id=group_id, brand_grouping_id=brand_grouping_id, brand_grouping_name=brand_grouping_name, dealer_grouping_id=dealer_grouping_id, dealer_grouping_name=dealer_grouping_name, step_name=log_data.get("step_name", ""), step_order=log_data.get("step_order", 0), status=log_data.get("status", LogStatus.SUCCESS), input_data=log_data.get("input_data", ""), output_data=log_data.get("output_data", ""), error_message=log_data.get("error_message", ""), retry_count=log_data.get("retry_count", 0), sql_query=log_data.get("sql_query", ""), llm_prompt=log_data.get("llm_prompt", ""), llm_response=log_data.get("llm_response", ""), llm_tokens=log_data.get("llm_tokens", 0), execution_time_ms=log_data.get("execution_time_ms", 0), ) self._result_writer.save_execution_log(log) def _save_part_summaries( self, task_no: str, group_id: int, dealer_grouping_id: int, statistics_date: str, part_results: list, ): """保存配件汇总""" from .sql_agent import PartAnalysisResult summaries = [] for pr in part_results: if not isinstance(pr, PartAnalysisResult): continue summary = ReplenishmentPartSummary( task_no=task_no, group_id=group_id, dealer_grouping_id=dealer_grouping_id, part_code=pr.part_code, part_name=pr.part_name, unit=pr.unit, cost_price=pr.cost_price, total_storage_cnt=pr.total_storage_cnt, total_avg_sales_cnt=pr.total_avg_sales_cnt, group_current_ratio=pr.group_current_ratio, total_suggest_cnt=pr.total_suggest_cnt, total_suggest_amount=pr.total_suggest_amount, shop_count=pr.shop_count, need_replenishment_shop_count=pr.need_replenishment_shop_count, part_decision_reason=pr.part_decision_reason, priority=pr.priority, llm_confidence=pr.confidence, statistics_date=statistics_date, ) 
            summaries.append(summary)

        if summaries:
            self._result_writer.save_part_summaries(summaries)
            logger.info(f"Saved part summaries: count={len(summaries)}")

    def run_for_all_groupings(self, group_id: int):
        """
        Run replenishment suggestions for every dealer grouping.
        """
        from ..services import DataService

        data_service = DataService()
        try:
            groupings = data_service.get_dealer_groupings(group_id)
            logger.info(f"Fetched dealer groupings: group_id={group_id}, count={len(groupings)}")

            for idx, grouping in enumerate(groupings):
                logger.info(
                    f"[{idx + 1}/{len(groupings)}] Processing dealer grouping: "
                    f"{grouping['name']} (id={grouping['id']})"
                )
                try:
                    self.run(
                        group_id=group_id,
                        dealer_grouping_id=grouping["id"],
                        dealer_grouping_name=grouping["name"],
                    )
                    logger.info(f"[{grouping['name']}] finished")
                except Exception as e:
                    logger.error(
                        f"Dealer grouping run failed: {grouping['name']}, error={e}",
                        exc_info=True,
                    )
                    continue
        finally:
            data_service.close()
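

# Minimal usage sketch (illustration only, not part of the original module):
# shows how the agent might be invoked for a one-off run. The group_id and
# dealer grouping values below are placeholders; real IDs would come from the
# caller (for example a scheduler or CLI entry point), and logging setup is
# assumed to be application-specific.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    agent = ReplenishmentAgent()

    # Run a single dealer grouping (placeholder IDs and name):
    # final_state = agent.run(
    #     group_id=1001,
    #     dealer_grouping_id=2001,
    #     dealer_grouping_name="Example dealer grouping",
    # )

    # Or run every dealer grouping under a group (placeholder group_id):
    agent.run_for_all_groupings(group_id=1001)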