| New file |
| | |
| | | package com.zy.ai.controller; |
| | | |
| | | import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; |
| | | import com.core.annotations.ManagerAuth; |
| | | import com.core.common.R; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import com.zy.ai.service.AiDataAnalysisReportService; |
| | | import com.zy.ai.service.DataAnalysisCoordinatorService; |
| | | import com.zy.common.web.BaseController; |
| | | import com.zy.system.service.ConfigService; |
| | | import lombok.RequiredArgsConstructor; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.web.bind.annotation.*; |
| | | |
| | | import java.util.ArrayList; |
| | | import java.util.LinkedHashMap; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | |
| | | @Slf4j |
| | | @RestController |
| | | @RequestMapping("/ai/dataAnalysis") |
| | | @RequiredArgsConstructor |
| | | public class DataAnalysisController extends BaseController { |
| | | |
| | | private static final int DEFAULT_LIMIT = 20; |
| | | private static final int MAX_LIMIT = 100; |
| | | |
| | | private final DataAnalysisCoordinatorService dataAnalysisCoordinatorService; |
| | | private final AiDataAnalysisReportService aiDataAnalysisReportService; |
| | | private final ConfigService configService; |
| | | |
| | | @GetMapping("/enabled/auth") |
| | | @ManagerAuth(memo = "查询AI数据分析功能开关") |
| | | public R getEnabled() { |
| | | Map<String, Object> result = new LinkedHashMap<>(); |
| | | result.put("enabled", dataAnalysisCoordinatorService.isEnabled()); |
| | | result.put("scheduledPeriods", configService.getConfigValue("aiDataAnalysisScheduledPeriods", "YESTERDAY")); |
| | | result.put("cron", configService.getConfigValue("aiDataAnalysisCron", "0 0 1 * * ?")); |
| | | result.put("uploadEnabled", "1".equals(configService.getConfigValue("aiDataAnalysisUploadEnabled", "0"))); |
| | | result.put("uploadUrl", configService.getConfigValue("aiDataAnalysisUploadUrl", "")); |
| | | return R.ok(result); |
| | | } |
| | | |
| | | @PostMapping("/enabled/auth") |
| | | @ManagerAuth(memo = "修改AI数据分析功能开关") |
| | | public R setEnabled(@RequestParam("enabled") String enabled) { |
| | | boolean isEnabled = "1".equals(enabled) || "true".equalsIgnoreCase(enabled); |
| | | log.info("AI数据分析开关切换: enabled={}, 原值={}", enabled, dataAnalysisCoordinatorService.isEnabled()); |
| | | dataAnalysisCoordinatorService.setEnabled(isEnabled); |
| | | boolean saved = dataAnalysisCoordinatorService.isEnabled(); |
| | | log.info("AI数据分析开关保存结果: {}", saved); |
| | | Map<String, Object> result = new LinkedHashMap<>(); |
| | | result.put("enabled", saved); |
| | | return R.ok(result); |
| | | } |
| | | |
| | | @PostMapping("/trigger/auth") |
| | | @ManagerAuth(memo = "手动触发AI数据分析") |
| | | public R trigger(@RequestParam("periodType") String periodType) { |
| | | return R.ok(dataAnalysisCoordinatorService.runManualAnalysis(periodType)); |
| | | } |
| | | |
| | | @GetMapping("/reports/auth") |
| | | @ManagerAuth(memo = "查看AI数据分析报告列表") |
| | | public R listReports( |
| | | @RequestParam(value = "periodType", required = false) String periodType, |
| | | @RequestParam(value = "limit", required = false) Integer limit) { |
| | | int safeLimit = normalizeLimit(limit); |
| | | QueryWrapper<AiDataAnalysisReport> wrapper = new QueryWrapper<>(); |
| | | if (periodType != null && !periodType.trim().isEmpty()) { |
| | | wrapper.eq("period_type", periodType.trim()); |
| | | } |
| | | wrapper.orderByDesc("create_time").last("limit " + safeLimit); |
| | | List<AiDataAnalysisReport> reports = aiDataAnalysisReportService.list(wrapper); |
| | | return R.ok(toReportSummaries(reports)); |
| | | } |
| | | |
| | | @GetMapping("/report/{id}/auth") |
| | | @ManagerAuth(memo = "查看AI数据分析报告详情") |
| | | public R getReport(@PathVariable("id") Long id) { |
| | | AiDataAnalysisReport report = aiDataAnalysisReportService.getById(id); |
| | | if (report == null) { |
| | | return R.error("报告不存在"); |
| | | } |
| | | return R.ok(toReportDetail(report)); |
| | | } |
| | | |
| | | private int normalizeLimit(Integer limit) { |
| | | if (limit == null || limit <= 0) { |
| | | return DEFAULT_LIMIT; |
| | | } |
| | | return Math.min(limit, MAX_LIMIT); |
| | | } |
| | | |
| | | private List<Map<String, Object>> toReportSummaries(List<AiDataAnalysisReport> reports) { |
| | | List<Map<String, Object>> result = new ArrayList<>(); |
| | | if (reports == null || reports.isEmpty()) { |
| | | return result; |
| | | } |
| | | for (AiDataAnalysisReport report : reports) { |
| | | result.add(toReportSummary(report)); |
| | | } |
| | | return result; |
| | | } |
| | | |
| | | private Map<String, Object> toReportSummary(AiDataAnalysisReport report) { |
| | | LinkedHashMap<String, Object> item = new LinkedHashMap<>(); |
| | | item.put("id", report.getId()); |
| | | item.put("periodType", report.getPeriodType()); |
| | | item.put("triggerType", report.getTriggerType()); |
| | | item.put("status", report.getStatus()); |
| | | item.put("createTime", report.getCreateTime()); |
| | | item.put("finishTime", report.getFinishTime()); |
| | | item.put("llmCallCount", report.getLlmCallCount()); |
| | | item.put("totalTokens", report.getTotalTokens()); |
| | | item.put("uploadStatus", report.getUploadStatus()); |
| | | return item; |
| | | } |
| | | |
| | | private Map<String, Object> toReportDetail(AiDataAnalysisReport report) { |
| | | LinkedHashMap<String, Object> item = new LinkedHashMap<>(); |
| | | item.put("id", report.getId()); |
| | | item.put("periodType", report.getPeriodType()); |
| | | item.put("periodStart", report.getPeriodStart()); |
| | | item.put("periodEnd", report.getPeriodEnd()); |
| | | item.put("triggerType", report.getTriggerType()); |
| | | item.put("status", report.getStatus()); |
| | | item.put("summary", report.getSummary()); |
| | | item.put("structuredData", report.getStructuredData()); |
| | | item.put("llmCallCount", report.getLlmCallCount()); |
| | | item.put("promptTokens", report.getPromptTokens()); |
| | | item.put("completionTokens", report.getCompletionTokens()); |
| | | item.put("totalTokens", report.getTotalTokens()); |
| | | item.put("errorMessage", report.getErrorMessage()); |
| | | item.put("localFilePath", report.getLocalFilePath()); |
| | | item.put("uploadStatus", report.getUploadStatus()); |
| | | item.put("createTime", report.getCreateTime()); |
| | | item.put("finishTime", report.getFinishTime()); |
| | | return item; |
| | | } |
| | | } |
| New file |
| | |
| | | package com.zy.ai.entity; |
| | | |
| | | import com.baomidou.mybatisplus.annotation.IdType; |
| | | import com.baomidou.mybatisplus.annotation.TableField; |
| | | import com.baomidou.mybatisplus.annotation.TableId; |
| | | import com.baomidou.mybatisplus.annotation.TableName; |
| | | import lombok.Data; |
| | | |
| | | import java.io.Serializable; |
| | | import java.util.Date; |
| | | |
/**
 * One AI data-analysis run and its outputs; mapped to sys_ai_data_analysis_report.
 */
@Data
@TableName("sys_ai_data_analysis_report")
public class AiDataAnalysisReport implements Serializable {
    private static final long serialVersionUID = 1L;

    /** Primary key (database auto-increment). */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;

    /** Analysis period type, e.g. "YESTERDAY" (the controller's default scheduled period). */
    @TableField("period_type")
    private String periodType;

    /** Start of the analyzed time window. */
    @TableField("period_start")
    private Date periodStart;

    /** End of the analyzed time window. */
    @TableField("period_end")
    private Date periodEnd;

    /** How the run was started — presumably scheduled vs manual; confirm against coordinator. */
    @TableField("trigger_type")
    private String triggerType;

    /** Run status (column name matches property by MyBatis-Plus convention). */
    private String status;

    /** Human-readable analysis summary produced by the LLM. */
    private String summary;

    /** Machine-readable analysis payload (serialized, presumably JSON — TODO confirm). */
    @TableField("structured_data")
    private String structuredData;

    /** Number of LLM calls made during this run. */
    @TableField("llm_call_count")
    private Integer llmCallCount;

    /** Prompt-side tokens consumed by this run. */
    @TableField("prompt_tokens")
    private Integer promptTokens;

    /** Completion-side tokens consumed by this run. */
    @TableField("completion_tokens")
    private Integer completionTokens;

    /** Total tokens consumed by this run. */
    @TableField("total_tokens")
    private Integer totalTokens;

    /** Failure detail when the run did not complete cleanly. */
    @TableField("error_message")
    private String errorMessage;

    /** Path of the report file saved locally (see DataAnalysisFileStorageService). */
    @TableField("local_file_path")
    private String localFilePath;

    /** Status of the optional upload to the external endpoint. */
    @TableField("upload_status")
    private String uploadStatus;

    /** Row creation time (reports are listed newest-first on this column). */
    @TableField("create_time")
    private Date createTime;

    /** Time the analysis finished. */
    @TableField("finish_time")
    private Date finishTime;
}
| New file |
| | |
| | | package com.zy.ai.entity; |
| | | |
| | | import com.baomidou.mybatisplus.annotation.IdType; |
| | | import com.baomidou.mybatisplus.annotation.TableField; |
| | | import com.baomidou.mybatisplus.annotation.TableId; |
| | | import com.baomidou.mybatisplus.annotation.TableName; |
| | | import lombok.Data; |
| | | |
| | | import java.io.Serializable; |
| | | import java.util.Date; |
| | | |
/**
 * Audit log of one attempt to upload an analysis report to the external
 * endpoint; mapped to sys_ai_data_analysis_upload_log.
 */
@Data
@TableName("sys_ai_data_analysis_upload_log")
public class AiDataAnalysisUploadLog implements Serializable {
    private static final long serialVersionUID = 1L;

    /** Primary key (database auto-increment). */
    @TableId(value = "id", type = IdType.AUTO)
    private Long id;

    /** Id of the uploaded report — presumably references sys_ai_data_analysis_report.id. */
    @TableField("report_id")
    private Long reportId;

    /** Target URL of the upload request. */
    @TableField("upload_url")
    private String uploadUrl;

    /** Request payload sent to the endpoint. */
    @TableField("request_body")
    private String requestBody;

    /** Raw response payload received from the endpoint. */
    @TableField("response_body")
    private String responseBody;

    /** HTTP status code of the response (null when the request never completed). */
    @TableField("http_status")
    private Integer httpStatus;

    /** Outcome of the attempt (column name matches property by convention). */
    private String result;

    /** Failure detail when the upload did not succeed. */
    @TableField("error_message")
    private String errorMessage;

    /** How many retries preceded/accompanied this attempt — TODO confirm semantics with the upload service. */
    @TableField("retry_count")
    private Integer retryCount;

    /** Row creation time. */
    @TableField("create_time")
    private Date createTime;
}
| New file |
| | |
| | | package com.zy.ai.entity; |
| | | |
| | | import com.baomidou.mybatisplus.annotation.IdType; |
| | | import com.baomidou.mybatisplus.annotation.TableField; |
| | | import com.baomidou.mybatisplus.annotation.TableId; |
| | | import com.baomidou.mybatisplus.annotation.TableName; |
| | | import lombok.Data; |
| | | |
| | | import java.io.Serializable; |
| | | import java.util.Date; |
| | | |
/**
 * Cumulative AI token-usage counters; mapped to sys_ai_token_usage.
 * The service layer maintains a single counter row (id = 1) that is
 * atomically incremented via AiTokenUsageMapper.incrementTokens.
 */
@Data
@TableName("sys_ai_token_usage")
public class AiTokenUsage implements Serializable {
    private static final long serialVersionUID = 1L;

    /** Primary key; the service seeds the singleton row with id = 1. */
    @TableId(value = "id", type = IdType.AUTO)
    private Integer id;

    /** Running total of prompt-side tokens. */
    @TableField("prompt_tokens")
    private Long promptTokens;

    /** Running total of completion-side tokens. */
    @TableField("completion_tokens")
    private Long completionTokens;

    /** Running total of all tokens. */
    @TableField("total_tokens")
    private Long totalTokens;

    /** Running total of LLM calls. */
    @TableField("llm_call_count")
    private Long llmCallCount;

    /** Last time the counters were updated. */
    @TableField("update_time")
    private Date updateTime;
}
| | |
| | | |
| | | DIAGNOSE_STREAM("wcs_diagnose_stream", "WCS巡检诊断"), |
| | | SENSOR_CHAT("wcs_sensor_chat", "WCS专家问答"), |
| | | AUTO_TUNE_DISPATCH("wcs_auto_tune_dispatch", "WCS自动调参"); |
| | | AUTO_TUNE_DISPATCH("wcs_auto_tune_dispatch", "WCS自动调参"), |
| | | DATA_ANALYSIS("wcs_data_analysis", "WCS数据分析"); |
| | | |
| | | private final String code; |
| | | private final String label; |
| | |
| | | import com.zy.ai.gateway.adapter.AiProviderAdapterRegistry; |
| | | import com.zy.ai.gateway.model.AiRequest; |
| | | import com.zy.ai.gateway.model.AiResponse; |
| | | import com.zy.ai.service.AiTokenUsageService; |
| | | import com.zy.ai.service.LlmCallLogService; |
| | | import com.zy.ai.service.LlmRoutingService; |
| | | import lombok.RequiredArgsConstructor; |
| | |
| | | private final LlmRoutingService llmRoutingService; |
| | | private final AiProviderAdapterRegistry adapterRegistry; |
| | | private final LlmCallLogService llmCallLogService; |
| | | private final AiTokenUsageService aiTokenUsageService; |
| | | |
| | | @Value("${llm.base-url:}") |
| | | private String fallbackBaseUrl; |
| | |
| | | item.setExtra(cut(extraPayload(route, response), 512)); |
| | | item.setCreateTime(new Date()); |
| | | llmCallLogService.saveIgnoreError(item); |
| | | |
| | | // 累加 token 到独立存储 |
| | | if (success && response != null && response.getUsage() != null) { |
| | | aiTokenUsageService.incrementTokens( |
| | | response.getUsage().getInputTokens() == null ? 0 : response.getUsage().getInputTokens(), |
| | | response.getUsage().getOutputTokens() == null ? 0 : response.getUsage().getOutputTokens(), |
| | | response.getUsage().getTotalTokens() == null ? 0 : response.getUsage().getTotalTokens(), |
| | | 1); |
| | | } |
| | | } |
| | | |
| | | private String responseText(AiResponse response) { |
| New file |
| | |
| | | package com.zy.ai.mapper; |
| | | |
| | | import com.baomidou.mybatisplus.core.mapper.BaseMapper; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import org.apache.ibatis.annotations.Mapper; |
| | | import org.springframework.stereotype.Repository; |
| | | |
/** MyBatis-Plus mapper for AiDataAnalysisReport (table sys_ai_data_analysis_report). */
@Mapper
@Repository
public interface AiDataAnalysisReportMapper extends BaseMapper<AiDataAnalysisReport> {
}
| New file |
| | |
| | | package com.zy.ai.mapper; |
| | | |
| | | import com.baomidou.mybatisplus.core.mapper.BaseMapper; |
| | | import com.zy.ai.entity.AiDataAnalysisUploadLog; |
| | | import org.apache.ibatis.annotations.Mapper; |
| | | import org.springframework.stereotype.Repository; |
| | | |
/** MyBatis-Plus mapper for AiDataAnalysisUploadLog (table sys_ai_data_analysis_upload_log). */
@Mapper
@Repository
public interface AiDataAnalysisUploadLogMapper extends BaseMapper<AiDataAnalysisUploadLog> {
}
| New file |
| | |
| | | package com.zy.ai.mapper; |
| | | |
| | | import com.baomidou.mybatisplus.core.mapper.BaseMapper; |
| | | import com.zy.ai.entity.AiTokenUsage; |
| | | import org.apache.ibatis.annotations.Mapper; |
| | | import org.apache.ibatis.annotations.Param; |
| | | import org.springframework.stereotype.Repository; |
| | | |
/** Mapper for the cumulative AI token-usage counter row (table sys_ai_token_usage). */
@Mapper
@Repository
public interface AiTokenUsageMapper extends BaseMapper<AiTokenUsage> {

    /**
     * Atomically adds the given deltas to the counter row (custom SQL defined in
     * the XML mapping, not visible here).
     *
     * @return number of rows updated; 0 when the counter row does not exist yet,
     *         in which case the service layer inserts a fresh row
     */
    int incrementTokens(@Param("promptTokens") long promptTokens,
                        @Param("completionTokens") long completionTokens,
                        @Param("totalTokens") long totalTokens,
                        @Param("callCount") long callCount);
}
| | |
| | | package com.zy.ai.mcp.config; |
| | | |
| | | import com.zy.ai.mcp.tool.AutoTuneMcpTools; |
| | | import com.zy.ai.mcp.tool.DataAnalysisMcpTools; |
| | | import com.zy.ai.mcp.tool.WcsMcpTools; |
| | | import org.springframework.ai.support.ToolCallbacks; |
| | | import org.springframework.ai.tool.StaticToolCallbackProvider; |
| | |
| | | |
| | | @Bean("wcsMcpToolCallbackProvider") |
| | | public ToolCallbackProvider wcsMcpToolCallbackProvider(WcsMcpTools wcsMcpTools, |
| | | AutoTuneMcpTools autoTuneMcpTools) { |
| | | return new StaticToolCallbackProvider(ToolCallbacks.from(wcsMcpTools, autoTuneMcpTools)); |
| | | AutoTuneMcpTools autoTuneMcpTools, |
| | | DataAnalysisMcpTools dataAnalysisMcpTools) { |
| | | return new StaticToolCallbackProvider(ToolCallbacks.from(wcsMcpTools, autoTuneMcpTools, dataAnalysisMcpTools)); |
| | | } |
| | | } |
| | |
    /** Returns system configuration data; argument semantics defined by the implementation. */
    Object getSystemConfig(JSONObject args);

    /** Returns the main-process pseudocode view (implementation delegates to queryMainProcessPseudocode). */
    Object getSystemPseudocode(JSONObject args);

    /**
     * Task throughput aggregation for args {startTime, endTime};
     * returns an error object when either bound is missing.
     */
    Object getTaskThroughput(JSONObject args);

    /** Per-device-type fault counts/durations and overall fault rate for {startTime, endTime}. */
    Object getDeviceFaultSummary(JSONObject args);

    /** Per-device task-allocation statistics for {startTime, endTime}. */
    Object getDeviceUtilization(JSONObject args);

    /** Device error-log counts by device type for {startTime, endTime}. */
    Object getErrorLogSummary(JSONObject args);
| | | } |
| | |
| | | import com.zy.asrs.entity.BasDevp; |
| | | import com.zy.asrs.entity.BasRgv; |
| | | import com.zy.asrs.entity.WrkMast; |
| | | import com.zy.asrs.entity.BasCrnp; |
| | | import com.zy.asrs.entity.BasDevp; |
| | | import com.zy.asrs.entity.BasRgv; |
| | | import com.zy.asrs.entity.WrkMast; |
| | | import com.zy.asrs.mapper.WrkAnalysisMapper; |
| | | import com.zy.asrs.service.BasCrnpErrLogService; |
| | | import com.zy.asrs.service.BasCrnpService; |
| | | import com.zy.asrs.service.BasDevpService; |
| | | import com.zy.asrs.service.BasDualCrnpErrLogService; |
| | | import com.zy.asrs.service.BasRgvErrLogService; |
| | | import com.zy.asrs.service.BasRgvService; |
| | | import com.zy.asrs.service.BasStationErrLogService; |
| | | import com.zy.asrs.service.WrkMastService; |
| | | import com.zy.core.cache.SlaveConnection; |
| | | import com.zy.core.enums.SlaveType; |
| | |
| | | private ConfigService configService; |
| | | @Autowired |
| | | private MainProcessPseudocodeService mainProcessPseudocodeService; |
| | | @Autowired |
| | | private WrkAnalysisMapper wrkAnalysisMapper; |
| | | @Autowired |
| | | private BasCrnpErrLogService basCrnpErrLogService; |
| | | @Autowired |
| | | private BasDualCrnpErrLogService basDualCrnpErrLogService; |
| | | @Autowired |
| | | private BasRgvErrLogService basRgvErrLogService; |
| | | @Autowired |
| | | private BasStationErrLogService basStationErrLogService; |
| | | |
| | | @Override |
| | | public Object getCrnDeviceStatus(JSONObject args) { |
| | |
| | | return mainProcessPseudocodeService.queryMainProcessPseudocode(refresh); |
| | | } |
| | | |
| | | @Override |
| | | public Object getTaskThroughput(JSONObject args) { |
| | | Date startTime = optDate(args, "startTime"); |
| | | Date endTime = optDate(args, "endTime"); |
| | | if (startTime == null || endTime == null) { |
| | | JSONObject err = new JSONObject(); |
| | | err.put("error", "startTime and endTime are required"); |
| | | return err; |
| | | } |
| | | Map<String, Object> result = wrkAnalysisMapper.aggregateThroughput(startTime, endTime); |
| | | JSONObject data = new JSONObject(); |
| | | data.put("throughput", result); |
| | | data.put("startTime", startTime); |
| | | data.put("endTime", endTime); |
| | | return data; |
| | | } |
| | | |
| | | @Override |
| | | public Object getDeviceFaultSummary(JSONObject args) { |
| | | Date startTime = optDate(args, "startTime"); |
| | | Date endTime = optDate(args, "endTime"); |
| | | if (startTime == null || endTime == null) { |
| | | JSONObject err = new JSONObject(); |
| | | err.put("error", "startTime and endTime are required"); |
| | | return err; |
| | | } |
| | | Map<String, Object> throughput = wrkAnalysisMapper.aggregateThroughput(startTime, endTime); |
| | | JSONObject data = new JSONObject(); |
| | | data.put("totalTaskCount", throughput.get("taskCount")); |
| | | data.put("faultTaskCount", throughput.get("faultTaskCount")); |
| | | data.put("totalFaultCount", throughput.get("totalFaultCount")); |
| | | data.put("totalFaultDurationMs", throughput.get("totalFaultDurationMs")); |
| | | data.put("crnFaultCount", throughput.get("crnFaultCount")); |
| | | data.put("crnFaultDurationMs", throughput.get("crnFaultDurationMs")); |
| | | data.put("dualCrnFaultCount", throughput.get("dualCrnFaultCount")); |
| | | data.put("dualCrnFaultDurationMs", throughput.get("dualCrnFaultDurationMs")); |
| | | data.put("rgvFaultCount", throughput.get("rgvFaultCount")); |
| | | data.put("rgvFaultDurationMs", throughput.get("rgvFaultDurationMs")); |
| | | data.put("stationFaultCount", throughput.get("stationFaultCount")); |
| | | data.put("stationFaultDurationMs", throughput.get("stationFaultDurationMs")); |
| | | long taskCount = throughput.get("taskCount") != null ? ((Number) throughput.get("taskCount")).longValue() : 0; |
| | | long faultTask = throughput.get("faultTaskCount") != null ? ((Number) throughput.get("faultTaskCount")).longValue() : 0; |
| | | data.put("faultRate", taskCount > 0 ? Math.round(faultTask * 10000.0 / taskCount) / 100.0 : 0); |
| | | data.put("startTime", startTime); |
| | | data.put("endTime", endTime); |
| | | return data; |
| | | } |
| | | |
| | | @Override |
| | | public Object getDeviceUtilization(JSONObject args) { |
| | | Date startTime = optDate(args, "startTime"); |
| | | Date endTime = optDate(args, "endTime"); |
| | | if (startTime == null || endTime == null) { |
| | | JSONObject err = new JSONObject(); |
| | | err.put("error", "startTime and endTime are required"); |
| | | return err; |
| | | } |
| | | List<Map<String, Object>> devices = wrkAnalysisMapper.groupByDevice(startTime, endTime); |
| | | JSONObject data = new JSONObject(); |
| | | data.put("devices", devices); |
| | | data.put("startTime", startTime); |
| | | data.put("endTime", endTime); |
| | | return data; |
| | | } |
| | | |
| | | @Override |
| | | public Object getErrorLogSummary(JSONObject args) { |
| | | Date startTime = optDate(args, "startTime"); |
| | | Date endTime = optDate(args, "endTime"); |
| | | if (startTime == null || endTime == null) { |
| | | JSONObject err = new JSONObject(); |
| | | err.put("error", "startTime and endTime are required"); |
| | | return err; |
| | | } |
| | | JSONObject data = new JSONObject(); |
| | | |
| | | long crnErrCount = basCrnpErrLogService.count(new QueryWrapper<com.zy.asrs.entity.BasCrnpErrLog>() |
| | | .ge("start_time", startTime).lt("start_time", endTime)); |
| | | long dualCrnErrCount = basDualCrnpErrLogService.count(new QueryWrapper<com.zy.asrs.entity.BasDualCrnpErrLog>() |
| | | .ge("start_time", startTime).lt("start_time", endTime)); |
| | | long rgvErrCount = basRgvErrLogService.count(new QueryWrapper<com.zy.asrs.entity.BasRgvErrLog>() |
| | | .ge("start_time", startTime).lt("start_time", endTime)); |
| | | long stationErrCount = basStationErrLogService.count(new QueryWrapper<com.zy.asrs.entity.BasStationErrLog>() |
| | | .ge("start_time", startTime).lt("start_time", endTime)); |
| | | |
| | | data.put("crnErrorCount", crnErrCount); |
| | | data.put("dualCrnErrorCount", dualCrnErrCount); |
| | | data.put("rgvErrorCount", rgvErrCount); |
| | | data.put("stationErrorCount", stationErrCount); |
| | | data.put("totalErrorCount", crnErrCount + dualCrnErrCount + rgvErrCount + stationErrCount); |
| | | data.put("startTime", startTime); |
| | | data.put("endTime", endTime); |
| | | return data; |
| | | } |
| | | |
| | | // --------- helpers --------- |
| | | |
| | | private int optInt(JSONObject o, String key, int def) { |
| | |
| | | return value.trim(); |
| | | } |
| | | |
| | | private Date optDate(JSONObject o, String key) { |
| | | if (o == null || !o.containsKey(key)) return null; |
| | | return o.getDate(key); |
| | | } |
| | | |
| | | private List<Long> optLongList(JSONObject o, String key) { |
| | | if (o == null || !o.containsKey(key)) return Collections.emptyList(); |
| | | JSONArray arr = o.getJSONArray(key); |
| New file |
| | |
| | | package com.zy.ai.mcp.tool; |
| | | |
| | | import com.alibaba.fastjson.JSONObject; |
| | | import com.zy.ai.mcp.service.WcsDataFacade; |
| | | import lombok.RequiredArgsConstructor; |
| | | import org.springframework.ai.tool.annotation.Tool; |
| | | import org.springframework.ai.tool.annotation.ToolParam; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | import java.util.Date; |
| | | |
| | | @Component |
| | | @RequiredArgsConstructor |
| | | public class DataAnalysisMcpTools { |
| | | |
| | | private final WcsDataFacade wcsDataFacade; |
| | | |
| | | @Tool(name = "analysis_query_task_throughput", description = "查询指定时间范围内的任务吞吐量统计:任务总量、入库/出库/移库数量、平均时长、故障汇总") |
| | | public Object queryTaskThroughput( |
| | | @ToolParam(description = "分析开始时间") Date startTime, |
| | | @ToolParam(description = "分析结束时间") Date endTime) { |
| | | return wcsDataFacade.getTaskThroughput(json().fluentPut("startTime", startTime).fluentPut("endTime", endTime)); |
| | | } |
| | | |
| | | @Tool(name = "analysis_query_device_fault_summary", description = "查询指定时间范围内的设备故障汇总:按设备类型(堆垛机/双工位堆垛机/RGV/输送线)统计故障次数和故障时长") |
| | | public Object queryDeviceFaultSummary( |
| | | @ToolParam(description = "分析开始时间") Date startTime, |
| | | @ToolParam(description = "分析结束时间") Date endTime) { |
| | | return wcsDataFacade.getDeviceFaultSummary(json().fluentPut("startTime", startTime).fluentPut("endTime", endTime)); |
| | | } |
| | | |
| | | @Tool(name = "analysis_query_device_utilization", description = "查询指定时间范围内的设备利用率:按设备编号统计任务分配量、平均任务时长、故障数") |
| | | public Object queryDeviceUtilization( |
| | | @ToolParam(description = "分析开始时间") Date startTime, |
| | | @ToolParam(description = "分析结束时间") Date endTime) { |
| | | return wcsDataFacade.getDeviceUtilization(json().fluentPut("startTime", startTime).fluentPut("endTime", endTime)); |
| | | } |
| | | |
| | | @Tool(name = "analysis_query_error_logs", description = "查询指定时间范围内的设备错误日志统计:按设备类型统计错误次数") |
| | | public Object queryErrorLogs( |
| | | @ToolParam(description = "分析开始时间") Date startTime, |
| | | @ToolParam(description = "分析结束时间") Date endTime) { |
| | | return wcsDataFacade.getErrorLogSummary(json().fluentPut("startTime", startTime).fluentPut("endTime", endTime)); |
| | | } |
| | | |
| | | private JSONObject json() { |
| | | return new JSONObject(); |
| | | } |
| | | } |
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import com.baomidou.mybatisplus.extension.service.IService; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | |
/** CRUD service over AiDataAnalysisReport; no methods beyond the IService contract. */
public interface AiDataAnalysisReportService extends IService<AiDataAnalysisReport> {
}
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import com.baomidou.mybatisplus.extension.service.IService; |
| | | import com.zy.ai.entity.AiDataAnalysisUploadLog; |
| | | |
/** CRUD service over AiDataAnalysisUploadLog; no methods beyond the IService contract. */
public interface AiDataAnalysisUploadLogService extends IService<AiDataAnalysisUploadLog> {
}
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import com.baomidou.mybatisplus.extension.service.IService; |
| | | import com.zy.ai.entity.AiTokenUsage; |
| | | |
/** Access to the global cumulative AI token-usage counters. */
public interface AiTokenUsageService extends IService<AiTokenUsage> {

    /**
     * Adds the given deltas to the cumulative counters. Implementations are
     * best-effort: failures must not disrupt the calling AI pipeline.
     */
    void incrementTokens(long promptTokens, long completionTokens, long totalTokens, long callCount);
}
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import lombok.Data; |
| | | |
| | | import java.io.Serializable; |
| | | import java.util.List; |
| | | |
/**
 * Executes one AI data-analysis pass over WCS operational data and reports
 * the outcome together with token/tool-call accounting.
 */
public interface DataAnalysisAgentService {

    /** Runs the analysis for the given period type and returns the aggregated result. */
    DataAnalysisAgentResult runAnalysis(String periodType);

    /** Aggregated outcome of one analysis run. */
    @Data
    class DataAnalysisAgentResult implements Serializable {
        private static final long serialVersionUID = 1L;
        private Boolean success;            // whether the run completed successfully
        private String periodType;          // period the run covered
        private String triggerType;         // how the run was started (scheduled/manual — confirm against coordinator)
        private String summary;             // human-readable LLM summary
        private String structuredData;      // serialized machine-readable payload
        private Integer toolCallCount;      // number of MCP tool invocations
        private Integer llmCallCount;       // number of LLM calls
        private Long promptTokens;          // prompt-side tokens consumed
        private Long completionTokens;      // completion-side tokens consumed
        private Long totalTokens;           // total tokens consumed
        private Boolean maxRoundsReached;   // true when the agent hit its round limit before finishing
        private List<McpCallResult> mcpCalls; // per-call details of each MCP tool invocation
    }

    /** Detail of a single MCP tool invocation made during the run. */
    @Data
    class McpCallResult implements Serializable {
        private static final long serialVersionUID = 1L;
        private Integer callSeq;      // 1-based sequence of the call within the run — TODO confirm base
        private String toolName;      // invoked MCP tool name
        private Long durationMs;      // wall-clock duration of the call
        private String status;        // call outcome status
        private String requestJson;   // serialized request arguments
        private String responseJson;  // serialized response payload
        private String errorMessage;  // failure detail, if any
    }
}
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import lombok.Data; |
| | | |
/**
 * Orchestrates AI data-analysis runs: feature switch management,
 * eligibility-gated scheduled execution, and manual triggering.
 */
public interface DataAnalysisCoordinatorService {

    /** Runs an analysis only when the feature is enabled/eligible; otherwise returns a skipped result. */
    DataAnalysisCoordinatorResult runAnalysisIfEligible();

    /** Forces an analysis run for the given period type (used by the manual-trigger endpoint). */
    DataAnalysisCoordinatorResult runManualAnalysis(String periodType);

    /** Whether the AI data-analysis feature is currently switched on. */
    boolean isEnabled();

    /** Persists the feature switch state. */
    void setEnabled(boolean enabled);

    /** Outcome of a coordination attempt: either skipped (with a reason) or triggered (with the agent result). */
    @Data
    class DataAnalysisCoordinatorResult {
        private Boolean skipped;    // true when no analysis was run
        private String reason;      // human-readable skip reason (null when triggered)
        private Boolean triggered;  // true when the agent actually ran
        private DataAnalysisAgentService.DataAnalysisAgentResult agentResult; // populated only when triggered

        /** Factory for a "not run" outcome carrying the skip reason. */
        public static DataAnalysisCoordinatorResult skipped(String reason) {
            DataAnalysisCoordinatorResult r = new DataAnalysisCoordinatorResult();
            r.setSkipped(true);
            r.setReason(reason);
            r.setTriggered(false);
            return r;
        }

        /** Factory for a "ran" outcome wrapping the agent's result. */
        public static DataAnalysisCoordinatorResult triggered(DataAnalysisAgentService.DataAnalysisAgentResult agentResult) {
            DataAnalysisCoordinatorResult r = new DataAnalysisCoordinatorResult();
            r.setSkipped(false);
            r.setTriggered(true);
            r.setAgentResult(agentResult);
            return r;
        }
    }
}
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | |
/** Persists generated analysis reports to local file storage. */
public interface DataAnalysisFileStorageService {

    /**
     * Writes the report to disk and returns the saved file path — presumably the
     * value stored in the report's local_file_path column; confirm caller contract.
     */
    String saveReport(AiDataAnalysisReport report);
}
| New file |
| | |
| | | package com.zy.ai.service; |
| | | |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import lombok.Data; |
| | | |
/** Uploads generated analysis reports to the configured external endpoint. */
public interface DataAnalysisUploadService {

    /** Attempts to upload the report; may be skipped when uploading is disabled. */
    UploadResult upload(AiDataAnalysisReport report);

    /** Outcome of one upload attempt: skipped, success (with response), or failure (with error). */
    @Data
    class UploadResult {
        private boolean success;      // true only for a completed successful upload
        private boolean skipped;      // true when uploading was not attempted
        private Integer httpStatus;   // HTTP status of the response (null when skipped or no response)
        private String responseBody;  // raw response payload on success
        private String errorMessage;  // failure detail on error

        /** Factory for a "not attempted" outcome (success defaults to false). */
        public static UploadResult skipped() {
            UploadResult r = new UploadResult();
            r.setSkipped(true);
            return r;
        }

        /** Factory for a successful upload carrying the HTTP status and response body. */
        public static UploadResult success(Integer httpStatus, String responseBody) {
            UploadResult r = new UploadResult();
            r.setSuccess(true);
            r.setHttpStatus(httpStatus);
            r.setResponseBody(responseBody);
            return r;
        }

        /** Factory for a failed upload carrying the HTTP status (if any) and error detail. */
        public static UploadResult failed(Integer httpStatus, String errorMessage) {
            UploadResult r = new UploadResult();
            r.setSuccess(false);
            r.setHttpStatus(httpStatus);
            r.setErrorMessage(errorMessage);
            return r;
        }
    }
}
| | |
| | | private final LlmSpringAiClientService llmSpringAiClientService; |
| | | private final AiGatewayService aiGatewayService; |
| | | private final OpenAiChatCompletionsMapper openAiChatCompletionsMapper; |
| | | private final AiTokenUsageService aiTokenUsageService; |
| | | |
| | | @Value("${llm.base-url:}") |
| | | private String fallbackBaseUrl; |
| | |
| | | item.setExtra(cut(buildExtraPayload(responseObj == null ? null : responseObj.getUsage(), extra), 512)); |
| | | item.setCreateTime(new Date()); |
| | | llmCallLogService.saveIgnoreError(item); |
| | | |
| | | // 累加 token 到独立存储 |
| | | if (success && responseObj != null && responseObj.getUsage() != null) { |
| | | ChatCompletionResponse.Usage usage = responseObj.getUsage(); |
| | | aiTokenUsageService.incrementTokens( |
| | | usage.getPromptTokens() == null ? 0 : usage.getPromptTokens(), |
| | | usage.getCompletionTokens() == null ? 0 : usage.getCompletionTokens(), |
| | | usage.getTotalTokens() == null ? 0 : usage.getTotalTokens(), |
| | | 1); |
| | | } |
| | | } |
| | | |
| | | private ChatCompletionResponse usageResponse(ChatCompletionResponse.Usage usage) { |
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import com.zy.ai.mapper.AiDataAnalysisReportMapper; |
| | | import com.zy.ai.service.AiDataAnalysisReportService; |
| | | import org.springframework.stereotype.Service; |
| | | |
/** Default AiDataAnalysisReportService: plain MyBatis-Plus CRUD, no extra logic. */
@Service("aiDataAnalysisReportService")
public class AiDataAnalysisReportServiceImpl extends ServiceImpl<AiDataAnalysisReportMapper, AiDataAnalysisReport>
        implements AiDataAnalysisReportService {
}
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; |
| | | import com.zy.ai.entity.AiDataAnalysisUploadLog; |
| | | import com.zy.ai.mapper.AiDataAnalysisUploadLogMapper; |
| | | import com.zy.ai.service.AiDataAnalysisUploadLogService; |
| | | import org.springframework.stereotype.Service; |
| | | |
/** Default AiDataAnalysisUploadLogService: plain MyBatis-Plus CRUD, no extra logic. */
@Service("aiDataAnalysisUploadLogService")
public class AiDataAnalysisUploadLogServiceImpl extends ServiceImpl<AiDataAnalysisUploadLogMapper, AiDataAnalysisUploadLog>
        implements AiDataAnalysisUploadLogService {
}
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; |
| | | import com.zy.ai.entity.AiTokenUsage; |
| | | import com.zy.ai.mapper.AiTokenUsageMapper; |
| | | import com.zy.ai.service.AiTokenUsageService; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.stereotype.Service; |
| | | |
| | | @Slf4j |
| | | @Service("aiTokenUsageService") |
| | | public class AiTokenUsageServiceImpl extends ServiceImpl<AiTokenUsageMapper, AiTokenUsage> |
| | | implements AiTokenUsageService { |
| | | |
| | | @Override |
| | | public void incrementTokens(long promptTokens, long completionTokens, long totalTokens, long callCount) { |
| | | if (promptTokens <= 0 && completionTokens <= 0 && totalTokens <= 0 && callCount <= 0) { |
| | | return; |
| | | } |
| | | try { |
| | | int rows = baseMapper.incrementTokens(promptTokens, completionTokens, totalTokens, callCount); |
| | | if (rows == 0) { |
| | | // Row doesn't exist, create it |
| | | AiTokenUsage usage = new AiTokenUsage(); |
| | | usage.setId(1); |
| | | usage.setPromptTokens(promptTokens); |
| | | usage.setCompletionTokens(completionTokens); |
| | | usage.setTotalTokens(totalTokens); |
| | | usage.setLlmCallCount(callCount); |
| | | save(usage); |
| | | } |
| | | } catch (Exception e) { |
| | | log.warn("Failed to increment AI token usage: {}", e.getMessage()); |
| | | } |
| | | } |
| | | } |
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.alibaba.fastjson.JSON; |
| | | import com.alibaba.fastjson.JSONObject; |
| | | import com.zy.ai.entity.AiPromptTemplate; |
| | | import com.zy.ai.entity.ChatCompletionRequest; |
| | | import com.zy.ai.entity.ChatCompletionResponse; |
| | | import com.zy.ai.enums.AiPromptScene; |
| | | import com.zy.ai.mcp.service.SpringAiMcpToolManager; |
| | | import com.zy.ai.service.AiPromptTemplateService; |
| | | import com.zy.ai.service.DataAnalysisAgentService; |
| | | import com.zy.ai.service.LlmChatService; |
| | | import lombok.RequiredArgsConstructor; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.stereotype.Service; |
| | | |
| | | import java.time.DayOfWeek; |
| | | import java.time.LocalDate; |
| | | import java.time.LocalDateTime; |
| | | import java.time.LocalTime; |
| | | import java.util.*; |
| | | |
| | | @Slf4j |
| | | @Service |
| | | @RequiredArgsConstructor |
| | | public class DataAnalysisAgentServiceImpl implements DataAnalysisAgentService { |
| | | |
| | | private static final int MAX_TOOL_ROUNDS = 10; |
| | | private static final double TEMPERATURE = 0.3D; |
| | | private static final int MAX_TOKENS = 4096; |
| | | private static final String MCP_STATUS_SUCCESS = "success"; |
| | | private static final String MCP_STATUS_FAILED = "failed"; |
| | | |
| | | private static final String TOOL_THROUGHPUT = "wcs_local_analysis_query_task_throughput"; |
| | | private static final String TOOL_FAULT_SUMMARY = "wcs_local_analysis_query_device_fault_summary"; |
| | | private static final String TOOL_UTILIZATION = "wcs_local_analysis_query_device_utilization"; |
| | | private static final String TOOL_ERROR_LOGS = "wcs_local_analysis_query_error_logs"; |
| | | |
| | | private static final Set<String> ALLOWED_TOOL_NAMES = Set.of( |
| | | TOOL_THROUGHPUT, |
| | | TOOL_FAULT_SUMMARY, |
| | | TOOL_UTILIZATION, |
| | | TOOL_ERROR_LOGS |
| | | ); |
| | | |
| | | private final LlmChatService llmChatService; |
| | | private final SpringAiMcpToolManager mcpToolManager; |
| | | private final AiPromptTemplateService aiPromptTemplateService; |
| | | |
| | | @Override |
| | | public DataAnalysisAgentResult runAnalysis(String periodType) { |
| | | String normalizedPeriod = normalizePeriodType(periodType); |
| | | DateRange dateRange = resolveDateRange(normalizedPeriod); |
| | | UsageCounter usageCounter = new UsageCounter(); |
| | | List<McpCallResult> mcpCalls = new ArrayList<>(); |
| | | boolean maxRoundsReached = false; |
| | | StringBuilder summaryBuffer = new StringBuilder(); |
| | | int toolCallCount = 0; |
| | | |
| | | try { |
| | | List<Object> tools = filterAllowedTools(mcpToolManager.buildOpenAiTools()); |
| | | if (tools == null || tools.isEmpty()) { |
| | | throw new IllegalStateException("No data analysis MCP tools registered"); |
| | | } |
| | | |
| | | AiPromptTemplate promptTemplate = aiPromptTemplateService.resolvePublished(AiPromptScene.DATA_ANALYSIS.getCode()); |
| | | List<ChatCompletionRequest.Message> messages = buildMessages(promptTemplate, normalizedPeriod, dateRange); |
| | | |
| | | for (int round = 0; round < MAX_TOOL_ROUNDS; round++) { |
| | | ChatCompletionResponse response = llmChatService.chatCompletionOrThrow(messages, TEMPERATURE, MAX_TOKENS, tools); |
| | | ChatCompletionRequest.Message assistantMessage = extractAssistantMessage(response); |
| | | usageCounter.add(response.getUsage()); |
| | | messages.add(assistantMessage); |
| | | appendSummary(summaryBuffer, assistantMessage.getContent()); |
| | | |
| | | List<ChatCompletionRequest.ToolCall> toolCalls = assistantMessage.getTool_calls(); |
| | | if (toolCalls == null || toolCalls.isEmpty()) { |
| | | return buildResult(true, normalizedPeriod, summaryBuffer, toolCallCount, usageCounter, false, mcpCalls); |
| | | } |
| | | |
| | | for (ChatCompletionRequest.ToolCall toolCall : toolCalls) { |
| | | McpCallResult mcpCall = callAnalysisTool(toolCall, mcpCalls); |
| | | toolCallCount++; |
| | | Object toolOutput = parseToolOutput(mcpCall); |
| | | messages.add(buildToolMessage(toolCall, toolOutput)); |
| | | } |
| | | } |
| | | maxRoundsReached = true; |
| | | return buildResult(false, normalizedPeriod, summaryBuffer, toolCallCount, usageCounter, maxRoundsReached, mcpCalls); |
| | | } catch (Exception exception) { |
| | | log.error("Data analysis agent stopped with error", exception); |
| | | appendSummary(summaryBuffer, "数据分析 Agent 执行异常: " + exception.getMessage()); |
| | | return buildResult(false, normalizedPeriod, summaryBuffer, toolCallCount, usageCounter, maxRoundsReached, mcpCalls); |
| | | } |
| | | } |
| | | |
| | | private McpCallResult callAnalysisTool(ChatCompletionRequest.ToolCall toolCall, List<McpCallResult> mcpCalls) { |
| | | String toolName = resolveToolName(toolCall); |
| | | if (!ALLOWED_TOOL_NAMES.contains(toolName)) { |
| | | throw new IllegalArgumentException("Disallowed data analysis MCP tool: " + toolName); |
| | | } |
| | | JSONObject arguments = parseArguments(toolCall); |
| | | long startTimeMillis = System.currentTimeMillis(); |
| | | McpCallResult mcpCall = new McpCallResult(); |
| | | mcpCall.setCallSeq(mcpCalls.size() + 1); |
| | | mcpCall.setToolName(toolName); |
| | | mcpCall.setRequestJson(JSON.toJSONString(arguments == null ? new JSONObject() : arguments)); |
| | | try { |
| | | Object output = mcpToolManager.callTool(toolName, arguments); |
| | | mcpCall.setDurationMs(Math.max(0L, System.currentTimeMillis() - startTimeMillis)); |
| | | mcpCall.setStatus(MCP_STATUS_SUCCESS); |
| | | mcpCall.setResponseJson(JSON.toJSONString(output)); |
| | | mcpCalls.add(mcpCall); |
| | | return mcpCall; |
| | | } catch (Exception exception) { |
| | | mcpCall.setDurationMs(Math.max(0L, System.currentTimeMillis() - startTimeMillis)); |
| | | mcpCall.setStatus(MCP_STATUS_FAILED); |
| | | mcpCall.setErrorMessage(exception.getMessage()); |
| | | mcpCalls.add(mcpCall); |
| | | throw new IllegalStateException("Data analysis MCP tool failed: " + toolName + ", " + exception.getMessage(), exception); |
| | | } |
| | | } |
| | | |
| | | private Object parseToolOutput(McpCallResult mcpCall) { |
| | | if (MCP_STATUS_FAILED.equals(mcpCall.getStatus())) { |
| | | JSONObject err = new JSONObject(); |
| | | err.put("error", mcpCall.getErrorMessage()); |
| | | return err; |
| | | } |
| | | if (mcpCall.getResponseJson() == null || mcpCall.getResponseJson().isEmpty()) { |
| | | return new JSONObject(); |
| | | } |
| | | try { |
| | | return JSON.parse(mcpCall.getResponseJson()); |
| | | } catch (Exception e) { |
| | | return mcpCall.getResponseJson(); |
| | | } |
| | | } |
| | | |
| | | private List<ChatCompletionRequest.Message> buildMessages(AiPromptTemplate promptTemplate, |
| | | String periodType, |
| | | DateRange dateRange) { |
| | | List<ChatCompletionRequest.Message> messages = new ArrayList<>(); |
| | | |
| | | ChatCompletionRequest.Message systemMessage = new ChatCompletionRequest.Message(); |
| | | systemMessage.setRole("system"); |
| | | systemMessage.setContent(promptTemplate == null ? "" : promptTemplate.getContent()); |
| | | messages.add(systemMessage); |
| | | |
| | | ChatCompletionRequest.Message userMessage = new ChatCompletionRequest.Message(); |
| | | userMessage.setRole("user"); |
| | | userMessage.setContent("请分析" + periodLabel(periodType) + "的WCS运营数据。" |
| | | + "时间范围:startTime=" + dateRange.start + ", endTime=" + dateRange.end |
| | | + "。请依次调用所有分析工具获取数据,然后生成完整的分析报告。"); |
| | | messages.add(userMessage); |
| | | return messages; |
| | | } |
| | | |
| | | private String periodLabel(String periodType) { |
| | | switch (periodType) { |
| | | case "TODAY": return "今天"; |
| | | case "YESTERDAY": return "昨天"; |
| | | case "THIS_WEEK": return "本周"; |
| | | case "THIS_MONTH": return "本月"; |
| | | default: return periodType; |
| | | } |
| | | } |
| | | |
| | | private DateRange resolveDateRange(String periodType) { |
| | | LocalDate today = LocalDate.now(); |
| | | switch (periodType) { |
| | | case "TODAY": |
| | | return new DateRange(today.atStartOfDay(), today.plusDays(1).atStartOfDay()); |
| | | case "YESTERDAY": |
| | | return new DateRange(today.minusDays(1).atStartOfDay(), today.atStartOfDay()); |
| | | case "THIS_WEEK": |
| | | LocalDate weekStart = today.with(DayOfWeek.MONDAY); |
| | | return new DateRange(weekStart.atStartOfDay(), today.plusDays(1).atStartOfDay()); |
| | | case "THIS_MONTH": |
| | | LocalDate monthStart = today.withDayOfMonth(1); |
| | | return new DateRange(monthStart.atStartOfDay(), today.plusDays(1).atStartOfDay()); |
| | | default: |
| | | throw new IllegalArgumentException("Unknown period: " + periodType); |
| | | } |
| | | } |
| | | |
| | | private ChatCompletionRequest.Message extractAssistantMessage(ChatCompletionResponse response) { |
| | | if (response == null || response.getChoices() == null || response.getChoices().isEmpty()) { |
| | | throw new IllegalStateException("LLM returned empty response"); |
| | | } |
| | | ChatCompletionRequest.Message message = response.getChoices().get(0).getMessage(); |
| | | if (message == null) { |
| | | throw new IllegalStateException("LLM returned empty message"); |
| | | } |
| | | return message; |
| | | } |
| | | |
| | | private ChatCompletionRequest.Message buildToolMessage(ChatCompletionRequest.ToolCall toolCall, Object toolOutput) { |
| | | ChatCompletionRequest.Message toolMessage = new ChatCompletionRequest.Message(); |
| | | toolMessage.setRole("tool"); |
| | | toolMessage.setTool_call_id(toolCall == null ? null : toolCall.getId()); |
| | | toolMessage.setContent(JSON.toJSONString(toolOutput)); |
| | | return toolMessage; |
| | | } |
| | | |
| | | private String resolveToolName(ChatCompletionRequest.ToolCall toolCall) { |
| | | if (toolCall == null || toolCall.getFunction() == null || toolCall.getFunction().getName() == null |
| | | || toolCall.getFunction().getName().trim().isEmpty()) { |
| | | throw new IllegalArgumentException("missing tool name"); |
| | | } |
| | | return toolCall.getFunction().getName(); |
| | | } |
| | | |
| | | private JSONObject parseArguments(ChatCompletionRequest.ToolCall toolCall) { |
| | | String rawArguments = toolCall == null || toolCall.getFunction() == null |
| | | ? null |
| | | : toolCall.getFunction().getArguments(); |
| | | if (rawArguments == null || rawArguments.trim().isEmpty()) { |
| | | return new JSONObject(); |
| | | } |
| | | try { |
| | | return JSON.parseObject(rawArguments); |
| | | } catch (Exception exception) { |
| | | JSONObject arguments = new JSONObject(); |
| | | arguments.put("_raw", rawArguments); |
| | | return arguments; |
| | | } |
| | | } |
| | | |
| | | private List<Object> filterAllowedTools(List<Object> tools) { |
| | | List<Object> allowedTools = new ArrayList<>(); |
| | | if (tools == null || tools.isEmpty()) { |
| | | return allowedTools; |
| | | } |
| | | for (Object tool : tools) { |
| | | String toolName = resolveOpenAiToolName(tool); |
| | | if (ALLOWED_TOOL_NAMES.contains(toolName)) { |
| | | allowedTools.add(tool); |
| | | } |
| | | } |
| | | return allowedTools; |
| | | } |
| | | |
| | | private String resolveOpenAiToolName(Object tool) { |
| | | if (!(tool instanceof Map<?, ?> toolMap)) { |
| | | return null; |
| | | } |
| | | Object function = toolMap.get("function"); |
| | | if (!(function instanceof Map<?, ?> functionMap)) { |
| | | return null; |
| | | } |
| | | Object name = functionMap.get("name"); |
| | | return name == null ? null : String.valueOf(name); |
| | | } |
| | | |
| | | private DataAnalysisAgentResult buildResult(boolean success, |
| | | String periodType, |
| | | StringBuilder summaryBuffer, |
| | | int toolCallCount, |
| | | UsageCounter usageCounter, |
| | | boolean maxRoundsReached, |
| | | List<McpCallResult> mcpCalls) { |
| | | DataAnalysisAgentResult result = new DataAnalysisAgentResult(); |
| | | result.setSuccess(success); |
| | | result.setPeriodType(periodType); |
| | | result.setTriggerType("agent"); |
| | | result.setToolCallCount(toolCallCount); |
| | | result.setLlmCallCount(usageCounter.getLlmCallCount()); |
| | | result.setPromptTokens(usageCounter.getPromptTokens()); |
| | | result.setCompletionTokens(usageCounter.getCompletionTokens()); |
| | | result.setTotalTokens(usageCounter.getTotalTokens()); |
| | | result.setMaxRoundsReached(maxRoundsReached); |
| | | result.setMcpCalls(mcpCalls != null ? new ArrayList<>(mcpCalls) : new ArrayList<>()); |
| | | |
| | | String summary = summaryBuffer == null ? "" : summaryBuffer.toString().trim(); |
| | | if (toolCallCount <= 0) { |
| | | summary = "数据分析 Agent 未调用任何分析工具,未生成报告。" + (summary.isEmpty() ? "" : "\n" + summary); |
| | | } |
| | | if (maxRoundsReached) { |
| | | summary = summary + "\n数据分析 Agent 达到最大工具调用轮次,已停止。"; |
| | | } |
| | | result.setSummary(summary); |
| | | return result; |
| | | } |
| | | |
| | | private void appendSummary(StringBuilder summaryBuffer, String content) { |
| | | if (summaryBuffer == null || content == null || content.trim().isEmpty()) { |
| | | return; |
| | | } |
| | | if (summaryBuffer.length() > 0) { |
| | | summaryBuffer.append('\n'); |
| | | } |
| | | summaryBuffer.append(content.trim()); |
| | | } |
| | | |
| | | private String normalizePeriodType(String periodType) { |
| | | if (periodType == null || periodType.trim().isEmpty()) { |
| | | return "YESTERDAY"; |
| | | } |
| | | return periodType.trim().toUpperCase(); |
| | | } |
| | | |
| | | private static class DateRange { |
| | | final LocalDateTime start; |
| | | final LocalDateTime end; |
| | | |
| | | DateRange(LocalDateTime start, LocalDateTime end) { |
| | | this.start = start; |
| | | this.end = end; |
| | | } |
| | | } |
| | | |
| | | private static class UsageCounter { |
| | | private long promptTokens; |
| | | private long completionTokens; |
| | | private long totalTokens; |
| | | private int llmCallCount; |
| | | |
| | | void add(ChatCompletionResponse.Usage usage) { |
| | | llmCallCount++; |
| | | if (usage == null) { |
| | | return; |
| | | } |
| | | promptTokens += usage.getPromptTokens() == null ? 0L : usage.getPromptTokens(); |
| | | completionTokens += usage.getCompletionTokens() == null ? 0L : usage.getCompletionTokens(); |
| | | totalTokens += usage.getTotalTokens() == null ? 0L : usage.getTotalTokens(); |
| | | } |
| | | |
| | | long getPromptTokens() { return promptTokens; } |
| | | long getCompletionTokens() { return completionTokens; } |
| | | long getTotalTokens() { return totalTokens; } |
| | | int getLlmCallCount() { return llmCallCount; } |
| | | } |
| | | } |
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.alibaba.fastjson.JSON; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import com.zy.ai.service.*; |
| | | import com.zy.common.utils.RedisUtil; |
| | | import com.zy.core.enums.RedisKeyType; |
| | | import com.zy.system.entity.OperateLog; |
| | | import com.zy.system.service.ConfigService; |
| | | import com.zy.system.service.OperateLogService; |
| | | import lombok.RequiredArgsConstructor; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.stereotype.Service; |
| | | |
| | | import java.util.Arrays; |
| | | import java.util.Date; |
| | | import java.util.List; |
| | | import java.util.UUID; |
| | | |
| | | @Slf4j |
| | | @Service("dataAnalysisCoordinatorService") |
| | | @RequiredArgsConstructor |
| | | public class DataAnalysisCoordinatorServiceImpl implements DataAnalysisCoordinatorService { |
| | | |
| | | private static final String CONFIG_ENABLED = "aiDataAnalysisEnabled"; |
| | | private static final String CONFIG_PERIODS = "aiDataAnalysisScheduledPeriods"; |
| | | private static final int RUNNING_LOCK_SECONDS = 30 * 60; |
| | | private static final long SYSTEM_USER_ID = 9527L; |
| | | |
| | | private final ConfigService configService; |
| | | private final DataAnalysisAgentService dataAnalysisAgentService; |
| | | private final AiDataAnalysisReportService aiDataAnalysisReportService; |
| | | private final DataAnalysisFileStorageService dataAnalysisFileStorageService; |
| | | private final DataAnalysisUploadService dataAnalysisUploadService; |
| | | private final RedisUtil redisUtil; |
| | | private final OperateLogService operateLogService; |
| | | |
| | | @Override |
| | | public boolean isEnabled() { |
| | | String value = configService.getConfigValue(CONFIG_ENABLED, "0"); |
| | | return "1".equals(value.trim()); |
| | | } |
| | | |
| | | @Override |
| | | public void setEnabled(boolean enabled) { |
| | | configService.saveConfigValue(CONFIG_ENABLED, enabled ? "1" : "0"); |
| | | configService.refreshSystemConfigCache(); |
| | | } |
| | | |
| | | @Override |
| | | public DataAnalysisCoordinatorResult runAnalysisIfEligible() { |
| | | if (!isEnabled()) { |
| | | return DataAnalysisCoordinatorResult.skipped("disabled"); |
| | | } |
| | | |
| | | String periods = configService.getConfigValue(CONFIG_PERIODS, "YESTERDAY"); |
| | | List<String> periodList = Arrays.stream(periods.split(",")) |
| | | .map(String::trim) |
| | | .filter(s -> !s.isEmpty()) |
| | | .toList(); |
| | | |
| | | if (periodList.isEmpty()) { |
| | | return DataAnalysisCoordinatorResult.skipped("no_configured_periods"); |
| | | } |
| | | |
| | | // Run the first configured period |
| | | String periodType = periodList.get(0); |
| | | return runWithLock("auto", periodType); |
| | | } |
| | | |
| | | @Override |
| | | public DataAnalysisCoordinatorResult runManualAnalysis(String periodType) { |
| | | if (!isEnabled()) { |
| | | return DataAnalysisCoordinatorResult.skipped("disabled"); |
| | | } |
| | | return runWithLock("manual", periodType); |
| | | } |
| | | |
| | | private DataAnalysisCoordinatorResult runWithLock(String triggerType, String periodType) { |
| | | String lockKey = RedisKeyType.AI_DATA_ANALYSIS_RUNNING_LOCK.key; |
| | | String lockToken = UUID.randomUUID().toString(); |
| | | if (!redisUtil.trySetStringIfAbsent(lockKey, lockToken, RUNNING_LOCK_SECONDS)) { |
| | | return DataAnalysisCoordinatorResult.skipped("running_lock_not_acquired"); |
| | | } |
| | | |
| | | Date startTime = new Date(); |
| | | DataAnalysisAgentService.DataAnalysisAgentResult agentResult = null; |
| | | try { |
| | | agentResult = dataAnalysisAgentService.runAnalysis(periodType); |
| | | saveReport(triggerType, periodType, startTime, agentResult); |
| | | safeWriteOperateLog(triggerType, periodType, agentResult); |
| | | return DataAnalysisCoordinatorResult.triggered(agentResult); |
| | | } catch (Exception exception) { |
| | | log.error("Data analysis coordinator failed to run agent", exception); |
| | | agentResult = failedAgentResult(periodType, exception); |
| | | saveReport(triggerType, periodType, startTime, agentResult); |
| | | safeWriteOperateLog(triggerType, periodType, agentResult); |
| | | return DataAnalysisCoordinatorResult.triggered(agentResult); |
| | | } finally { |
| | | redisUtil.compareAndDelete(lockKey, lockToken); |
| | | } |
| | | } |
| | | |
| | | private void saveReport(String triggerType, String periodType, Date startTime, |
| | | DataAnalysisAgentService.DataAnalysisAgentResult agentResult) { |
| | | try { |
| | | AiDataAnalysisReport report = new AiDataAnalysisReport(); |
| | | report.setPeriodType(periodType); |
| | | report.setPeriodStart(resolvePeriodStart(periodType)); |
| | | report.setPeriodEnd(resolvePeriodEnd(periodType)); |
| | | report.setTriggerType(triggerType); |
| | | report.setStatus(Boolean.TRUE.equals(agentResult.getSuccess()) ? "success" : "failed"); |
| | | report.setSummary(agentResult.getSummary()); |
| | | report.setStructuredData(agentResult.getMcpCalls() != null ? JSON.toJSONString(agentResult.getMcpCalls()) : null); |
| | | report.setLlmCallCount(agentResult.getLlmCallCount()); |
| | | report.setPromptTokens(agentResult.getPromptTokens() != null ? agentResult.getPromptTokens().intValue() : 0); |
| | | report.setCompletionTokens(agentResult.getCompletionTokens() != null ? agentResult.getCompletionTokens().intValue() : 0); |
| | | report.setTotalTokens(agentResult.getTotalTokens() != null ? agentResult.getTotalTokens().intValue() : 0); |
| | | report.setCreateTime(startTime); |
| | | report.setFinishTime(new Date()); |
| | | |
| | | // Save to local file |
| | | String filePath = dataAnalysisFileStorageService.saveReport(report); |
| | | report.setLocalFilePath(filePath); |
| | | |
| | | // Save to DB |
| | | aiDataAnalysisReportService.save(report); |
| | | |
| | | // Try upload |
| | | DataAnalysisUploadService.UploadResult uploadResult = dataAnalysisUploadService.upload(report); |
| | | report.setUploadStatus(uploadResult.isSuccess() ? "uploaded" : (uploadResult.isSkipped() ? "skipped" : "failed")); |
| | | aiDataAnalysisReportService.updateById(report); |
| | | } catch (Exception e) { |
| | | log.error("Failed to save data analysis report", e); |
| | | } |
| | | } |
| | | |
| | | private Date resolvePeriodStart(String periodType) { |
| | | // Simplified - the agent resolves the actual range |
| | | return new Date(); |
| | | } |
| | | |
| | | private Date resolvePeriodEnd(String periodType) { |
| | | return new Date(); |
| | | } |
| | | |
| | | private void safeWriteOperateLog(String triggerType, String periodType, |
| | | DataAnalysisAgentService.DataAnalysisAgentResult agentResult) { |
| | | try { |
| | | String memo = "AI数据分析 " + periodType + " " + triggerType |
| | | + " 结果:" + (Boolean.TRUE.equals(agentResult.getSuccess()) ? "成功" : "失败"); |
| | | OperateLog operateLog = new OperateLog(); |
| | | operateLog.setUserId(SYSTEM_USER_ID); |
| | | operateLog.setAction("AI数据分析"); |
| | | operateLog.setRequest(memo); |
| | | operateLog.setCreateTime(new Date()); |
| | | operateLogService.save(operateLog); |
| | | } catch (Exception e) { |
| | | log.warn("Failed to write operate log for data analysis", e); |
| | | } |
| | | } |
| | | |
| | | private DataAnalysisAgentService.DataAnalysisAgentResult failedAgentResult(String periodType, Exception exception) { |
| | | DataAnalysisAgentService.DataAnalysisAgentResult result = new DataAnalysisAgentService.DataAnalysisAgentResult(); |
| | | result.setSuccess(false); |
| | | result.setPeriodType(periodType); |
| | | result.setTriggerType("agent"); |
| | | result.setSummary("数据分析任务执行异常: " + exception.getMessage()); |
| | | result.setToolCallCount(0); |
| | | result.setLlmCallCount(0); |
| | | result.setPromptTokens(0L); |
| | | result.setCompletionTokens(0L); |
| | | result.setTotalTokens(0L); |
| | | result.setMaxRoundsReached(false); |
| | | return result; |
| | | } |
| | | } |
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.alibaba.fastjson.JSON; |
| | | import com.alibaba.fastjson.serializer.SerializerFeature; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import com.zy.ai.service.DataAnalysisFileStorageService; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.beans.factory.annotation.Value; |
| | | import org.springframework.stereotype.Service; |
| | | |
| | | import java.io.File; |
| | | import java.io.FileOutputStream; |
| | | import java.io.OutputStreamWriter; |
| | | import java.nio.charset.StandardCharsets; |
| | | import java.text.SimpleDateFormat; |
| | | import java.util.Date; |
| | | import java.util.LinkedHashMap; |
| | | import java.util.Map; |
| | | |
| | | @Slf4j |
| | | @Service("dataAnalysisFileStorageService") |
| | | public class DataAnalysisFileStorageServiceImpl implements DataAnalysisFileStorageService { |
| | | |
| | | @Value("${dataAnalysisStorage.loggingPath:../stock/out/wcs/aiAnalysis}") |
| | | private String basePath; |
| | | |
| | | @Override |
| | | public String saveReport(AiDataAnalysisReport report) { |
| | | try { |
| | | SimpleDateFormat dirFormat = new SimpleDateFormat("yyyyMMdd"); |
| | | SimpleDateFormat fileFormat = new SimpleDateFormat("yyyyMMdd_HHmmss"); |
| | | String dateDir = dirFormat.format(report.getCreateTime()); |
| | | String timestamp = fileFormat.format(report.getCreateTime()); |
| | | |
| | | File dir = new File(basePath, dateDir); |
| | | if (!dir.exists() && !dir.mkdirs()) { |
| | | log.warn("Failed to create analysis storage directory: {}", dir.getAbsolutePath()); |
| | | return null; |
| | | } |
| | | |
| | | String fileName = "analysis_" + report.getPeriodType() + "_" + timestamp + ".json"; |
| | | File file = new File(dir, fileName); |
| | | |
| | | Map<String, Object> content = new LinkedHashMap<>(); |
| | | content.put("periodType", report.getPeriodType()); |
| | | content.put("periodStart", report.getPeriodStart()); |
| | | content.put("periodEnd", report.getPeriodEnd()); |
| | | content.put("triggerType", report.getTriggerType()); |
| | | content.put("status", report.getStatus()); |
| | | content.put("summary", report.getSummary()); |
| | | content.put("structuredData", report.getStructuredData()); |
| | | content.put("llmCallCount", report.getLlmCallCount()); |
| | | content.put("totalTokens", report.getTotalTokens()); |
| | | content.put("createTime", report.getCreateTime()); |
| | | content.put("finishTime", report.getFinishTime()); |
| | | |
| | | try (OutputStreamWriter writer = new OutputStreamWriter( |
| | | new FileOutputStream(file), StandardCharsets.UTF_8)) { |
| | | writer.write(JSON.toJSONString(content, SerializerFeature.PrettyFormat, |
| | | SerializerFeature.WriteDateUseDateFormat)); |
| | | } |
| | | |
| | | String relativePath = dateDir + "/" + fileName; |
| | | log.info("Data analysis report saved to file: {}", relativePath); |
| | | return relativePath; |
| | | } catch (Exception e) { |
| | | log.error("Failed to save data analysis report to file", e); |
| | | return null; |
| | | } |
| | | } |
| | | } |
| New file |
| | |
| | | package com.zy.ai.service.impl; |
| | | |
| | | import com.alibaba.fastjson.JSON; |
| | | import com.zy.ai.entity.AiDataAnalysisReport; |
| | | import com.zy.ai.entity.AiDataAnalysisUploadLog; |
| | | import com.zy.ai.service.AiDataAnalysisUploadLogService; |
| | | import com.zy.ai.service.DataAnalysisUploadService; |
| | | import com.zy.common.utils.HttpHandler; |
| | | import com.zy.system.service.ConfigService; |
| | | import lombok.RequiredArgsConstructor; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.stereotype.Service; |
| | | |
| | | import java.util.Date; |
| | | import java.util.LinkedHashMap; |
| | | import java.util.Map; |
| | | |
| | | @Slf4j |
| | | @Service("dataAnalysisUploadService") |
| | | @RequiredArgsConstructor |
| | | public class DataAnalysisUploadServiceImpl implements DataAnalysisUploadService { |
| | | |
| | | private static final String CONFIG_UPLOAD_ENABLED = "aiDataAnalysisUploadEnabled"; |
| | | private static final String CONFIG_UPLOAD_URL = "aiDataAnalysisUploadUrl"; |
| | | |
| | | private final ConfigService configService; |
| | | private final AiDataAnalysisUploadLogService aiDataAnalysisUploadLogService; |
| | | |
| | | @Override |
| | | public UploadResult upload(AiDataAnalysisReport report) { |
| | | if (!isUploadEnabled()) { |
| | | return UploadResult.skipped(); |
| | | } |
| | | |
| | | String url = configService.getConfigValue(CONFIG_UPLOAD_URL, ""); |
| | | if (url == null || url.trim().isEmpty()) { |
| | | return UploadResult.skipped(); |
| | | } |
| | | |
| | | Map<String, Object> payload = new LinkedHashMap<>(); |
| | | payload.put("reportId", report.getId()); |
| | | payload.put("periodType", report.getPeriodType()); |
| | | payload.put("periodStart", report.getPeriodStart()); |
| | | payload.put("periodEnd", report.getPeriodEnd()); |
| | | payload.put("triggerType", report.getTriggerType()); |
| | | payload.put("status", report.getStatus()); |
| | | payload.put("summary", report.getSummary()); |
| | | payload.put("structuredData", report.getStructuredData()); |
| | | payload.put("totalTokens", report.getTotalTokens()); |
| | | payload.put("createTime", report.getCreateTime()); |
| | | |
| | | String jsonBody = JSON.toJSONString(payload); |
| | | AiDataAnalysisUploadLog uploadLog = new AiDataAnalysisUploadLog(); |
| | | uploadLog.setReportId(report.getId()); |
| | | uploadLog.setUploadUrl(url); |
| | | uploadLog.setRequestBody(jsonBody); |
| | | uploadLog.setCreateTime(new Date()); |
| | | |
| | | try { |
| | | HttpHandler httpHandler = new HttpHandler.Builder() |
| | | .setUri(url) |
| | | .setJson(jsonBody) |
| | | .setTimeout(30, java.util.concurrent.TimeUnit.SECONDS) |
| | | .build(); |
| | | |
| | | String response = httpHandler.doPost(); |
| | | uploadLog.setHttpStatus(200); |
| | | uploadLog.setResponseBody(response); |
| | | uploadLog.setResult("success"); |
| | | aiDataAnalysisUploadLogService.save(uploadLog); |
| | | log.info("Data analysis report uploaded, reportId={}, url={}", report.getId(), url); |
| | | return UploadResult.success(200, response); |
| | | } catch (Exception e) { |
| | | log.warn("Failed to upload data analysis report, reportId={}, url={}", report.getId(), url, e); |
| | | uploadLog.setResult("failed"); |
| | | uploadLog.setErrorMessage(e.getMessage()); |
| | | aiDataAnalysisUploadLogService.save(uploadLog); |
| | | return UploadResult.failed(null, e.getMessage()); |
| | | } |
| | | } |
| | | |
| | | private boolean isUploadEnabled() { |
| | | String value = configService.getConfigValue(CONFIG_UPLOAD_ENABLED, "0"); |
| | | return "1".equals(value.trim()); |
| | | } |
| | | } |
| | |
| | | public class LlmCallLogServiceImpl extends ServiceImpl<LlmCallLogMapper, LlmCallLog> implements LlmCallLogService { |
| | | |
| | | private volatile boolean disabled = false; |
| | | private volatile long lastRetryTime = 0; |
| | | private static final long RETRY_INTERVAL_MS = 60_000; // 1 分钟后重试 |
| | | |
| | | @Override |
| | | public void saveIgnoreError(LlmCallLog logItem) { |
| | | if (logItem == null || disabled) { |
| | | if (logItem == null) { |
| | | return; |
| | | } |
| | | if (disabled) { |
| | | // 定期重试,防止表后来创建了但 disabled 一直为 true |
| | | long now = System.currentTimeMillis(); |
| | | if (now - lastRetryTime < RETRY_INTERVAL_MS) { |
| | | return; |
| | | } |
| | | lastRetryTime = now; |
| | | log.info("LLM调用日志之前已禁用,尝试重新写入..."); |
| | | } |
| | | try { |
| | | save(logItem); |
| | | if (disabled) { |
| | | disabled = false; |
| | | log.info("LLM调用日志写入成功,已恢复日志记录"); |
| | | } |
| | | } catch (Exception e) { |
| | | String msg = e.getMessage() == null ? "" : e.getMessage(); |
| | | if (msg.contains("doesn't exist") || msg.contains("不存在")) { |
| | | disabled = true; |
| | | log.warn("LLM调用日志表不存在,日志记录已自动关闭,请先执行建表SQL"); |
| | | lastRetryTime = System.currentTimeMillis(); |
| | | log.warn("LLM调用日志表不存在,日志记录已暂停,请先执行建表SQL"); |
| | | return; |
| | | } |
| | | log.warn("写入LLM调用日志失败: {}", msg); |
| New file |
| | |
| | | package com.zy.ai.timer; |
| | | |
| | | import com.zy.ai.service.DataAnalysisCoordinatorService; |
| | | import lombok.extern.slf4j.Slf4j; |
| | | import org.springframework.beans.factory.annotation.Autowired; |
| | | import org.springframework.scheduling.annotation.Scheduled; |
| | | import org.springframework.stereotype.Component; |
| | | |
| | | @Slf4j |
| | | @Component |
| | | public class DataAnalysisScheduler { |
| | | |
| | | @Autowired |
| | | private DataAnalysisCoordinatorService dataAnalysisCoordinatorService; |
| | | |
| | | @Scheduled(cron = "0 0 1 * * ?") |
| | | public void runDailyAnalysis() { |
| | | try { |
| | | DataAnalysisCoordinatorService.DataAnalysisCoordinatorResult result = |
| | | dataAnalysisCoordinatorService.runAnalysisIfEligible(); |
| | | if (Boolean.TRUE.equals(result.getSkipped())) { |
| | | log.debug("Data analysis scheduler skipped, reason={}", result.getReason()); |
| | | return; |
| | | } |
| | | log.info("Data analysis scheduler triggered, success={}", |
| | | result.getAgentResult() == null ? null : result.getAgentResult().getSuccess()); |
| | | } catch (Exception e) { |
| | | log.error("Data analysis scheduler failed", e); |
| | | } |
| | | } |
| | | } |
| | |
| | | "- 不得臆测回滚原因。"); |
| | | return blocks; |
| | | } |
| | | if (scene == AiPromptScene.DATA_ANALYSIS) { |
| | | blocks.put(AiPromptBlockType.BASE_POLICY, |
| | | "你是一名 WCS(仓储控制系统)运营数据分析师,精通自动化立库的任务调度、设备运行和故障分析。\n\n" + |
| | | "你的职责是:基于系统提供的历史运营数据,生成结构化的数据分析报告,帮助运维人员了解仓库运行状况、发现潜在问题并提出优化建议。"); |
| | | blocks.put(AiPromptBlockType.TOOL_POLICY, |
| | | "==================== 可用 MCP 工具 ====================\n\n" + |
| | | "你可以调用以下工具获取聚合统计数据(工具返回 JSON):\n" + |
| | | "- " + localTool("analysis_query_task_throughput") + ":查询任务吞吐量(任务总量、入库/出库/移库数量、平均时长、故障汇总)\n" + |
| | | "- " + localTool("analysis_query_device_fault_summary") + ":查询设备故障汇总(按设备类型统计故障次数和时长)\n" + |
| | | "- " + localTool("analysis_query_device_utilization") + ":查询设备利用率(按设备编号统计任务分配量、平均时长)\n" + |
| | | "- " + localTool("analysis_query_error_logs") + ":查询设备错误日志统计(按设备类型统计错误次数)\n\n" + |
| | | "使用策略:\n" + |
| | | "1)先调用 throughput 和 fault 工具获取总体概况。\n" + |
| | | "2)如有异常指标,再调用 utilization 和 error_logs 深入分析。\n" + |
| | | "3)所有工具都需要传入 startTime 和 endTime 参数。\n" + |
| | | "4)禁止臆测,所有数据必须来自工具返回。"); |
| | | blocks.put(AiPromptBlockType.OUTPUT_CONTRACT, |
| | | "==================== 输出要求 ====================\n\n" + |
| | | "请使用简体中文,按以下结构输出分析报告:\n\n" + |
| | | "## 1. 任务概览\n" + |
| | | "- 任务总量、入库/出库/移库分布\n" + |
| | | "- 平均任务时长、各阶段时长分布\n" + |
| | | "- 与正常水平对比(如无基线数据,说明缺少对比依据)\n\n" + |
| | | "## 2. 设备运行状况\n" + |
| | | "- 各设备类型任务分配情况\n" + |
| | | "- 设备利用率分析(负载是否均衡)\n" + |
| | | "- 异常设备识别(空闲率过高/过低、负载不均等)\n\n" + |
| | | "## 3. 故障分析\n" + |
| | | "- 故障总量和故障率\n" + |
| | | "- 按设备类型分布的故障统计\n" + |
| | | "- 主要故障设备和故障模式\n\n" + |
| | | "## 4. 风险与建议\n" + |
| | | "- 当前存在的主要风险点\n" + |
| | | "- 具体可执行的优化建议(1-5 条)\n" + |
| | | "- 需要关注的设备或流程"); |
| | | blocks.put(AiPromptBlockType.SCENE_PLAYBOOK, |
| | | "==================== 分析流程 ====================\n\n" + |
| | | "Step 1 获取总体数据\n" + |
| | | "- 调用 " + localTool("analysis_query_task_throughput") + " 获取任务吞吐量\n" + |
| | | "- 调用 " + localTool("analysis_query_device_fault_summary") + " 获取故障汇总\n\n" + |
| | | "Step 2 深入分析\n" + |
| | | "- 调用 " + localTool("analysis_query_device_utilization") + " 分析设备负载均衡性\n" + |
| | | "- 调用 " + localTool("analysis_query_error_logs") + " 分析错误分布\n\n" + |
| | | "Step 3 综合分析\n" + |
| | | "- 将各维度数据关联分析\n" + |
| | | "- 识别异常指标和潜在风险\n" + |
| | | "- 提出针对性优化建议\n\n" + |
| | | "Step 4 输出报告\n" + |
| | | "- 按输出要求格式化报告\n" + |
| | | "- 确保所有结论都有数据支撑"); |
| | | return blocks; |
| | | } |
| | | throw new IllegalArgumentException("不支持的 Prompt 场景: " + scene.getCode()); |
| | | } |
| | | |
| | |
| | | import com.baomidou.mybatisplus.core.mapper.BaseMapper; |
| | | import com.zy.asrs.entity.WrkAnalysis; |
| | | import org.apache.ibatis.annotations.Mapper; |
| | | import org.apache.ibatis.annotations.Param; |
| | | import org.springframework.stereotype.Repository; |
| | | |
| | | import java.util.Date; |
| | | import java.util.List; |
| | | import java.util.Map; |
| | | |
@Mapper
@Repository
public interface WrkAnalysisMapper extends BaseMapper<WrkAnalysis> {

    /**
     * Aggregates task throughput and fault totals for tasks whose finish_time
     * falls in [startTime, endTime) — backed by the XML select of the same id.
     *
     * @param startTime inclusive lower bound on finish_time
     * @param endTime   exclusive upper bound on finish_time
     * @return a single row of counters/averages keyed by the SQL aliases
     */
    Map<String, Object> aggregateThroughput(@Param("startTime") Date startTime, @Param("endTime") Date endTime);

    /**
     * Groups finished tasks per device (type + number) with task counts,
     * average duration and fault totals — backed by the XML select of the same id.
     *
     * @param startTime inclusive lower bound on finish_time
     * @param endTime   exclusive upper bound on finish_time
     * @return one row per device, ordered by task count descending
     */
    List<Map<String, Object>> groupByDevice(@Param("startTime") Date startTime, @Param("endTime") Date endTime);
}
| | |
| | | AI_AUTO_TUNE_RUNNING_LOCK("ai_auto_tune_running_lock"), |
| | | AI_AUTO_TUNE_APPLY_LOCK("ai_auto_tune_apply_lock"), |
| | | AI_AUTO_TUNE_LAST_TRIGGER_GUARD("ai_auto_tune_last_trigger_guard"), |
| | | AI_DATA_ANALYSIS_RUNNING_LOCK("ai_data_analysis_running_lock"), |
| | | AI_DATA_ANALYSIS_LAST_TRIGGER_GUARD("ai_data_analysis_last_trigger_guard"), |
| | | PLANNER_SCHEDULE("planner_schedule_"), |
| | | HIGH_PRIVILEGE_GRANT("high_privilege_grant_"), |
| | | ; |
| | |
| | | import com.core.common.R; |
| | | import com.zy.ai.entity.AiAutoTuneJob; |
| | | import com.zy.ai.entity.AiChatSession; |
| | | import com.zy.ai.entity.AiTokenUsage; |
| | | import com.zy.ai.entity.LlmCallLog; |
| | | import com.zy.ai.entity.LlmRouteConfig; |
| | | import com.zy.ai.enums.AiPromptScene; |
| | | import com.zy.ai.mapper.AiChatSessionMapper; |
| | | import com.zy.ai.mapper.AiTokenUsageMapper; |
| | | import com.zy.ai.service.AiAutoTuneJobService; |
| | | import com.zy.ai.service.LlmCallLogService; |
| | | import com.zy.ai.service.LlmRouteConfigService; |
| | |
| | | private AiAutoTuneJobService aiAutoTuneJobService; |
| | | @Autowired |
| | | private AiChatSessionMapper aiChatSessionMapper; |
| | | @Autowired |
| | | private AiTokenUsageMapper aiTokenUsageMapper; |
| | | @Autowired |
| | | private DevicePingFileStorageService devicePingFileStorageService; |
| | | |
| | |
| | | private Map<String, Object> buildAiStats() { |
| | | Map<String, Object> result = new LinkedHashMap<>(); |
| | | |
| | | // 从独立累计表读取 token 统计 |
| | | long tokenTotal = 0L; |
| | | long promptTokenTotal = 0L; |
| | | long completionTokenTotal = 0L; |
| | | long llmCallCountTotal = 0L; |
| | | try { |
| | | AiTokenUsage tokenUsage = aiTokenUsageMapper.selectById(1); |
| | | if (tokenUsage != null) { |
| | | promptTokenTotal = safeCount(tokenUsage.getPromptTokens()); |
| | | completionTokenTotal = safeCount(tokenUsage.getCompletionTokens()); |
| | | tokenTotal = safeCount(tokenUsage.getTotalTokens()); |
| | | llmCallCountTotal = safeCount(tokenUsage.getLlmCallCount()); |
| | | } |
| | | } catch (Exception e) { |
| | | log.warn("dashboard ai token usage load failed: {}", safeMessage(e)); |
| | | } |
| | | |
| | | // 会话统计(保留用于显示会话数和提问轮次) |
| | | long askCount = 0L; |
| | | long sessionCount = 0L; |
| | | long autoTunePromptTokenTotal = 0L; |
| | | long autoTuneCompletionTokenTotal = 0L; |
| | | long autoTuneTokenTotal = 0L; |
| | | try { |
| | | List<AiChatSession> sessions = aiChatSessionMapper.selectList(new QueryWrapper<AiChatSession>() |
| | | .select("id", "sum_prompt_tokens", "sum_completion_tokens", "sum_total_tokens", "ask_count")); |
| | | .select("id", "ask_count")); |
| | | sessionCount = sessions == null ? 0L : sessions.size(); |
| | | if (sessions != null) { |
| | | for (AiChatSession session : sessions) { |
| | | promptTokenTotal += safeCount(session == null ? null : session.getSumPromptTokens()); |
| | | completionTokenTotal += safeCount(session == null ? null : session.getSumCompletionTokens()); |
| | | tokenTotal += safeCount(session == null ? null : session.getSumTotalTokens()); |
| | | askCount += safeCount(session == null ? null : session.getAskCount()); |
| | | } |
| | | } |
| | | } catch (Exception e) { |
| | | log.warn("dashboard ai session stats load failed: {}", safeMessage(e)); |
| | | } |
| | | |
| | | try { |
| | | List<Map<String, Object>> autoTuneRows = aiAutoTuneJobService.listMaps(new QueryWrapper<AiAutoTuneJob>() |
| | | .select("COALESCE(SUM(prompt_tokens), 0) AS prompt_token_total", |
| | | "COALESCE(SUM(completion_tokens), 0) AS completion_token_total", |
| | | "COALESCE(SUM(total_tokens), 0) AS token_total") |
| | | .eq("prompt_scene_code", AiPromptScene.AUTO_TUNE_DISPATCH.getCode())); |
| | | Map<String, Object> autoTuneRow = autoTuneRows == null || autoTuneRows.isEmpty() |
| | | ? Collections.emptyMap() |
| | | : autoTuneRows.get(0); |
| | | autoTunePromptTokenTotal = toLong(autoTuneRow.get("prompt_token_total")); |
| | | autoTuneCompletionTokenTotal = toLong(autoTuneRow.get("completion_token_total")); |
| | | autoTuneTokenTotal = toLong(autoTuneRow.get("token_total")); |
| | | |
| | | // Agent 自动调参不生成 sys_ai_chat_session,会单独落到 sys_ai_auto_tune_job。 |
| | | promptTokenTotal += autoTunePromptTokenTotal; |
| | | completionTokenTotal += autoTuneCompletionTokenTotal; |
| | | tokenTotal += autoTuneTokenTotal; |
| | | } catch (Exception e) { |
| | | log.warn("dashboard ai auto tune token stats load failed: {}", safeMessage(e)); |
| | | } |
| | | |
| | | List<LlmRouteConfig> routes = Collections.emptyList(); |
| | |
| | | overview.put("tokenTotal", tokenTotal); |
| | | overview.put("promptTokenTotal", promptTokenTotal); |
| | | overview.put("completionTokenTotal", completionTokenTotal); |
| | | overview.put("autoTuneTokenTotal", autoTuneTokenTotal); |
| | | overview.put("autoTunePromptTokenTotal", autoTunePromptTokenTotal); |
| | | overview.put("autoTuneCompletionTokenTotal", autoTuneCompletionTokenTotal); |
| | | overview.put("llmCallCountTotal", llmCallCountTotal); |
| | | overview.put("askCount", askCount); |
| | | overview.put("sessionCount", sessionCount); |
| | | overview.put("routeTotal", routeTotal); |
| | |
| | | # 系统版本信息 |
| | | app: |
  version: 3.0.1.7
| | | version-type: prd # prd 或 dev |
| | | i18n: |
| | | default-locale: zh-CN |
| New file |
| | |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.zy.ai.mapper.AiTokenUsageMapper">

    <!-- Column mapping for the singleton cumulative token usage row. -->
    <resultMap id="BaseResultMap" type="com.zy.ai.entity.AiTokenUsage">
        <id column="id" property="id" />
        <result column="prompt_tokens" property="promptTokens" />
        <result column="completion_tokens" property="completionTokens" />
        <result column="total_tokens" property="totalTokens" />
        <result column="llm_call_count" property="llmCallCount" />
        <result column="update_time" property="updateTime" />
    </resultMap>

    <!-- Atomically adds the supplied deltas to the singleton row (id = 1).
         NOTE(review): the row must be seeded by the install SQL; if it is
         missing, this UPDATE matches nothing and the deltas are silently lost. -->
    <update id="incrementTokens">
        UPDATE sys_ai_token_usage
        SET prompt_tokens = prompt_tokens + #{promptTokens},
            completion_tokens = completion_tokens + #{completionTokens},
            total_tokens = total_tokens + #{totalTokens},
            llm_call_count = llm_call_count + #{callCount},
            update_time = NOW()
        WHERE id = 1
    </update>

</mapper>
| | |
| | | <result column="update_time" property="updateTime" /> |
| | | </resultMap> |
| | | |
    <select id="aggregateThroughput" resultType="map">
        <!-- Overall throughput and fault aggregates for tasks finished in
             [startTime, endTime). Upper bound is exclusive so adjacent
             periods do not double-count rows on the boundary. -->
        SELECT
            COUNT(*) as taskCount,
            SUM(CASE WHEN io_type = 1 THEN 1 ELSE 0 END) as inboundCount,
            SUM(CASE WHEN io_type = 2 THEN 1 ELSE 0 END) as outboundCount,
            SUM(CASE WHEN io_type NOT IN (1, 2) THEN 1 ELSE 0 END) as moveCount,
            ROUND(AVG(total_duration_ms)) as avgTotalDurationMs,
            ROUND(AVG(station_duration_ms)) as avgStationDurationMs,
            ROUND(AVG(crane_duration_ms)) as avgCraneDurationMs,
            SUM(has_fault) as faultTaskCount,
            SUM(fault_count) as totalFaultCount,
            SUM(fault_duration_ms) as totalFaultDurationMs,
            SUM(crn_fault_count) as crnFaultCount,
            SUM(crn_fault_duration_ms) as crnFaultDurationMs,
            SUM(dual_crn_fault_count) as dualCrnFaultCount,
            SUM(dual_crn_fault_duration_ms) as dualCrnFaultDurationMs,
            SUM(rgv_fault_count) as rgvFaultCount,
            SUM(rgv_fault_duration_ms) as rgvFaultDurationMs,
            SUM(station_fault_count) as stationFaultCount,
            SUM(station_fault_duration_ms) as stationFaultDurationMs
        FROM asr_wrk_analysis
        WHERE finish_time >= #{startTime} AND finish_time < #{endTime}
    </select>
| | | |
    <select id="groupByDevice" resultType="map">
        <!-- Per-device workload for tasks finished in [startTime, endTime).
             Device type is derived from whichever *_no column is non-null,
             checked CRN first — a row with several device numbers set counts
             as CRN. NOTE(review): GROUP BY / ORDER BY reference SELECT
             aliases, which is a MySQL extension; confirm if this mapper must
             also run against other databases. -->
        SELECT
            CASE
                WHEN crn_no IS NOT NULL THEN 'CRN'
                WHEN dual_crn_no IS NOT NULL THEN 'DUAL_CRN'
                WHEN rgv_no IS NOT NULL THEN 'RGV'
                ELSE 'UNKNOWN'
            END as deviceType,
            COALESCE(crn_no, dual_crn_no, rgv_no) as deviceNo,
            COUNT(*) as taskCount,
            ROUND(AVG(total_duration_ms)) as avgDurationMs,
            SUM(has_fault) as faultTaskCount,
            SUM(fault_count) as faultCount,
            SUM(fault_duration_ms) as faultDurationMs
        FROM asr_wrk_analysis
        WHERE finish_time >= #{startTime} AND finish_time < #{endTime}
        GROUP BY deviceType, deviceNo
        ORDER BY taskCount DESC
    </select>
| | | |
| | | </mapper> |
| New file |
| | |
-- AI data analysis report: one row per generated report (scheduled or manual).
CREATE TABLE IF NOT EXISTS sys_ai_data_analysis_report (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    period_type VARCHAR(20) NOT NULL COMMENT 'TODAY/YESTERDAY/THIS_WEEK/THIS_MONTH',
    period_start DATETIME COMMENT '分析周期开始时间',
    period_end DATETIME COMMENT '分析周期结束时间',
    trigger_type VARCHAR(20) NOT NULL COMMENT 'auto/manual',
    status VARCHAR(20) NOT NULL DEFAULT 'pending' COMMENT 'pending/running/success/failed',
    summary TEXT COMMENT 'LLM生成的自然语言分析报告',
    structured_data LONGTEXT COMMENT 'JSON格式的结构化分析数据',
    llm_call_count INT DEFAULT 0,
    prompt_tokens INT DEFAULT 0,
    completion_tokens INT DEFAULT 0,
    total_tokens INT DEFAULT 0,
    error_message VARCHAR(1024),
    local_file_path VARCHAR(512) COMMENT '本地存储文件路径',
    upload_status VARCHAR(20) DEFAULT 'pending' COMMENT 'pending/uploaded/failed/skipped',
    create_time DATETIME NOT NULL,
    finish_time DATETIME,
    INDEX idx_period_type (period_type),
    INDEX idx_trigger_type (trigger_type),
    INDEX idx_create_time (create_time)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='AI数据分析报告';

-- Upload log: one row per attempt to push a report to the remote endpoint.
CREATE TABLE IF NOT EXISTS sys_ai_data_analysis_upload_log (
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    report_id BIGINT NOT NULL COMMENT '关联报告ID',
    upload_url VARCHAR(512),
    request_body TEXT,
    response_body TEXT,
    http_status INT,
    result VARCHAR(20) NOT NULL DEFAULT 'pending' COMMENT 'success/failed',
    error_message VARCHAR(1024),
    retry_count INT DEFAULT 0,
    create_time DATETIME NOT NULL,
    INDEX idx_report_id (report_id),
    INDEX idx_result (result)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='数据分析报告上传日志';

-- Configuration entries; INSERT ... WHERE NOT EXISTS keeps the script re-runnable.
INSERT INTO sys_config(name, code, value, type, status, select_type)
SELECT 'AI数据分析功能开关', 'aiDataAnalysisEnabled', '0', 1, 1, 'develop' FROM dual
WHERE NOT EXISTS (SELECT 1 FROM sys_config WHERE code = 'aiDataAnalysisEnabled');

INSERT INTO sys_config(name, code, value, type, status, select_type)
SELECT 'AI数据分析定时周期', 'aiDataAnalysisScheduledPeriods', 'YESTERDAY', 1, 1, 'develop' FROM dual
WHERE NOT EXISTS (SELECT 1 FROM sys_config WHERE code = 'aiDataAnalysisScheduledPeriods');

INSERT INTO sys_config(name, code, value, type, status, select_type)
SELECT 'AI数据分析定时Cron', 'aiDataAnalysisCron', '0 0 1 * * ?', 1, 1, 'develop' FROM dual
WHERE NOT EXISTS (SELECT 1 FROM sys_config WHERE code = 'aiDataAnalysisCron');

INSERT INTO sys_config(name, code, value, type, status, select_type)
SELECT 'AI数据分析上传地址', 'aiDataAnalysisUploadUrl', '', 1, 1, 'develop' FROM dual
WHERE NOT EXISTS (SELECT 1 FROM sys_config WHERE code = 'aiDataAnalysisUploadUrl');

INSERT INTO sys_config(name, code, value, type, status, select_type)
SELECT 'AI数据分析上传开关', 'aiDataAnalysisUploadEnabled', '0', 1, 1, 'develop' FROM dual
WHERE NOT EXISTS (SELECT 1 FROM sys_config WHERE code = 'aiDataAnalysisUploadEnabled');
| | | |
-- Menu: AI Management -> AI Data Analysis.
-- After running this script, grant the new menu to the relevant roles via role authorization.
-- Parent menu lookup: prefer the stable code, fall back to the display name.
SET @ai_manage_id := COALESCE(
    (
        SELECT id
        FROM sys_resource
        WHERE code = 'aiManage' AND level = 1
        ORDER BY id
        LIMIT 1
    ),
    (
        SELECT id
        FROM sys_resource
        WHERE name = 'AI管理' AND level = 1
        ORDER BY id
        LIMIT 1
    )
);

-- Create the menu only when the parent exists and it has not been created before.
INSERT INTO sys_resource(code, name, resource_id, level, sort, status)
SELECT 'ai/data_analysis.html', 'AI数据分析', @ai_manage_id, 2, 5, 1
FROM dual
WHERE @ai_manage_id IS NOT NULL
  AND NOT EXISTS (
    SELECT 1
    FROM sys_resource
    WHERE code = 'ai/data_analysis.html' AND level = 2
  );
| | | |
-- Re-sync an existing menu row. Guarded on @ai_manage_id so a missing parent
-- menu cannot overwrite resource_id with NULL and orphan the entry.
UPDATE sys_resource
SET name = 'AI数据分析',
    resource_id = @ai_manage_id,
    level = 2,
    sort = 5,
    status = 1
WHERE code = 'ai/data_analysis.html' AND level = 2
  AND @ai_manage_id IS NOT NULL;
| | | |
-- Resolve the menu id that the button-level resources hang off.
SET @ai_data_analysis_id := (
    SELECT id
    FROM sys_resource
    WHERE code = 'ai/data_analysis.html' AND level = 2
    ORDER BY id
    LIMIT 1
);

-- "View" permission under the menu; level 3 entries are button/action grants.
INSERT INTO sys_resource(code, name, resource_id, level, sort, status)
SELECT 'ai/data_analysis.html#view', '查看', @ai_data_analysis_id, 3, 1, 1
FROM dual
WHERE @ai_data_analysis_id IS NOT NULL
  AND NOT EXISTS (
    SELECT 1
    FROM sys_resource
    WHERE code = 'ai/data_analysis.html#view' AND level = 3
  );
| | | |
-- Re-sync the "view" permission row. Guarded on @ai_data_analysis_id so a
-- missing parent menu cannot overwrite resource_id with NULL.
UPDATE sys_resource
SET name = '查看',
    resource_id = @ai_data_analysis_id,
    level = 3,
    sort = 1,
    status = 1
WHERE code = 'ai/data_analysis.html#view' AND level = 3
  AND @ai_data_analysis_id IS NOT NULL;
| | | |
-- Echo the created menu rows so the operator can verify the script worked.
SELECT id, code, name, resource_id, level, sort, status
FROM sys_resource
WHERE code IN (
    'ai/data_analysis.html',
    'ai/data_analysis.html#view'
)
ORDER BY level, sort, id;

-- Cumulative AI token usage, kept in its own singleton row (id = 1) so the
-- totals survive deletion of chat/report history.
CREATE TABLE IF NOT EXISTS sys_ai_token_usage (
    id INT PRIMARY KEY DEFAULT 1,
    prompt_tokens BIGINT NOT NULL DEFAULT 0,
    completion_tokens BIGINT NOT NULL DEFAULT 0,
    total_tokens BIGINT NOT NULL DEFAULT 0,
    llm_call_count BIGINT NOT NULL DEFAULT 0,
    update_time DATETIME
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='AI累计Token使用统计';

-- Seed the singleton row that the incrementTokens UPDATE relies on.
INSERT INTO sys_ai_token_usage (id, prompt_tokens, completion_tokens, total_tokens, llm_call_count, update_time)
SELECT 1, 0, 0, 0, 0, NOW() FROM dual
WHERE NOT EXISTS (SELECT 1 FROM sys_ai_token_usage WHERE id = 1);
| New file |
| | |
| | | <!DOCTYPE html> |
| | | <html lang="zh-CN"> |
| | | <head> |
| | | <meta charset="UTF-8" /> |
| | | <meta name="viewport" content="width=device-width, initial-scale=1.0" /> |
| | | <title>AI数据分析</title> |
| | | <link rel="stylesheet" href="../../static/vue/element/element.css" /> |
| | | <style> |
| | | body { |
| | | margin: 0; |
| | | font-family: "Avenir Next", "PingFang SC", "Microsoft YaHei", sans-serif; |
| | | background: |
| | | radial-gradient(900px 460px at 4% -8%, rgba(36, 113, 92, 0.16), transparent 52%), |
| | | radial-gradient(820px 420px at 106% 0%, rgba(20, 82, 128, 0.14), transparent 54%), |
| | | linear-gradient(180deg, #f4f8fb 0%, #eef4f8 100%); |
| | | color: #223046; |
| | | } |
| | | .console-page { |
| | | max-width: 1680px; |
| | | margin: 16px auto; |
| | | padding: 0 14px 22px; |
| | | } |
| | | .hero { |
| | | border-radius: 18px; |
| | | color: #fff; |
| | | padding: 16px; |
| | | background: |
| | | linear-gradient(135deg, rgba(14, 76, 82, 0.96), rgba(31, 115, 108, 0.92) 48%, rgba(44, 130, 86, 0.94)), |
| | | radial-gradient(460px 180px at 80% 0%, rgba(255, 255, 255, 0.24), transparent 60%); |
| | | box-shadow: 0 14px 34px rgba(26, 76, 91, 0.22); |
| | | } |
| | | .hero-top { |
| | | display: flex; |
| | | align-items: center; |
| | | justify-content: space-between; |
| | | gap: 12px; |
| | | flex-wrap: wrap; |
| | | } |
| | | .hero-title { |
| | | display: flex; |
| | | align-items: center; |
| | | gap: 12px; |
| | | min-width: 280px; |
| | | } |
| | | .hero-title .main { |
| | | font-size: 18px; |
| | | font-weight: 700; |
| | | letter-spacing: 0.2px; |
| | | } |
| | | .hero-title .sub { |
| | | margin-top: 4px; |
| | | font-size: 12px; |
| | | opacity: 0.9; |
| | | } |
| | | .hero-actions { |
| | | display: flex; |
| | | align-items: center; |
| | | justify-content: flex-end; |
| | | gap: 8px; |
| | | flex-wrap: wrap; |
| | | } |
| | | .panel { |
| | | border-radius: 16px; |
| | | border: 1px solid #dfe8f1; |
| | | background: rgba(255, 255, 255, 0.88); |
| | | box-shadow: 0 10px 28px rgba(31, 62, 92, 0.1); |
| | | overflow: hidden; |
| | | margin-top: 12px; |
| | | } |
| | | .panel-head { |
| | | padding: 12px 14px; |
| | | display: flex; |
| | | align-items: center; |
| | | justify-content: space-between; |
| | | gap: 10px; |
| | | border-bottom: 1px solid #edf2f7; |
| | | background: linear-gradient(180deg, #ffffff 0%, #f8fbfd 100%); |
| | | } |
| | | .panel-title { |
| | | font-weight: 700; |
| | | color: #223046; |
| | | } |
| | | .panel-body { |
| | | padding: 12px 14px 14px; |
| | | } |
| | | .status-grid { |
| | | display: grid; |
| | | grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); |
| | | gap: 12px; |
| | | } |
| | | .status-item { |
| | | padding: 12px 14px; |
| | | border-radius: 10px; |
| | | border: 1px solid #e4ebf2; |
| | | background: #f8fbfd; |
| | | } |
| | | .status-item .label { |
| | | font-size: 12px; |
| | | color: #718299; |
| | | margin-bottom: 4px; |
| | | } |
| | | .status-item .value { |
| | | font-size: 14px; |
| | | font-weight: 600; |
| | | color: #223046; |
| | | } |
| | | .status-item .desc { |
| | | font-size: 11px; |
| | | color: #999; |
| | | margin-top: 2px; |
| | | } |
| | | .report-summary { |
| | | margin-top: 12px; |
| | | padding: 14px; |
| | | border-radius: 12px; |
| | | border: 1px solid #e4ebf2; |
| | | background: #f8fbfd; |
| | | } |
| | | .report-summary h3 { |
| | | margin: 0 0 10px 0; |
| | | font-size: 15px; |
| | | color: #223046; |
| | | } |
| | | .report-summary pre { |
| | | white-space: pre-wrap; |
| | | word-break: break-word; |
| | | font-size: 13px; |
| | | line-height: 1.6; |
| | | color: #333; |
| | | margin: 0; |
| | | max-height: 500px; |
| | | overflow-y: auto; |
| | | } |
| | | </style> |
| | | </head> |
| | | <body> |
| | | <div id="app"> |
| | | <div class="console-page"> |
| | | <div class="hero"> |
| | | <div class="hero-top"> |
| | | <div class="hero-title"> |
| | | <span v-html="headerIcon"></span> |
| | | <div> |
| | | <div class="main">AI 数据分析</div> |
| | | <div class="sub">基于 LLM 的 WCS 运营数据分析,支持手动触发和定时自动执行</div> |
| | | </div> |
| | | </div> |
| | | <div class="hero-actions"> |
| | | <span style="font-size:13px;opacity:0.9;">功能开关:</span> |
| | | <el-switch |
| | | v-model="enabled" |
| | | active-text="启用" |
| | | inactive-text="关闭" |
| | | active-color="#13ce66" |
| | | inactive-color="#ff4949" |
| | | :disabled="enabledLoading" |
| | | @change="onEnabledChange"> |
| | | </el-switch> |
| | | </div> |
| | | </div> |
| | | </div> |
| | | |
| | | <div class="panel"> |
| | | <div class="panel-head"> |
| | | <div> |
| | | <div class="panel-title">当前配置状态</div> |
| | | <div style="color:#718299;font-size:12px;margin-top:2px;">开关控制定时分析和手动分析是否执行</div> |
| | | </div> |
| | | <el-button size="mini" @click="loadConfig">刷新</el-button> |
| | | </div> |
| | | <div class="panel-body"> |
| | | <div class="status-grid"> |
| | | <div class="status-item"> |
| | | <div class="label">功能开关</div> |
| | | <div class="value" :style="{color: enabled ? '#67c23a' : '#f56c6c'}"> |
| | | {{ enabled ? '已启用' : '已关闭' }} |
| | | </div> |
| | | <div class="desc">关闭后定时任务和手动触发均不执行</div> |
| | | </div> |
| | | <div class="status-item"> |
| | | <div class="label">定时分析周期</div> |
| | | <div class="value">{{ periodLabel(config.scheduledPeriods || 'YESTERDAY') }}</div> |
| | | <div class="desc">定时任务分析的时间范围</div> |
| | | </div> |
| | | <div class="status-item"> |
| | | <div class="label">定时执行时间</div> |
| | | <div class="value">{{ cronDesc }}</div> |
| | | <div class="desc">Cron: {{ config.cron || '0 0 1 * * ?' }}</div> |
| | | </div> |
| | | <div class="status-item"> |
| | | <div class="label">公网上传</div> |
| | | <div class="value" :style="{color: config.uploadEnabled ? '#67c23a' : '#999'}"> |
| | | {{ config.uploadEnabled ? '已启用' : '未启用' }} |
| | | </div> |
| | | <div class="desc">{{ config.uploadUrl || '未配置上传地址' }}</div> |
| | | </div> |
| | | </div> |
| | | </div> |
| | | </div> |
| | | |
| | | <div class="panel"> |
| | | <div class="panel-head"> |
| | | <div> |
| | | <div class="panel-title">手动分析</div> |
| | | <div style="color:#718299;font-size:12px;margin-top:2px;">点击按钮立即触发指定周期的 AI 数据分析</div> |
| | | </div> |
| | | </div> |
| | | <div class="panel-body"> |
| | | <el-button-group> |
| | | <el-button type="primary" :disabled="!enabled || triggerLoading" :loading="triggerLoading && triggerPeriod==='TODAY'" @click="triggerAnalysis('TODAY')">分析今日</el-button> |
| | | <el-button type="primary" :disabled="!enabled || triggerLoading" :loading="triggerLoading && triggerPeriod==='YESTERDAY'" @click="triggerAnalysis('YESTERDAY')">分析昨日</el-button> |
| | | <el-button type="primary" :disabled="!enabled || triggerLoading" :loading="triggerLoading && triggerPeriod==='THIS_WEEK'" @click="triggerAnalysis('THIS_WEEK')">分析本周</el-button> |
| | | <el-button type="primary" :disabled="!enabled || triggerLoading" :loading="triggerLoading && triggerPeriod==='THIS_MONTH'" @click="triggerAnalysis('THIS_MONTH')">分析本月</el-button> |
| | | </el-button-group> |
| | | <span v-if="!enabled" style="margin-left:12px;color:#f56c6c;font-size:13px;"> |
| | | <i class="el-icon-warning"></i> 功能未启用,请先打开上方开关 |
| | | </span> |
| | | </div> |
| | | </div> |
| | | |
| | | <div class="panel"> |
| | | <div class="panel-head"> |
| | | <div> |
| | | <div class="panel-title">分析报告</div> |
| | | <div style="color:#718299;font-size:12px;margin-top:2px;">最近生成的分析报告</div> |
| | | </div> |
| | | <el-button size="mini" :loading="reportsLoading" @click="loadReports">刷新</el-button> |
| | | </div> |
| | | <div class="panel-body"> |
| | | <el-table :data="reports" v-loading="reportsLoading" stripe size="small" style="width:100%" @row-click="onReportClick"> |
| | | <el-table-column prop="id" label="ID" width="60"></el-table-column> |
| | | <el-table-column prop="periodType" label="周期" min-width="80"> |
| | | <template slot-scope="scope"> |
| | | {{ periodLabel(scope.row.periodType) }} |
| | | </template> |
| | | </el-table-column> |
| | | <el-table-column prop="triggerType" label="触发方式" width="90"> |
| | | <template slot-scope="scope"> |
| | | <el-tag size="mini" :type="scope.row.triggerType==='auto'?'success':'info'"> |
| | | {{ scope.row.triggerType === 'auto' ? '定时' : '手动' }} |
| | | </el-tag> |
| | | </template> |
| | | </el-table-column> |
| | | <el-table-column prop="status" label="状态" width="80"> |
| | | <template slot-scope="scope"> |
| | | <el-tag size="mini" :type="statusType(scope.row.status)">{{ statusLabel(scope.row.status) }}</el-tag> |
| | | </template> |
| | | </el-table-column> |
| | | <el-table-column prop="createTime" label="创建时间" min-width="160"> |
| | | <template slot-scope="scope">{{ formatTime(scope.row.createTime) }}</template> |
| | | </el-table-column> |
| | | <el-table-column prop="llmCallCount" label="LLM调用" width="80"></el-table-column> |
| | | <el-table-column prop="totalTokens" label="Token" width="90"></el-table-column> |
| | | <el-table-column prop="uploadStatus" label="上传" width="80"> |
| | | <template slot-scope="scope"> |
| | | <el-tag size="mini" :type="uploadType(scope.row.uploadStatus)"> |
| | | {{ uploadLabel(scope.row.uploadStatus) }} |
| | | </el-tag> |
| | | </template> |
| | | </el-table-column> |
| | | <el-table-column label="操作" width="80" fixed="right"> |
| | | <template slot-scope="scope"> |
| | | <el-button size="mini" type="text" @click.stop="viewReport(scope.row)">详情</el-button> |
| | | </template> |
| | | </el-table-column> |
| | | </el-table> |
| | | </div> |
| | | </div> |
| | | |
| | | <div class="panel" v-if="selectedReport"> |
| | | <div class="panel-head"> |
| | | <div> |
| | | <div class="panel-title">报告详情 #{{ selectedReport.id }}</div> |
| | | <div style="color:#718299;font-size:12px;margin-top:2px;"> |
| | | {{ periodLabel(selectedReport.periodType) }} · {{ formatTime(selectedReport.createTime) }} |
| | | </div> |
| | | </div> |
| | | <el-button size="mini" @click="selectedReport=null">关闭</el-button> |
| | | </div> |
| | | <div class="panel-body"> |
| | | <div class="report-summary"> |
| | | <h3>分析报告</h3> |
| | | <pre>{{ selectedReport.summary || '暂无报告内容' }}</pre> |
| | | </div> |
| | | </div> |
| | | </div> |
| | | </div> |
| | | </div> |
| | | |
| | | <script type="text/javascript" src="../../static/vue/js/vue.min.js"></script> |
| | | <script type="text/javascript" src="../../static/vue/element/element.js"></script> |
| | | <script type="text/javascript" src="../../static/js/common.js" charset="utf-8"></script> |
| | | <script> |
| | | new Vue({ |
| | | el: '#app', |
| | | data: function() { |
| | | return { |
| | | headerIcon: getAiIconHtml(36, 36), |
| | | baseUrl: baseUrl, |
| | | enabled: false, |
| | | config: {}, |
| | | enabledLoading: false, |
| | | triggerLoading: false, |
| | | triggerPeriod: '', |
| | | reportsLoading: false, |
| | | reports: [], |
| | | selectedReport: null |
| | | }; |
| | | }, |
| | | computed: { |
| | | cronDesc: function() { |
| | | var cron = this.config.cron || '0 0 1 * * ?'; |
| | | if (cron === '0 0 1 * * ?') return '每天凌晨 1:00'; |
| | | if (cron === '0 0 2 * * ?') return '每天凌晨 2:00'; |
| | | if (cron === '0 30 0 * * ?') return '每天 0:30'; |
| | | return cron; |
| | | } |
| | | }, |
| | | mounted: function() { |
| | | this.loadConfig(); |
| | | this.loadReports(); |
| | | }, |
| | | methods: { |
| | | authHeaders: function() { |
| | | return { 'token': localStorage.getItem('token') }; |
| | | }, |
| | | requestJson: function(url, options) { |
| | | var requestOptions = options || {}; |
| | | requestOptions.headers = requestOptions.headers || this.authHeaders(); |
| | | return fetch(url, requestOptions).then(function(response) { |
| | | return response.json(); |
| | | }); |
| | | }, |
| | | loadConfig: function() { |
| | | var self = this; |
| | | this.requestJson(this.baseUrl + '/ai/dataAnalysis/enabled/auth') |
| | | .then(function(res) { |
| | | if (res && res.code === 200 && res.data) { |
| | | self.enabled = res.data.enabled === true; |
| | | self.config = res.data; |
| | | } |
| | | }); |
| | | }, |
| | | onEnabledChange: function(val) { |
| | | var self = this; |
| | | this.enabledLoading = true; |
| | | this.requestJson(this.baseUrl + '/ai/dataAnalysis/enabled/auth?enabled=' + (val ? '1' : '0'), { method: 'POST' }) |
| | | .then(function(res) { |
| | | self.enabledLoading = false; |
| | | if (res && res.code === 200) { |
| | | self.enabled = res.data && res.data.enabled === true; |
| | | self.$message.success(self.enabled ? '已启用数据分析' : '已关闭数据分析'); |
| | | } else { |
| | | self.enabled = !val; |
| | | self.$message.error((res && res.msg) ? res.msg : '操作失败'); |
| | | } |
| | | }) |
| | | .catch(function() { |
| | | self.enabledLoading = false; |
| | | self.enabled = !val; |
| | | self.$message.error('请求失败'); |
| | | }); |
| | | }, |
| | | triggerAnalysis: function(periodType) { |
| | | var self = this; |
| | | this.triggerLoading = true; |
| | | this.triggerPeriod = periodType; |
| | | this.requestJson(this.baseUrl + '/ai/dataAnalysis/trigger/auth?periodType=' + periodType, { method: 'POST' }) |
| | | .then(function(res) { |
| | | self.triggerLoading = false; |
| | | self.triggerPeriod = ''; |
| | | if (res && res.code === 200) { |
| | | var result = res.data; |
| | | if (result && result.skipped) { |
| | | self.$message.warning('已跳过: ' + (result.reason || '未知原因')); |
| | | } else { |
| | | self.$message.success('分析完成'); |
| | | self.loadReports(); |
| | | } |
| | | } else { |
| | | self.$message.error((res && res.msg) ? res.msg : '触发失败'); |
| | | } |
| | | }) |
| | | .catch(function() { |
| | | self.triggerLoading = false; |
| | | self.triggerPeriod = ''; |
| | | self.$message.error('请求失败'); |
| | | }); |
| | | }, |
| | | loadReports: function() { |
| | | var self = this; |
| | | this.reportsLoading = true; |
| | | this.requestJson(this.baseUrl + '/ai/dataAnalysis/reports/auth?limit=20') |
| | | .then(function(res) { |
| | | self.reportsLoading = false; |
| | | if (res && res.code === 200 && Array.isArray(res.data)) { |
| | | self.reports = res.data; |
| | | } |
| | | }) |
| | | .catch(function() { |
| | | self.reportsLoading = false; |
| | | }); |
| | | }, |
| | | viewReport: function(row) { |
| | | var self = this; |
| | | this.requestJson(this.baseUrl + '/ai/dataAnalysis/report/' + row.id + '/auth') |
| | | .then(function(res) { |
| | | if (res && res.code === 200 && res.data) { |
| | | self.selectedReport = res.data; |
| | | } |
| | | }); |
| | | }, |
// Row-click handler for the report table; delegates to viewReport.
onReportClick: function(row) {
    this.viewReport(row);
},
| | | periodLabel: function(t) { |
| | | var map = { 'TODAY': '今日', 'YESTERDAY': '昨日', 'THIS_WEEK': '本周', 'THIS_MONTH': '本月' }; |
| | | return map[t] || t; |
| | | }, |
| | | statusType: function(s) { |
| | | if (s === 'success') return 'success'; |
| | | if (s === 'failed') return 'danger'; |
| | | if (s === 'running') return 'warning'; |
| | | return 'info'; |
| | | }, |
| | | statusLabel: function(s) { |
| | | var map = { 'pending': '待执行', 'running': '执行中', 'success': '成功', 'failed': '失败' }; |
| | | return map[s] || s; |
| | | }, |
| | | uploadType: function(s) { |
| | | if (s === 'uploaded') return 'success'; |
| | | if (s === 'failed') return 'danger'; |
| | | return 'info'; |
| | | }, |
| | | uploadLabel: function(s) { |
| | | var map = { 'pending': '待上传', 'uploaded': '已上传', 'failed': '失败', 'skipped': '跳过' }; |
| | | return map[s] || s; |
| | | }, |
| | | formatTime: function(t) { |
| | | if (!t) return '-'; |
| | | var d = new Date(t); |
| | | if (isNaN(d.getTime())) return t; |
| | | var pad = function(n) { return n < 10 ? '0' + n : n; }; |
| | | return d.getFullYear() + '-' + pad(d.getMonth() + 1) + '-' + pad(d.getDate()) |
| | | + ' ' + pad(d.getHours()) + ':' + pad(d.getMinutes()) + ':' + pad(d.getSeconds()); |
| | | } |
| | | } |
| | | }); |
| | | </script> |
| | | </body> |
| | | </html> |
| | |
| | | <div class="summary-card"> |
| | | <div class="label">{{ i18n('dashboard.aiTokenTotalLabel', 'AI 累计 Tokens') }}</div> |
| | | <div class="value">{{ formatNumber(overview.aiTokenTotal) }}</div> |
| | | <div class="desc">{{ i18n('dashboard.aiTokenTotalDesc', '所有 AI 调用累计消耗') }}</div>
| | | </div> |
| | | <div class="summary-card"> |
| | | <div class="label">{{ i18n('dashboard.aiCallTotalLabel', 'LLM 调用次数') }}</div> |