From af5081bc0d0668d526a204076557a171097ddb8d Mon Sep 17 00:00:00 2001
From: zhang <zc857179121@qq.com>
Date: Thu, 05 Feb 2026 14:02:59 +0800
Subject: [PATCH] Merge branch 'refs/heads/rcs_master' into ctu_conveyor

---
 zy-acs-manager/src/main/java/com/zy/acs/manager/core/integrate/wms/TaskReportService.java |  180 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 180 insertions(+), 0 deletions(-)

diff --git a/zy-acs-manager/src/main/java/com/zy/acs/manager/core/integrate/wms/TaskReportService.java b/zy-acs-manager/src/main/java/com/zy/acs/manager/core/integrate/wms/TaskReportService.java
new file mode 100644
index 0000000..e9df693
--- /dev/null
+++ b/zy-acs-manager/src/main/java/com/zy/acs/manager/core/integrate/wms/TaskReportService.java
@@ -0,0 +1,180 @@
+package com.zy.acs.manager.core.integrate.wms;
+
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.TypeReference;
+import com.zy.acs.framework.common.Cools;
+import com.zy.acs.framework.common.R;
+import com.zy.acs.framework.common.SnowflakeIdWorker;
+import com.zy.acs.manager.common.config.UplinkProperties;
+import com.zy.acs.manager.common.constant.Constants;
+import com.zy.acs.manager.common.utils.HttpGo;
+import com.zy.acs.manager.core.domain.type.NamespaceType;
+import com.zy.acs.manager.core.integrate.dto.HttpResult;
+import com.zy.acs.manager.core.integrate.dto.TaskUplinkParam;
+import com.zy.acs.manager.core.service.ThreadPoolRegulator;
+import com.zy.acs.manager.manager.entity.Bus;
+import com.zy.acs.manager.manager.entity.IntegrationRecord;
+import com.zy.acs.manager.manager.entity.Task;
+import com.zy.acs.manager.manager.enums.IntegrationDirectionType;
+import com.zy.acs.manager.manager.enums.StatusType;
+import com.zy.acs.manager.manager.enums.TaskStsType;
+import com.zy.acs.manager.manager.enums.TaskUplinkStateType;
+import com.zy.acs.manager.manager.service.BusService;
+import com.zy.acs.manager.manager.service.IntegrationRecordService;
+import com.zy.acs.manager.manager.service.TaskService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import java.time.Duration;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+
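+/**
+ * Reports completed tasks to the upstream WMS over HTTP and records every
+ * uplink call as an IntegrationRecord for auditing.
+ */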
+@Slf4j
+@Service
+public class TaskReportService {
+
+    @Autowired
+    private UplinkProperties uplinkProperties;
+    @Autowired
+    private TaskService taskService;
+    @Autowired
+    private ThreadPoolRegulator threadPoolRegulator;
+    @Autowired
+    private BusService busService;
+    @Autowired
+    private SnowflakeIdWorker snowflakeIdWorker;
+    @Autowired
+    private IntegrationRecordService integrationRecordService;
+
+    private HttpGo http;
+
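+    /** Builds the shared HTTP client once the uplink configuration has been injected. */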
+    @PostConstruct
+    public void init() {
+        // the configured timeout is in milliseconds; pass it through instead of truncating to whole seconds
+        Duration timeout = Duration.ofMillis(uplinkProperties.getTimeout());
+        this.http = HttpGo.builder()
+                .connectTimeout(timeout)
+                .readTimeout(timeout)
+//                .defaultHeader("User-Agent", "HttpGo/1.0")
+//                .trustAllSsl(true) // ONLY if you really need it (self-signed internal)
+                .build();
+    }
+
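+    /**
+     * Pushes a completed task to the configured uplink endpoint and persists an
+     * integration record for the call. Returns true only when the remote side
+     * answers with business code 200.
+     */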
+    public boolean reportFinished(Task task) {
+        if (Cools.isEmpty(task)) {
+            return false;
+        }
+        if (!uplinkProperties.getEnabled()) {
+            return false;
+        }
+        if (!task.getTaskSts().equals(TaskStsType.COMPLETE.val())) {
+            return false;
+        }
+        // compare enums with == so an unknown uplink state (null) is rejected instead of throwing an NPE
+        TaskUplinkStateType uplinkStateType = TaskUplinkStateType.of(task.getUplinkSts());
+        if (uplinkStateType != TaskUplinkStateType.PENDING
+                && uplinkStateType != TaskUplinkStateType.SENDING
+                && uplinkStateType != TaskUplinkStateType.FAILED
+        ) {
+            return false;
+        }
+        Date now = new Date();
+        // blocking variant (kept commented out for reference):
+//        Future<R> future = threadPoolRegulator.getInstance().submit(() -> {
+//            mapDataDispatcher.modifyDynamicMatrix(null, null, param.getAgvNo(), true);
+//            return success();
+//        });
+//        System.out.println(JSON.toJSONString(future.get()));
+
+        // non-blocking placeholder: the map-update calls are commented out, so this currently just returns R.ok()
+        CompletableFuture<?> completableFuture = CompletableFuture.supplyAsync(() -> {
+//            mapDataDispatcher.modifyDynamicMatrix(null, null, param.getAgvNo(), true);
+//            avoidWaveCalculator.calcDynamicNodeByVehicle(agv, null);
+            return R.ok();
+        }, threadPoolRegulator.getInstance());
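+        // the future is not awaited; reporting proceeds regardless of its outcome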
+
+        // build the target URL from the configured host, port and path
+        String url = this.http.buildUrl(uplinkProperties.getHost(), uplinkProperties.getPort(), uplinkProperties.getUrl());
+        // request headers (none required yet)
+        Map<String, String> headers = new HashMap<>();
+        // request payload
+        TaskUplinkParam param = new TaskUplinkParam();
+        if (null != task.getBusId()) {
+            Bus bus = busService.getById(task.getBusId());
+            if (null != bus) {    // guard against a missing bus record
+                param.setBatchNo(bus.getBusNo());
+            }
+        }
+        param.setTaskNo(task.getSeqNum());
+        param.setTimestamp(null == task.getEndTime() ? System.currentTimeMillis() : task.getEndTime().getTime());
+
+        IntegrationRecord integrationRecord = new IntegrationRecord(
+                String.valueOf(snowflakeIdWorker.nextId()).substring(3),    // record no.
+                NamespaceType.RCS_TASK_REPORT.name,    // namespace
+                uplinkProperties.getUrl(),    // interface address
+                null,    // platform secret
+                Constants.RCS,    // caller identifier
+                IntegrationDirectionType.OUTBOUND.value,    // direction [not null]
+                String.valueOf(now.getTime()),    // timestamp
+                uplinkProperties.getHost(),    // client IP
+                JSON.toJSONString(param),    // request content
+                null,    // response content
+                null,    // error content
+                0,    // result
+                null,    // time cost (ms)
+                StatusType.ENABLE.val,    // status
+                now,    // create time [not null]
+                now,    // update time [not null]
+                null    // remark
+        );
+        HttpResult<?> result;
+        try {
+            result = postForResult(url, headers, param);
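+            // keep the raw response on the record for auditing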
+            integrationRecord.setResponse(JSON.toJSONString(result));
+
+            Integer code = result.getCode();
+            if (null == code || 200 != code) {
+                integrationRecord.setErr("Unexpected uplink business code: " + code);
+                return false;
+            }
+//            Object data = result.getData();
+//            if (Cools.isEmpty(data)) {
+//                return false;
+//            }
+
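+            // mark the record as successful once the remote side has accepted the report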
+            integrationRecord.setResult(1);
+        } catch (Exception e) {
+            log.error("Uplink report failed, taskId={}",
+                    JSON.toJSONString(task),
+                    e);
+            integrationRecord.setErr(e.getMessage());
+            return false;
+        } finally {
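+            // always persist the integration record, success or failure, with the measured cost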
+            integrationRecord.setCostMs((int) (System.currentTimeMillis() - now.getTime()));
+            integrationRecordService.syncRecord(integrationRecord);
+        }
+        return true;
+    }
+
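+    /**
+     * POSTs the payload as JSON and parses the body into an HttpResult wrapper.
+     * Throws when the HTTP status is not 200, the body is empty, or parsing fails.
+     */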
+    private HttpResult<?> postForResult(String url, Map<String, String> headers, TaskUplinkParam param) throws Exception {
+        String json = JSON.toJSONString(param);
+        HttpGo.HttpResponse response = this.http.postJson(url, headers, json);
+
+        int status = response.statusCode();
+        if (status != 200) {
+            throw new RuntimeException("Uplink HTTP error: status=" + status + ", body=" + response.body());
+        }
+
+        String body = response.body();
+        if (Cools.isEmpty(body)) {
+            throw new RuntimeException("Uplink empty response body.");
+        }
+
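+        // parse the generic result wrapper; the concrete payload type is not needed here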
+        HttpResult<?> result = JSON.parseObject(body, new TypeReference<HttpResult<?>>() {});
+        if (result == null) {
+            throw new RuntimeException("Uplink parse HttpResult failed: body=" + body);
+        }
+        return result;
+    }
+}

--
Gitblit v1.9.1