package com.zy.asrs.importexcle;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.alibaba.excel.exception.ExcelAnalysisException;
import com.alibaba.fastjson.JSON;
import com.core.common.DateUtils;
import com.core.common.SnowflakeIdWorker;
import com.zy.asrs.entity.DocType;
import com.zy.asrs.entity.Order;
import com.zy.asrs.entity.OrderDetl;
import com.zy.asrs.mapper.OrderDetlMapper;
import com.zy.asrs.service.DocTypeService;
import com.zy.asrs.service.OrderService;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * EasyExcel read listener that imports one order per sheet: the first four rows
 * are treated as the order header (order number and document type), all later
 * rows are order details, persisted in batches.
 *
 * @author pang.jiabao
 * @description order import listener
 * @createDate 2024/9/2 9:56
 */
@Slf4j
public class ImportOrderListener extends AnalysisEventListener<ImportOrderDto> {

    /**
     * Flush parsed rows to the database every BATCH_COUNT rows and clear the
     * buffer, so at most one batch is held in memory (avoids OOM on big files).
     */
    private static final int BATCH_COUNT = 400;

    /** Total number of rows parsed so far (used for logging only). */
    private int count = 0;

    /** Order number read from header row 0, column 2. */
    private String orderNo;

    /** Primary key of the Order record inserted from the header rows. */
    private long orderId;

    /** Document type id resolved from header row 1, column 2. */
    private long docTypeId;

    /** Buffer of parsed rows awaiting persistence; cleared after every flush. */
    private final List<ImportOrderDto> list = new ArrayList<>();

    // Collaborators injected by the caller. A listener instance is created per
    // import, so it is not a Spring bean itself — pass the managed beans in.
    private final OrderService orderService;
    private final OrderDetlMapper orderDetlMapper;
    private final DocTypeService docTypeService;
    private final SnowflakeIdWorker snowflakeIdWorker;

    /** Id of the user running the import; recorded as the order's creator. */
    private final Long userId;

    /**
     * Creates the listener.
     *
     * @param orderService      order header persistence/lookup service
     * @param orderDetlMapper   batch-insert mapper for order detail rows
     * @param docTypeService    resolves the document type named in the header
     * @param snowflakeIdWorker generates the order UUID
     * @param userId            importing user's id, stored as createBy
     */
    public ImportOrderListener(OrderService orderService, OrderDetlMapper orderDetlMapper,
                               DocTypeService docTypeService, SnowflakeIdWorker snowflakeIdWorker,
                               Long userId) {
        this.orderService = orderService;
        this.orderDetlMapper = orderDetlMapper;
        this.docTypeService = docTypeService;
        this.snowflakeIdWorker = snowflakeIdWorker;
        this.userId = userId;
    }

    /**
     * Called once per parsed row. Rows 0..3 are buffered as the header: when
     * row index 3 arrives, the header is validated, the Order record is created
     * and the buffer is cleared. Later rows are details, flushed in batches.
     *
     * @throws ExcelAnalysisException if the order already exists or the
     *                                document type cannot be resolved
     */
    @SneakyThrows
    @Override
    public void invoke(ImportOrderDto data, AnalysisContext context) {
        log.info("解析到第 {} 条数据:{}", ++count, JSON.toJSONString(data));
        list.add(data);

        // Header section: process the buffered rows once row index 3 arrives.
        if (context.getCurrentRowNum() == 3) {
            String time = DateUtils.convert(new Date(), DateUtils.yyyyMMddHHmmss_F);

            // Row 0, column 2 carries the order number — reject duplicates.
            Order order2 = orderService.selectByNo(list.get(0).getColumn2());
            if (order2 != null) {
                throw new ExcelAnalysisException("单据已存在!");
            }

            // Row 1, column 2 carries the document type name.
            DocType docType = docTypeService.selectOrAdd(list.get(1).getColumn2(), Boolean.FALSE);
            if (docType == null) {
                throw new ExcelAnalysisException("单据类型错误:" + list.get(1).getColumn2());
            }

            orderNo = list.get(0).getColumn2();
            docTypeId = docType.getDocId();

            Order order = new Order();
            order.setUuid(String.valueOf(snowflakeIdWorker.nextId()));
            order.setOrderNo(orderNo);
            order.setOrderTime(time);
            order.setDocType(docType.getDocId());
            order.setSettle(1L);
            order.setStatus(1);
            order.setCreateBy(userId);
            order.setCreateTime(new Date());
            orderService.insert(order);

            // Re-read the order to obtain the generated primary key, which the
            // detail rows reference.
            Order order1 = orderService.selectByNo(orderNo);
            orderId = order1.getId();

            list.clear();
            return;
        }

        // Flush every BATCH_COUNT rows so large files cannot exhaust memory.
        if (list.size() >= BATCH_COUNT) {
            saveData();
            // clear the buffer once stored
            list.clear();
        }
    }

    /**
     * Called after the whole sheet is parsed; flushes any rows still buffered
     * so the tail of the file is also persisted.
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext context) {
        if (!list.isEmpty()) {
            saveData();
        }
        log.info("所有数据解析完成!");
    }

    /**
     * Persists the buffered detail rows with a single batch insert.
     */
    private void saveData() {
        log.info("{}条数据,开始存储数据库!", list.size());
        log.info("单据号:{},数据:{},", orderNo, JSON.toJSONString(list));

        List<OrderDetl> orderDetlList = new ArrayList<>(list.size());
        list.forEach(importOrderDto -> {
            OrderDetl orderDetl = new OrderDetl();
            // For document type 24 the columns hold locations, zero-padded to
            // 7 digits. NOTE(review): 24 looks like a fixed "location move"
            // doc-type id — confirm against the doc_type table.
            if (docTypeId == 24) {
                orderDetl.setSpecs(String.format("%07d", Integer.parseInt(importOrderDto.getColumn1()))); // source location
                if (importOrderDto.getColumn2() != null) {
                    orderDetl.setModel(String.format("%07d", Integer.parseInt(importOrderDto.getColumn2()))); // target location
                }
                if (importOrderDto.getColumn3() != null) {
                    orderDetl.setBeBatch(Integer.parseInt(importOrderDto.getColumn3())); // lane
                }
            } else {
                orderDetl.setBrand(importOrderDto.getColumn1());
            }
            orderDetl.setBatch("");
            orderDetl.setOrderId(orderId);
            orderDetl.setOrderNo(orderNo);
            // NOTE(review): 9527 appears to be a fixed system-user id — confirm.
            orderDetl.setCreateBy(9527L);
            orderDetl.setCreateTime(new Date());
            orderDetl.setUpdateBy(9527L);
            orderDetl.setUpdateTime(new Date());
            orderDetl.setStatus(1);
            orderDetl.setQty(0.0D);
            orderDetl.setAnfme(1.0);
            orderDetlList.add(orderDetl);
        });
        orderDetlMapper.batchDetls(orderDetlList);
        log.info("存储数据库成功!");
    }

    /**
     * Called when parsing throws. Logs the full exception (not just its
     * message, so the stack trace is preserved) and rethrows so the caller
     * sees the failure.
     */
    @Override
    public void onException(Exception exception, AnalysisContext context) throws Exception {
        log.error("处理异常:{}", exception.getMessage(), exception);
        throw exception;
    }
}