commit c0b11a68eb
parent 4be352634c
(no commit message)
@@ -2,11 +2,16 @@ package org.cpte.modules.quartz.job;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.cpte.modules.constant.enums.PickStatusEnum;
import org.cpte.modules.constant.enums.PickTypeEnum;
import org.cpte.modules.shipping.entity.Pick;
import org.cpte.modules.shipping.entity.PickDetail;
import org.cpte.modules.shipping.mapper.PickDetailMapper;
import org.cpte.modules.shipping.mapper.PickMapper;
import org.cpte.modules.shipping.service.IPickService;
import org.cpte.modules.shipping.vo.AllocationPickDetailData;
import org.cpte.modules.utils.RedisDistributedLockUtil;
import org.jeecg.common.constant.CommonConstant;
import org.jeecg.modules.base.service.BaseCommonService;
import org.quartz.Job;
@@ -23,103 +28,62 @@ import java.util.stream.Collectors;
 */
@Slf4j
public class AllocateJob implements Job {

    @Autowired
    private PickMapper pickMapper;

    @Autowired
    private PickDetailMapper pickDetailMapper;

    @Autowired
    private IPickService pickService;

    @Autowired
    private BaseCommonService baseCommonService;

    // Cache of already-processed allocation results, backed by a ConcurrentHashMap
    private static final Map<String, Boolean> processedCache = new ConcurrentHashMap<>();

    // Maximum cache size, to prevent unbounded memory growth
    private static final int MAX_CACHE_SIZE = 1000;

    @Autowired
    private RedisDistributedLockUtil redissonLock;
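    // Note: the job serializes allocation across scheduler nodes with a Redis distributed lock
    // ("lock:allocate") and uses processedCache to avoid writing duplicate operation-log entries
    // for identical allocation results. Clearing the whole cache once it exceeds MAX_CACHE_SIZE
    // is a deliberately simple bound; an LRU map would be an alternative if recent keys must
    // survive the flush.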
    @Override
    public void execute(JobExecutionContext jobExecutionContext) {
        List<Pick> pickList = pickMapper.queryUnallocatedPick();
        if (CollectionUtils.isEmpty(pickList)) {
            log.info("没有待分配的出库单");
            return;
        }

        String lockKey = "lock:allocate";
        String lockValue = null;
        try {
            lockValue = redissonLock.tryLock(lockKey, 10);
            if (StringUtils.isEmpty(lockValue)) {
                throw new RuntimeException("分配明细中,请稍后重试");
            }
            // 1. Fetch the outbound detail lines that have not finished picking
            List<PickDetail> pickDetails = pickDetailMapper.queryUnFinishedPickDetail();
            if (CollectionUtils.isEmpty(pickDetails)) {
                log.info("没有待分配的出库明细");
                return;
            }
            for (Pick pick : pickList) {
                // Finished goods or spare parts
                if (isCPOrPJType(pick.getOrderType())) {
                    // Skip if an allocation task for this pick is still running
                    if (isPickExecuting(pick.getId())) {
                        log.info("出库单任务未完成,ID: {}", pick.getId());
                        continue;
                    }
                } else {
                    // For other order types, skip picks that are already assigned
                    if (isAlreadyAssigned(pick.getStatus())) {
                        continue;
                    }
                }
                // Perform the allocation
                allocatePick(pick.getId());
            }
        } finally {
            // Release the lock only with the token we obtained, never someone else's
            if (StringUtils.isNotEmpty(lockValue)) {
                redissonLock.unlock(lockKey, lockValue);
            }
        }
    }
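    // Note: RedisDistributedLockUtil is assumed to hand back a unique token from tryLock(key, seconds)
    // and to release the key in unlock(key, token) only when the stored token matches, so a node whose
    // lock has expired cannot release a lock that another node has since acquired.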

    // Helper methods
    private boolean isCPOrPJType(Integer orderType) {
        return PickTypeEnum.CP.getValue().equals(orderType)
                || PickTypeEnum.PJ.getValue().equals(orderType);
    }

    private boolean isAlreadyAssigned(Integer status) {
        return PickStatusEnum.ASSIGNED.getValue().equals(status);
    }

    private boolean isPickExecuting(Long pickId) {
        return pickMapper.queryPickIsExecuting(pickId) > 0;
    }

    private void allocatePick(Long pickId) {
        // Allocate a single pick order
        long startTime = System.currentTimeMillis();
        List<String> resultMsg;
        try {
            // Allocate exactly one pick order per call
            resultMsg = pickService.allocatePick(Collections.singletonList(pickId));
            if (CollectionUtils.isNotEmpty(resultMsg)) {
                // Build the cache key for this allocation result
                String cacheKey = generateCacheKey(resultMsg);

                // Only log results that have not been processed before
                if (!processedCache.containsKey(cacheKey)) {
                    // New content, write the operation log
                    baseCommonService.addLog("出库任务分配:" + "\n" + cacheKey, CommonConstant.LOG_TYPE_2, CommonConstant.OPERATE_TYPE_1);

                    // Remember this result
                    processedCache.put(cacheKey, true);

                    // Keep the cache bounded
                    if (processedCache.size() > MAX_CACHE_SIZE) {
                        processedCache.clear();
                    }
                }
            }
        } catch (Exception e) {
            log.error("分配出库单失败,ID: {}, 错误: {}", pickId, e.getMessage());
            log.error("分配明细异常", e);
            throw e;
        } finally {
            long endTime = System.currentTimeMillis();
            log.info("分配出库明细耗时:{}ms,处理ID:{}", endTime - startTime, pickId);
        }
    }

    private void allocate(List<PickDetail> pickDetails) {
        for (PickDetail detail : pickDetails) {
            boolean allocated = allocatePickDetail(detail);
            // Stop after the first detail that allocates successfully
            if (allocated) {
                return;
            }
        }
    }

    private boolean allocatePickDetail(PickDetail pickDetail) {
        try {
            return pickService.allocatePickDetail(pickDetail);
        } catch (Exception e) {
            log.error("分配出库明细失败,ID: {}, 错误: {}", pickDetail.getId(), e.getMessage());
            return false;
        }
    }

    /**
     * Build the cache key: a unique identifier derived from the allocation result
     */
    private String generateCacheKey(List<String> resultMsg) {
        // Sort the messages before joining so identical content always yields the same key
        return resultMsg.stream()
                .filter(Objects::nonNull)
                .sorted()
                .collect(Collectors.joining("\n"));
    }

}
@@ -14,14 +14,11 @@ import org.cpte.modules.shipping.service.IPickDetailService;
import org.cpte.modules.shipping.service.IPickService;
import org.cpte.modules.shipping.vo.AllocationPickDetailData;
import org.cpte.modules.utils.RedisDistributedLockUtil;
import org.jeecg.common.constant.CommonConstant;
import org.jeecg.modules.base.service.BaseCommonService;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
@@ -183,24 +183,16 @@ public class AsnServiceImpl extends ServiceImpl<AsnMapper, Asn> implements IAsnS
        List<AsnDetail> asnDetails = asnDetailMapper.queryByAsnIds(asnIds);
        // Group the details by ASN id
        Map<Long, List<AsnDetail>> asnDetailMapGroup = asnDetails.stream().collect(Collectors.groupingBy(AsnDetail::getAsnId));
        // Collect the headers and details to close, then persist them after the loop
        List<Asn> asnToUpdate = new ArrayList<>();
        List<AsnDetail> asnDetailToUpdate = new ArrayList<>();
        for (Map.Entry<Long, List<AsnDetail>> entry : asnDetailMapGroup.entrySet()) {
            Asn asn = asnMap.get(entry.getKey());
            List<AsnDetail> asnDetailList = entry.getValue();
            for (AsnDetail asnDetail : asnDetailList) {
                asnDetail.setStatus(AsnStatusEnum.CLOSED.getValue());
                asnDetailToUpdate.add(asnDetail);
            }
            asn.setStatus(AsnStatusEnum.CLOSED.getValue());
            asnToUpdate.add(asn);
        }
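        // Details and ASN headers are written back in batch below rather than row by row inside
        // the loop, which keeps the number of update statements per close operation small.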
        if (CollectionUtils.isNotEmpty(asnDetailToUpdate)) {
            asnDetailMapper.updateById(asnDetailToUpdate);
        }
        if (CollectionUtils.isNotEmpty(asnToUpdate)) {
            this.updateBatchById(asnToUpdate);
        }

    }
}
@@ -652,7 +652,6 @@ public class AllocateProcessor {

        log.info("【{}】库位距离评分: {}-移位评分: {}-总得分: {}-移位次数: {}",
                currPoint.getPointCode(), distanceScore, moveScore, totalScore, movePoints.size());

        return new InventoryScore(inventory, inventory.getStockId(), totalScore, movePoints);
    }

@@ -948,7 +947,6 @@ public class AllocateProcessor {
    public List<Task> buildMoveTask(List<Point> movePoints) {
        List<Task> moveList = new ArrayList<>();

        Map<Long, Point> pointMap = movePoints.stream().collect(Collectors.toMap(Point::getId, point -> point));
        // Inventory sitting on the points that need to be moved
        List<Long> pointIds = movePoints.stream().map(Point::getId).toList();
        List<Inventory> moveInventoryList = inventoryMapper.queryByPointIds(pointIds);