Commit 51643d4a by 刘鑫

Merge branch 'develop' of https://gitlab.dituhui.com/bsh/project/project into develop

2 parents f1b13ca2 5be97c31
Showing with 443 additions and 43 deletions
......@@ -214,6 +214,11 @@ public class BatchServiceImpl implements BatchService {
} else {
batchNo = optional.get().getBatchNo();
batchDay = optional.get().getBatchDate();
// Execute the database update: reset the existing batch row
String sqlUpdate = "update dispatch_batch set" +
" group_id=?, engineer_num=0, order_num=0, start_time=?, end_time=null,status='RUNNING'" +
" where team_id=? and batch_no=?";
jdbcTemplate.update(sqlUpdate, groupId, LocalDateTime.now(), teamId, batchNo);
}
log.info("清理原批次数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
......@@ -225,40 +230,40 @@ public class BatchServiceImpl implements BatchService {
//dispatchOrderRepository.deleteAllInBatch(dispatchOrderRepository.findByTeamIdAndBatchNo(teamId,batchNo));
log.info("写入新批次技术员、工单数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
String sqlEngineer = "INSERT INTO dispatch_engineer (group_id, team_id, batch_no, engineer_code, engineer_name, x, y, max_num, max_minute, max_distance, vehicle_type)\n"
+ "SELECT ?, o.team_id,?,o.engineer_code, a.name , b.x, b.y , max_num, max_minute, max_distance, b.vehicle FROM `org_team_engineer` o\n"
+ " join engineer_info a on o.engineer_code=a.engineer_code\n"
+ " left join engineer_business b on a.engineer_code = b.engineer_code\n"
+ " WHERE o.team_id=? AND o.`status`=1\n"
//+ " AND b.x IS NOT NULL AND b.x !=''"
+ " WHERE o.team_id=? AND o.`status`=1\n"
//+ " AND b.x IS NOT NULL AND b.x !=''"
+ " order by a.engineer_code asc";
int engCount = jdbcTemplate.update(sqlEngineer, groupId, batchNo, teamId);
// Orders not yet dispatched (orders already dispatched with PRE status can be dispatched again)
String sqlOrder = "INSERT INTO dispatch_order (group_id, batch_no, team_id, order_id , dt, x, y, \n"
+ " expect_time_begin, expect_time_end, tags, priority , skills , take_time, status )\n"
+ " SELECT ?, ?, a.org_team_id , a.order_id, ?, a.x, a.y , \n"
+ " a.expect_time_begin, a.expect_time_end, a.tags, a.priority , \n"
+ " CONCAT(a.brand, '-', a.type, '-', a.skill) skills , a.take_time , a.appointment_status\n"
+ " FROM order_info a \n" + " WHERE a.org_team_id=? AND a.dt = ? AND bean_status='OPEN'\n"
+ " AND appointment_method LIKE 'AUTO%' AND a.appointment_status IN ('INIT', 'PRE')\n"
+ " AND order_status ='NORMAL' AND service_status='INIT'\n"
+ " ORDER BY a.expect_time_begin ASC \n";
int orderCount = jdbcTemplate.update(sqlOrder, groupId, batchNo, batchDay, teamId, batchDay);
// CONFIRM orders need pre-occupation, so they are included as well
String sqlOrderConfirm = "INSERT INTO dispatch_order (group_id, batch_no, team_id, order_id , dt, x, y, \n" +
" expect_time_begin, expect_time_end, tags, priority , skills , take_time, status, engineer_code, time_begin, time_end )\n" +
" select ?, ?, a.org_team_id , a.order_id, a.dt, a.x, a.y , \n" +
" a.expect_time_begin, a.expect_time_end, a.tags, a.priority , \n" +
" concat(a.brand, '-', a.type, '-', a.skill) skills , a.take_time, a.appointment_status, \n" +
" a.engineer_code, a.plan_start_time, a.plan_end_time \n" +
" from order_info a \n" +
" where a.org_team_id=? and a.dt = ? and bean_status='OPEN'\n" +
" and appointment_method like 'AUTO%' and a.appointment_status in ('CONFIRM')\n" +
" and order_status ='NORMAL' and service_status='INIT'\n" +
" order by a.expect_time_begin asc ";
int orderConfirmCount = jdbcTemplate.update(sqlOrderConfirm, groupId, batchNo, teamId, batchDay);
log.info("准备批次数据 engCount:{}, orderCount:{}, orderConfirmCount:{}", engCount, orderCount, orderConfirmCount);
......
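The hunk above resets the dispatch_batch row and then fills dispatch_engineer/dispatch_order with three INSERT ... SELECT statements. A minimal sketch of running the whole preparation in one transaction so a failed insert cannot leave the batch row reset without its engineer and order rows (class and method names here are illustrative, not the project's actual code):

import java.time.LocalDate;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class BatchPreparationSketch {

    private final JdbcTemplate jdbcTemplate;

    public BatchPreparationSketch(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    // Executes the dispatch_batch reset and the three INSERT ... SELECT statements from the
    // diff above with the same positional parameters; @Transactional rolls everything back
    // if any of them fails.
    @Transactional
    public void prepareBatch(String groupId, String teamId, String batchNo, LocalDate batchDay) {
        // jdbcTemplate.update(sqlUpdate, groupId, LocalDateTime.now(), teamId, batchNo);
        // jdbcTemplate.update(sqlEngineer, groupId, batchNo, teamId);
        // jdbcTemplate.update(sqlOrder, groupId, batchNo, batchDay, teamId, batchDay);
        // jdbcTemplate.update(sqlOrderConfirm, groupId, batchNo, teamId, batchDay);
    }
}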
......@@ -60,12 +60,14 @@ spring:
#Check-in interval for detecting failed scheduler instances
clusterCheckinInterval: 10000
useProperties: false
#Misfire tolerance for triggers (in milliseconds), 20 minutes
misfireThreshold: 1200000
threadPool:
class: org.quartz.simpl.SimpleThreadPool
# Whether threads created in the pool are daemon threads
makeThreadsDaemons: false
#Number of worker threads, at least 1 (no default; typically 1-100)
threadCount: 5
threadCount: 10
#Thread priority (max java.lang.Thread.MAX_PRIORITY 10, min Thread.MIN_PRIORITY 1, default 5)
threadPriority: 5
threadsInheritContextClassLoaderOfInitializingThread: true
......
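misfireThreshold above only defines how late a trigger may fire before Quartz counts it as misfired; what happens to a misfired trigger is chosen per trigger. A small sketch (job identity and cron expression are made up) of pairing the 20-minute threshold with an explicit misfire instruction:

import org.quartz.CronScheduleBuilder;
import org.quartz.CronTrigger;
import org.quartz.TriggerBuilder;

public class MisfireTriggerSketch {

    // If the trigger misfires (for example all worker threads were busy for longer than
    // misfireThreshold), skip the missed run instead of firing it immediately on recovery.
    public static CronTrigger nightlyTrigger() {
        return TriggerBuilder.newTrigger()
                .withIdentity("exampleTrigger", "exampleGroup")
                .withSchedule(CronScheduleBuilder.cronSchedule("0 0 22 * * ?")
                        .withMisfireHandlingInstructionDoNothing())
                .build();
    }
}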
......@@ -142,6 +142,12 @@
<version>3.17</version>
</dependency>
<!-- quartz dependency -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-quartz</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
......
......@@ -42,7 +42,8 @@ public class MsgController {
request.setUserId(userId);
if ("isRead".equalsIgnoreCase(request.getSort())) {
request.setSort("is_read");
} if ("type".equalsIgnoreCase(request.getSort())) {
}
if ("type".equalsIgnoreCase(request.getSort())) {
request.setSort("type");
} else {
request.setSort("create_time");
......@@ -108,4 +109,17 @@ public class MsgController {
return Result.success(true);
}
/**
 * Get the number of unread messages
 *
 * @param userId user ID
 * @return {@link Result }<{@link Integer }>
 * @author RenPing
 * @date 2023/11/07
 */
@GetMapping("/msg/getUnReadNum")
public Result<Integer> getUnReadNum(@RequestHeader(name = "userId", required = true) String userId) {
return Result.success(msgService.getUnReadNum(userId));
}
}
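A usage sketch for the new unread-count endpoint; only the path and the userId header come from the controller above, while the host, port and the JSON shape of Result are assumptions:

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class UnreadCountClientSketch {
    public static void main(String[] args) {
        HttpHeaders headers = new HttpHeaders();
        headers.set("userId", "demo-user-id"); // required header, see @RequestHeader above
        ResponseEntity<String> resp = new RestTemplate().exchange(
                "http://localhost:8013/msg/getUnReadNum", // assumed host and port
                HttpMethod.GET, new HttpEntity<>(headers), String.class);
        System.out.println(resp.getBody()); // e.g. {"code":...,"data":3} (shape assumed)
    }
}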
......@@ -38,13 +38,13 @@ public interface MsgDao extends JpaRepository<MsgEntity, Integer> {
" and IF(:#{#req.groupId} is not null, tt.group_id=:#{#req.groupId}, 1=1)" +
"group by tt.cluster_id,tt.branch_id,tt.group_id" +
") t"
, nativeQuery = true // enable native SQL
, nativeQuery = true
)
Page<Map<String, Objects>> getGroupList(@Param("req") MsgQuery req, Pageable pageable);
@Query(value = "select t.* from (select tt.*,r.is_read," +
" (select group_concat(u.nickname) from zzz_msg_receiver rr join sys_user u on u.id=rr.user_id where rr.msg_id=tt.id) receivers"+
" (select group_concat(u.nickname) from zzz_msg_receiver rr join sys_user u on u.id=rr.user_id where rr.msg_id=tt.id) receivers" +
" from zzz_msg tt join zzz_msg_receiver r on r.deleted=0 and r.msg_id=tt.id and r.user_id=:#{#req.userId}" +
" where IF(:#{#msgGroupResp.clusterId} is not null, tt.cluster_id=:#{#msgGroupResp.clusterId}, tt.cluster_id is null)" +
" and IF(:#{#msgGroupResp.branchId} is not null, tt.branch_id=:#{#msgGroupResp.branchId}, tt.branch_id is null)" +
......@@ -53,7 +53,7 @@ public interface MsgDao extends JpaRepository<MsgEntity, Integer> {
" and IF(:#{#req.endDate} is not null, tt.create_time<:#{#req.endDate}, 1=1)" +
" and IF(:#{#req.keyWord} is not null, tt.content like concat('%',:#{#req.keyWord},'%'), 1=1)" +
") t"
, nativeQuery = true // enable native SQL
, nativeQuery = true
)
List<Map<String, Objects>> getList(@Param("req") MsgQuery req
, @Param("msgGroupResp") MsgGroupResp msgGroupResp, Pageable pageable);
......@@ -72,13 +72,18 @@ public interface MsgDao extends JpaRepository<MsgEntity, Integer> {
" from sys_user u" +
" join sys_user_org uo on u.id=uo.user_id" +
" join sys_user_role ur on ur.user_id=u.id" +
" join sys_role r on r.id=ur.role_id and r.name='分站派工'" +
" join sys_role r on r.id=ur.role_id" +
" and IF(:groupId is not null, ((uo.org_id=:groupId and uo.org_level=2) or (uo.org_id=:branchId and uo.org_level=1) or (uo.org_id=:clusterId and uo.org_level=0)), 1=1)" +
" and IF(:groupId is null and :branchId is not null, ((uo.org_id=:branchId and uo.org_level=1) or (uo.org_id=:clusterId and uo.org_level=0)), 1=1)" +
" and IF(:groupId is null and :branchId is null and :clusterId is not null, uo.org_id=:clusterId and uo.org_level=0, 1=1)"
, nativeQuery = true // enable native SQL
, nativeQuery = true
)
List<String> getDispatchUserList(@Param("clusterId") String clusterId,
@Param("branchId") String branchId,
@Param("groupId") String groupId);
@Query(value = "select count(distinct tt.id)" +
" from zzz_msg tt join zzz_msg_receiver r on r.deleted=0 and r.msg_id=tt.id and r.is_read=0 and r.user_id=?1"
, nativeQuery = true)
Integer getUnReadNum(String userId);
}
package com.dituhui.pea.order.quartz;
import com.dituhui.pea.order.common.DateUtils;
import com.dituhui.pea.order.scheduler.InitEngineerCapacityScheduler;
import lombok.extern.slf4j.Slf4j;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.quartz.QuartzJobBean;
import org.springframework.stereotype.Component;
import java.time.LocalDate;
/**
 * Per-engineer capacity initialization job: recalculates an engineer's capacity from the calendar records
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Component
@Slf4j
public class InitEngineerCapacityJob extends QuartzJobBean {
public static final String INIT_ENGINEER_JOB_PREFIX = "BOXI_INIT_ENGINEER_CAPACITY_";
@Value("${scheduler.init-engineer-capacity.day-offset-begin}")
private int dayOffsetBegin;
@Value("${scheduler.init-engineer-capacity.day-offset-end}")
private int dayOffsetEnd;
@Autowired
private InitEngineerCapacityScheduler initEngineerCapacityScheduler;
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
try {
JobKey jobKey = jobExecutionContext.getJobDetail().getKey();
String name = jobKey.getName();
String engineerCode = name.substring(INIT_ENGINEER_JOB_PREFIX.length());
long start = System.currentTimeMillis();
log.info(">>> 初始化开始,工程师(engineerCode:{})的容量将根据日历表的记录进行计算设置", engineerCode);
String bdate = DateUtils.formatDate(LocalDate.now().plusDays(dayOffsetBegin));
String edate = DateUtils.formatDate(LocalDate.now().plusDays(dayOffsetEnd));
initEngineerCapacityScheduler.initOneEngineerByDays(bdate, edate, engineerCode);
long end = System.currentTimeMillis();
log.info(">>> 初始化结束,工程师(engineerCode:{})的容量,耗时:{}", engineerCode, end - start);
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
\ No newline at end of file
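Because the listener below registers every job under the name INIT_ENGINEER_JOB_PREFIX + engineerCode, with the group equal to the name, the same job can also be fired on demand. A minimal sketch, assuming access to the shared Scheduler bean and the same package as InitEngineerCapacityJob:

import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;

public class TriggerCapacityJobSketch {

    // Fires the capacity-initialization job for a single engineer immediately, reusing the
    // job naming convention from InitEngineerCapacityJob / InitEngineerCapacityListener.
    public static void triggerFor(Scheduler scheduler, String engineerCode) throws SchedulerException {
        String jobName = InitEngineerCapacityJob.INIT_ENGINEER_JOB_PREFIX + engineerCode;
        scheduler.triggerJob(JobKey.jobKey(jobName, jobName));
    }
}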
package com.dituhui.pea.order.quartz;
import cn.hutool.core.collection.CollectionUtil;
import com.dituhui.pea.order.dao.EngineerInfoDao;
import com.dituhui.pea.order.entity.EngineerInfoEntity;
import lombok.extern.slf4j.Slf4j;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationStartedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Registers a capacity-initialization job for every engineer when the application starts
 *
 * @author RenPing
 * @date 2023/11/01
 */
@Component
@Slf4j
public class InitEngineerCapacityListener implements ApplicationListener<ApplicationStartedEvent> {
@Resource
private Scheduler scheduler;
@Autowired
private EngineerInfoDao engineerInfoDao;
@Value("${scheduler.init-engineer-capacity.cron-expr}")
private String cronExpr;
@Override
public void onApplicationEvent(ApplicationStartedEvent applicationStartedEvent) {
List<String> allEngineerCodes = engineerInfoDao.findAll().stream().map(EngineerInfoEntity::getEngineerCode).collect(Collectors.toList());
allEngineerCodes.forEach(engineerCode -> {
String jobName = InitEngineerCapacityJob.INIT_ENGINEER_JOB_PREFIX + engineerCode;
JobDetail jobDetail = JobBuilder.newJob(InitEngineerCapacityJob.class)
.withIdentity(jobName, jobName)
.storeDurably()
.build();
Trigger trigger = TriggerBuilder.newTrigger()
.forJob(jobDetail)
.withIdentity(jobName, jobName)
.startNow()
.withSchedule(CronScheduleBuilder.cronSchedule(cronExpr))
.build();
try {
scheduler.scheduleJob(jobDetail, CollectionUtil.newHashSet(trigger), true);
} catch (SchedulerException e) {
log.error(e.getMessage(), e);
}
});
}
}
\ No newline at end of file
package com.dituhui.pea.order.quartz;
import org.quartz.Job;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.scheduling.quartz.AdaptableJobFactory;
import org.springframework.stereotype.Component;
/**
 * Custom JobFactory that retrieves the singleton Job bean from the Spring container
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Component
public class MyQuartzJobFactory extends AdaptableJobFactory implements ApplicationContextAware {
private ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
Job job = applicationContext.getBean(bundle.getJobDetail().getJobClass());
return job;
}
}
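MyQuartzJobFactory above resolves the job as a Spring bean, which requires every Job class to be registered as a @Component. A commonly used alternative (a sketch, not part of this commit) lets Quartz create the instance and then autowires its dependencies, so jobs do not need to be beans themselves:

import org.quartz.spi.TriggerFiredBundle;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.scheduling.quartz.SpringBeanJobFactory;

public class AutowiringJobFactorySketch extends SpringBeanJobFactory implements ApplicationContextAware {

    private AutowireCapableBeanFactory beanFactory;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) {
        this.beanFactory = applicationContext.getAutowireCapableBeanFactory();
    }

    @Override
    protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
        // Let Quartz/Spring instantiate the job, then inject its @Autowired fields.
        Object job = super.createJobInstance(bundle);
        beanFactory.autowireBean(job);
        return job;
    }
}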
package com.dituhui.pea.order.quartz;
import org.quartz.spi.JobFactory;
import org.springframework.boot.autoconfigure.quartz.QuartzProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import javax.annotation.Resource;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Properties;
@Configuration
public class QuartzConfig {
@Resource
private JobFactory jobFactory;
@Resource
private QuartzProperties quartzProperties;
@Bean
public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource) throws IOException {
SchedulerFactoryBean factory = new SchedulerFactoryBean();
factory.setDataSource(dataSource);
Properties properties = new Properties();
for (String key : quartzProperties.getProperties().keySet()) {
properties.put(key, quartzProperties.getProperties().get(key));
}
factory.setQuartzProperties(properties);
factory.setJobFactory(jobFactory);
return factory;
}
}
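The key-by-key copy in schedulerFactoryBean can be collapsed into a single call; an equivalent sketch, since QuartzProperties#getProperties returns a Map<String, String>:

import java.util.Properties;
import org.springframework.boot.autoconfigure.quartz.QuartzProperties;

class QuartzPropertiesSketch {

    // Equivalent to the for-loop above: Properties#putAll copies all entries at once.
    static Properties toProperties(QuartzProperties quartzProperties) {
        Properties properties = new Properties();
        properties.putAll(quartzProperties.getProperties());
        return properties;
    }
}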
......@@ -227,6 +227,17 @@ public class InitEngineerCapacityScheduler {
log.info("====== 处理完毕 ======");
}
public void initOneEngineerByDays(String bdate, String edate, String engineerCode) {
log.info("==== initAllEngineerByDays, bdate[{}] edate[{}]", bdate, edate);
LocalDate currentDate = DateUtils.localDateFromStr(bdate);
LocalDate endDate = DateUtils.localDateFromStr(edate);
List<String> allEngineerCodes = engineerInfoDao.findAll().stream().map(EngineerInfoEntity::getEngineerCode).collect(Collectors.toList());
while (!currentDate.isAfter(endDate)) {
initOneEngineer(DateUtils.formatDate(currentDate), engineerCode);
currentDate = currentDate.plusDays(1);
}
}
private void initAllEngineerByDays(String bdate, String edate) {
log.info("==== initAllEngineerByDays, bdate[{}] edate[{}]", bdate, edate);
LocalDate currentDate = DateUtils.localDateFromStr(bdate);
......@@ -240,7 +251,7 @@ public class InitEngineerCapacityScheduler {
}
}
@Scheduled(cron = "${scheduler.init-engineer-capacity.cron-expr}")
//@Scheduled(cron = "${scheduler.init-engineer-capacity.cron-expr}")
public void run() {
log.info("开始初始化,所有工程师的容量将根据日历表的记录进行计算设置");
String bdate = DateUtils.formatDate(LocalDate.now().plusDays(dayOffsetBegin));
......
......@@ -49,4 +49,14 @@ public interface MsgService {
* @date 2023/10/25
*/
void add(MsgDTO dto);
/**
 * Get the number of unread messages
 *
 * @param userId user ID
 * @return {@link Integer }
 * @author RenPing
 * @date 2023/11/07
 */
Integer getUnReadNum(String userId);
}
......@@ -64,9 +64,13 @@ public class EngineerGanttServiceImpl implements EngineerGanttService {
// TODO: choose expectXXXX or actualXXXX based on the order's time and status
slot.setOrderId(order.getOrderId());
slot.setServiceStatus(order.getServiceStatus());
String startServiceStr = null;
if (checkOrderIsFinish(order.getServiceStatus())) {
slot.setBtime(ObjectUtil.isNull(order.getActualStartTime()) ? null : getHourMinute(order.getActualStartTime()));
slot.setEtime(ObjectUtil.isNull(order.getActualEndTime()) ? null :getHourMinute(order.getActualEndTime()));
slot.setEtime(ObjectUtil.isNull(order.getActualEndTime()) ? null : getHourMinute(order.getActualEndTime()));
if (ObjectUtil.isNotNull(slot.getBtime()) && ObjectUtil.isNotNull(slot.getEtime())) {
startServiceStr = getHourMinute(order.getActualStartTime()) + "-" + getHourMinute(order.getActualEndTime());
}
} else if (order.getPlanStartTime() != null && StringUtils.isNotEmpty(order.getPlanStartTime().toString())) {
slot.setBtime(getHourMinute(order.getPlanStartTime()));
slot.setEtime(getHourMinute(order.getPlanEndTime()));
......@@ -74,7 +78,7 @@ public class EngineerGanttServiceImpl implements EngineerGanttService {
slot.setBtime(getHourMinute(order.getExpectTimeBegin()));
slot.setEtime(getHourMinute(order.getExpectTimeEnd()));
}
slot.setTooltip(getOrderTips(order));
slot.setTooltip(getOrderTips(order, startServiceStr));
OrderSkillProjection orderSkill = orderInfoDao.getOrderSkillCaptionByOrderIdAndDt(order.getOrderId(), localDate);
if (orderSkill != null) {
slot.setText(orderSkill.getSkillCaption());
......@@ -194,7 +198,7 @@ public class EngineerGanttServiceImpl implements EngineerGanttService {
return tips;
}
private List<LabelValueDTO> getOrderTips(OrderInfoEntity order) {
private List<LabelValueDTO> getOrderTips(OrderInfoEntity order, String startServiceStr) {
// log.info("==> getOrderTips: orderId[{}]", orderId);
List<LabelValueDTO> tips = new ArrayList<>();
tips.add(new LabelValueDTO().setLabel("工单号码").setValue(order.getOrderId()));
......@@ -206,6 +210,9 @@ public class EngineerGanttServiceImpl implements EngineerGanttService {
DateUtils.formatDateTime(order.getExpectTimeBegin(), "HH:mm"), DateUtils.formatDateTime(order.getExpectTimeEnd(), "HH:mm"))));
tips.add(new LabelValueDTO().setLabel("计划时间").setValue(String.format("%s-%s",
DateUtils.formatDateTime(order.getPlanStartTime(), "HH:mm"), DateUtils.formatDateTime(order.getPlanEndTime(), "HH:mm"))));
if (StringUtils.isNotBlank(startServiceStr)) {
tips.add(new LabelValueDTO().setLabel("服务时间").setValue(startServiceStr));
}
if (StringUtils.isNotBlank(order.getApplyNote())) {
tips.add(new LabelValueDTO().setLabel("备注").setValue(order.getApplyNote()));
}
......
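The gantt slots and the new service-time tooltip both rely on a getHourMinute helper that is not shown in this diff. A plausible sketch of what it does, purely for illustration (the real helper may differ):

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

class HourMinuteSketch {

    private static final DateTimeFormatter HH_MM = DateTimeFormatter.ofPattern("HH:mm");

    // Hypothetical stand-in for the getHourMinute(...) helper used above.
    static String getHourMinute(LocalDateTime time) {
        return time == null ? null : time.format(HH_MM);
    }
}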
......@@ -135,4 +135,9 @@ public class MsgServiceImpl implements MsgService {
msgReceiverDao.save(msgReceiverEntity);
});
}
@Override
public Integer getUnReadNum(String userId) {
return msgDao.getUnReadNum(userId);
}
}
......@@ -305,6 +305,9 @@ public class OrderAssignImpl implements OrderAssign {
} else if (recommend.equals("group")) {
levelType = "group";
levelValue = order.getOrgGroupId();
} else if (recommend.equals("all")) {
levelType = "cluster";
levelValue = order.getOrgClusterId();
} else {
return new HashSet<>();
}
......
......@@ -268,6 +268,11 @@ public class OrderCreateServiceImpl implements OrderCreateService {
String joinTags = CollectionUtils.isEmpty(req.getOrderTags()) ? "" : String.join(",", req.getOrderTags());
entity.setBeanTags(joinTags);
entity.setBeanPriority(req.getPriority());
if (req.getPriority().equals("1")){
entity.setAppointmentMethod(AppointmentMethodEnum.AUTO_NOW.name());
// Send a notification message to the branch
sendMsg(entity.getOrgBranchId(), orderId, entity.getExpectTimeBegin().toLocalDate());
}
entity.setIsMultiple(joinTags.contains("重物搬运") ? 1 : 0);
// Multiple orders for one household
......
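req.getPriority().equals("1") above throws a NullPointerException if the request carries no priority. A defensive variant of the same branch (a sketch only; the project may already guarantee that the field is set):

// Constant-first comparison avoids an NPE when priority is absent.
if ("1".equals(req.getPriority())) {
    entity.setAppointmentMethod(AppointmentMethodEnum.AUTO_NOW.name());
    // Send a notification message to the branch
    sendMsg(entity.getOrgBranchId(), orderId, entity.getExpectTimeBegin().toLocalDate());
}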
......@@ -161,6 +161,11 @@ public class OrderInfoServiceImpl implements OrderInfoService {
if (StringUtils.isBlank(request.getDescription())) {
orderInfo.setDescription(request.getDescription());
}
// Handle the multiple-orders-per-household logic
Result<String> deleteMultipleOrders = this.deleteMultipleOrders(orderInfo.getMultipleOrders(), orderInfo.getAddressId());
if (!deleteMultipleOrders.getCode().equals(ResultEnum.SUCCESS.getCode())) {
throw new BusinessException("改约日期失败");
}
orderInfoDao.save(orderInfo);
// Record the order event
commonService.addOrderEvent(request.getOrderId(), "", "BEAN", "BEAN", OrderStatusEnum.CANCELED.getDescription(), request.getStage(), "", LocalDateTimeUtil.of(request.getHappen()));
......@@ -210,10 +215,12 @@ public class OrderInfoServiceImpl implements OrderInfoService {
}
LocalDateTime expectEnd = TimeUtils.IsoDateTime2LocalDateTime(req.getExpectEnd());
OrderInfoEntity order = orderInfoDao.getByOrderId(req.getOrderId());
LocalDate orderDt = order.getDt();
order.setDt(expectBegin.toLocalDate());
order.setExpectTimeBegin(expectBegin);
order.setExpectTimeEnd(expectEnd);
order.setExpectTimeDesc(req.getExpectDesc());
order.setOrderStatus(OrderStatusEnum.RESCHEDULED.getCode());
if (order == null) {
throw new BusinessException("订单不存在");
}
......@@ -233,7 +240,6 @@ public class OrderInfoServiceImpl implements OrderInfoService {
// ii. Outlet: prefer reassigning the rescheduled order to the current outlet
// (not to the current engineer)
if (!isToday && isBelong) {
order = handleMultipleOrders(order);
} else if (isToday && isBelong) {
// b. Already assigned - rescheduled to today
// i. In-house: prefer reassigning to the current engineer
......@@ -245,7 +251,6 @@ public class OrderInfoServiceImpl implements OrderInfoService {
// (not to the current engineer)
if (!isNetwork) {
order.setServiceStatus(ServiceStatusEnum.INIT.getCode());
order.setOrderStatus(OrderStatusEnum.RESCHEDULED.getCode());
CapacityQueryDTO.Service service = new CapacityQueryDTO.Service();
service.setBrand(order.getBrand());
service.setProductType(order.getType());
......@@ -266,7 +271,6 @@ public class OrderInfoServiceImpl implements OrderInfoService {
// a. Not yet assigned - rescheduled to a future day
// i. Dynamic scheduling
order.setServiceStatus(ServiceStatusEnum.INIT.getCode());
order.setOrderStatus(OrderStatusEnum.RESCHEDULED.getCode());
if (StringUtils.isBlank(req.getDescription())) {
order.setDescription(order.getDescription());
}
......@@ -319,8 +323,6 @@ public class OrderInfoServiceImpl implements OrderInfoService {
order.setIsSpecialTime(CommonUtil.isSpecial(order.getExpectTimeBegin().toLocalTime(),
order.getExpectTimeEnd().toLocalTime(), teamInfo.getWorkOn(), teamInfo.getWorkOff()));
}
// Handle multiple orders for one household
order = handleMultipleOrders(order);
} else if (isToday && !isBelong) {
// b. Not yet assigned - rescheduled to today
// i. Manual assignment
......@@ -332,6 +334,11 @@ public class OrderInfoServiceImpl implements OrderInfoService {
sendMsg(branchEntity.getBranchId(), order.getOrderId(), req.getExpectBegin());
}
// Handle multiple orders for one household
// order = handleMultipleOrders(order);
if (!orderDt.isEqual(expectBegin.toLocalDate())){
order = handleMultipleOrders(order);
}
order.setOrgClusterId(clusterId);
order.setOrgBranchId(branchId);
orderInfoDao.save(order);
......
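In the reschedule hunk above, the newly added lines read and modify order before the pre-existing if (order == null) guard, so a missing order would fail with a NullPointerException instead of the intended BusinessException. A sketch of the guard-first ordering, using only statements already present in the diff:

OrderInfoEntity order = orderInfoDao.getByOrderId(req.getOrderId());
if (order == null) {
    throw new BusinessException("订单不存在");
}
LocalDate orderDt = order.getDt();
order.setDt(expectBegin.toLocalDate());
order.setExpectTimeBegin(expectBegin);
order.setExpectTimeEnd(expectEnd);
order.setExpectTimeDesc(req.getExpectDesc());
order.setOrderStatus(OrderStatusEnum.RESCHEDULED.getCode());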
server:
port: 8013
spring:
application:
name: project-order
jackson:
default-property-inclusion: NON_NULL
time-zone: GMT+8
date-format: yyyy-MM-dd HH:mm:ss
cloud:
nacos:
discovery:
server-addr: nacos-server:8848
group: project
config:
server-addr: nacos-server:8848
group: project
file-extension: yaml
import-check:
# no config file
enabled: false
config:
import:
- optional:nacos:redis-config.yaml?group=project&refreshEnabled=true
- optional:nacos:bean-kafka-config.yaml?group=project&refreshEnabled=true
#config:
# import:
# - optional:nacos:project-order.yaml
# - optional:nacos:datasource-config.yaml
datasource:
driver-class-name: com.mysql.cj.jdbc.Driver
#url: jdbc:mysql://10.10.0.116:32306/saas_aftersale_test?serverTimezone=Asia/Shanghai
url: jdbc:mysql://localhost:32306/saas_aftersale_test?serverTimezone=Asia/Shanghai
username: boxi
password: boxi_dev_0725
type: com.alibaba.druid.pool.DruidDataSource
jpa:
show-sql: true
properties:
hibernate:
format_sql: true
seata:
application-id: ${spring.application.name}
tx-service-group: ${spring.application.name}-group
service:
vgroup-mapping:
project-order-group: default
grouplist:
default: seata-server:8091
mybatis-plus:
global-config:
db-config:
id-type: auto
configuration:
log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
sentry:
dsn: http://85293a9310fc4a8187422a7a257fc1ba@sentry.zjhuixinyun.com/8
# Set traces_sample_rate to 1.0 to capture 100%
# of transactions for performance monitoring.
# We recommend adjusting this value in production.
tracesSampleRate: 1.0
exception-resolver-order: -2147483647
SaaS:
url: https://pea-test.bshg.com.cn
ak: 64e1cde3f9144bfb850b7d37c51af559
scheduler:
init-engineer-capacity:
# once per day at 22:00
#cron-expr: 0 0 22 * * ?
cron-expr: 0 32 * * * ?
day-offset-begin: 0
day-offset-end: 14
rewrite-force: true
calc-engineer-capacity:
# every 30 minutes, hours 01-23
cron-expr: 0 */30 1-23 * * ?
day-offset-begin: 0
day-offset-end: 14
calc-org-capacity:
# hourly, hours 01-23
cron-expr: 0 0 1-23 * * ?
day-offset-begin: 0
day-offset-end: 14
\ No newline at end of file
......@@ -34,6 +34,50 @@ spring:
username: boxi
password: boxi_dev_0725
type: com.alibaba.druid.pool.DruidDataSource
# quartz configuration
quartz:
# Whether to wait for running jobs to finish when the application shuts down. Defaults to false; true is recommended
wait-for-jobs-to-complete-on-shutdown: true
# Whether to overwrite existing job definitions. Note: when false, cron changes to already-registered jobs do not take effect
overwrite-existing-jobs: true
# Quartz native properties
properties:
org:
quartz:
scheduler:
#Scheduler name; every instance in the cluster must use the same name
instanceName: ProjectOrderScheduler
#Instance ID is assigned automatically; each instance must be unique
instanceId: AUTO
makeSchedulerThreadDaemon: false
jobStore:
#class: org.quartz.impl.jdbcjobstore.JobStoreTX
#spring-boot-starter-quartz v2.5.7 and above
class: org.springframework.scheduling.quartz.LocalDataSourceJobStore
driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate
#Table name prefix, default QRTZ_
tablePrefix: QRTZ_
#Whether this instance joins the cluster
isClustered: true
#Check-in interval for detecting failed scheduler instances
clusterCheckinInterval: 10000
useProperties: false
#Misfire tolerance for triggers (in milliseconds), 20 minutes
misfireThreshold: 1200000
threadPool:
class: org.quartz.simpl.SimpleThreadPool
# Whether threads created in the pool are daemon threads
makeThreadsDaemons: false
#Number of worker threads, at least 1 (no default; typically 1-100)
threadCount: 10
#Thread priority (max java.lang.Thread.MAX_PRIORITY 10, min Thread.MIN_PRIORITY 1, default 5)
threadPriority: 5
threadsInheritContextClassLoaderOfInitializingThread: true
#JDBC-backed job store
job-store-type: jdbc
#Initialize the table schema: use "always" for the first start, then switch back to "embedded" on later restarts
jdbc:
initialize-schema: embedded
seata:
application-id: ${spring.application.name}
......
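With job-store-type: jdbc and isClustered: true, the effective scheduler settings can be verified at runtime from the scheduler metadata. A small sketch (wiring of the Scheduler bean is assumed) that should report the values configured above, i.e. 10 worker threads and a clustered JDBC job store:

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SchedulerMetaData;

class SchedulerCheckSketch {

    // Logs the scheduler identity, thread pool size and clustering flag.
    static void report(Scheduler scheduler) throws SchedulerException {
        SchedulerMetaData meta = scheduler.getMetaData();
        System.out.println("instance: " + meta.getSchedulerName() + "/" + meta.getSchedulerInstanceId());
        System.out.println("threads: " + meta.getThreadPoolSize());
        System.out.println("clustered jdbc store: " + meta.isJobStoreClustered());
    }
}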