Commit 51c8cab8 by Ren Ping

feat:任务执行分布式

1 parent ae692a05
Showing with 910 additions and 145 deletions
......@@ -11,5 +11,5 @@
/logs/
/*/.gitignore
dispatchSolution-*.json
/project-order/src/main/resources/application-dev.yaml
*.idea
*.json
......@@ -36,20 +36,20 @@
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.version}</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.alibaba.cloud</groupId>-->
<!-- <artifactId>spring-cloud-starter-alibaba-seata</artifactId>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.alibaba.cloud</groupId>-->
<!-- <artifactId>spring-cloud-starter-alibaba-seata</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>com.alibaba.cloud</groupId>
......@@ -60,52 +60,52 @@
<groupId>com.alibaba.cloud</groupId>
<artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>project-interface</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>org.optaplanner</groupId>
<artifactId>optaplanner-core</artifactId>
<version>${version.org.optaplanner}</version>
</dependency>
<dependency>
<groupId>org.optaplanner</groupId>
<artifactId>optaplanner-persistence-jackson</artifactId>
<version>${version.org.optaplanner}</version>
</dependency>
<!-- Testing -->
<dependency>
<groupId>org.optaplanner</groupId>
<artifactId>optaplanner-test</artifactId>
<scope>test</scope>
<version>${version.org.optaplanner}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.13.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba.cloud</groupId>
<artifactId>project-interface</artifactId>
<version>${revision}</version>
</dependency>
<dependency>
<groupId>org.optaplanner</groupId>
<artifactId>optaplanner-core</artifactId>
<version>${version.org.optaplanner}</version>
</dependency>
<dependency>
<groupId>org.optaplanner</groupId>
<artifactId>optaplanner-persistence-jackson</artifactId>
<version>${version.org.optaplanner}</version>
</dependency>
<!-- Testing -->
<dependency>
<groupId>org.optaplanner</groupId>
<artifactId>optaplanner-test</artifactId>
<scope>test</scope>
<version>${version.org.optaplanner}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.13.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.gavaghan</groupId>
......@@ -130,18 +130,22 @@
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>easyexcel</artifactId>
<version>3.3.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>easyexcel</artifactId>
<version>3.3.2</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
<!-- quartz依赖 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-quartz</artifactId>
</dependency>
</dependencies>
<build>
......
package com.dituhui.pea.dispatch.dao;
import com.dituhui.pea.dispatch.entity.DispatchEngineer;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
public interface DispatchEngineerRepository extends CrudRepository<DispatchEngineer, Long> {
public interface DispatchEngineerRepository extends CrudRepository<DispatchEngineer, Long>, JpaRepository<DispatchEngineer, Long> {
List<DispatchEngineer> findByGroupId(String groupId);
......
package com.dituhui.pea.dispatch.dao;
import com.dituhui.pea.dispatch.entity.DispatchOrder;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import java.util.List;
import java.util.Optional;
public interface DispatchOrderRepository extends CrudRepository<DispatchOrder, Long> {
public interface DispatchOrderRepository extends CrudRepository<DispatchOrder, Long>, JpaRepository<DispatchOrder, Long> {
// 查看未指派非confirm的,供算法计算
......@@ -31,4 +32,6 @@ public interface DispatchOrderRepository extends CrudRepository<DispatchOrder, L
List<DispatchOrder> findAllWithoutConfirm2(String teamId, String batchNo);
Optional<DispatchOrder> findByGroupIdAndBatchNoAndOrderIdAndDt(String groupId, String batchNo, String orderId, String dt);
List<DispatchOrder> findByTeamIdAndBatchNo(String teamId, String batchNo);
}
\ No newline at end of file
package com.dituhui.pea.dispatch.dao;
import com.dituhui.pea.dispatch.entity.OrgBranchEntity;
import org.hibernate.annotations.Where;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Repository for {@link OrgBranchEntity} (table org_branch), the branch level
 * of the organisation tree (cluster -> branch -> group -> team).
 */
@Repository
// NOTE(review): @Where is a Hibernate *entity* annotation; placed on a repository
// interface it has no filtering effect. If rows with status != 1 must be hidden,
// move @Where(clause = "status = 1") onto OrgBranchEntity — TODO confirm intent.
@Where(clause = "status = 1")
public interface OrgBranchDao extends JpaRepository<OrgBranchEntity, Integer> {
/** All branches belonging to the given cluster. */
List<OrgBranchEntity> findAllByClusterId(String clusterId);
/** Single branch by its business id. */
OrgBranchEntity getByBranchId(String branchId);
/** Branches whose branchId is contained in the given list. */
List<OrgBranchEntity> findByBranchIdIn(List<String> ids);
/** First branch whose citycode_list matches the given LIKE pattern. */
OrgBranchEntity findByCitycodeListLike(String citycodeList);
}
package com.dituhui.pea.dispatch.dao;
import com.dituhui.pea.dispatch.entity.OrgClusterEntity;
import org.hibernate.annotations.Where;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Repository for {@link OrgClusterEntity} (table org_cluster), the top level
 * of the organisation tree.
 */
@Repository
// NOTE(review): @Where is a Hibernate *entity* annotation; placed on a repository
// interface it has no filtering effect. If rows with status != 1 must be hidden,
// move @Where(clause = "status = 1") onto OrgClusterEntity — TODO confirm intent.
@Where(clause = "status = 1")
public interface OrgClusterDao extends JpaRepository<OrgClusterEntity, Integer> {
/** Single cluster by its business id. */
OrgClusterEntity getByClusterId(String clusterId);
/** Clusters whose clusterId is contained in the given list. */
List<OrgClusterEntity> findByClusterIdIn(List<String> ids);
}
......@@ -12,4 +12,6 @@ import org.springframework.stereotype.Repository;
/** Repository for OrgGroup rows (group level of the organisation tree). */
@Repository
public interface OrgGroupRepository extends CrudRepository<OrgGroup, Long> {
/** Single group by its business id, empty if unknown. */
Optional<OrgGroup> findByGroupId(String groupId);
/** All groups under the given branch. */
List<OrgGroup> findAllByBranchId(String branchId);
}
......@@ -4,6 +4,7 @@ import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import com.dituhui.pea.dispatch.entity.OrgTeamEntity;
......@@ -11,10 +12,16 @@ import com.dituhui.pea.dispatch.entity.OrgTeamEntity;
@Repository
public interface OrgTeamDao extends JpaRepository<OrgTeamEntity, Integer>, JpaSpecificationExecutor<OrgTeamEntity> {
OrgTeamEntity findByTeamNameAndGroupId(String teamName, String groupId);
List<OrgTeamEntity> findByGroupId(String groupId);
OrgTeamEntity findByTeamId(String teamId);
OrgTeamEntity findByTeamNameAndGroupId(String teamName, String groupId);
List<OrgTeamEntity> findByGroupId(String groupId);
OrgTeamEntity findByTeamId(String teamId);
@Query(value = "select t.* from org_team t\n" +
"join org_group g on g.group_id=t.group_id and t.`status`=1 and g.`status`=1\n" +
"join org_branch b on b.branch_id=g.branch_id and b.`status`=1\n" +
"join org_cluster c on c.cluster_id=b.cluster_id and c.`status`=1",
nativeQuery = true)
List<OrgTeamEntity> findAllTeam();
}
package com.dituhui.pea.dispatch.entity;
import lombok.Data;
import javax.persistence.*;
import java.time.LocalDateTime;
/**
 * JPA entity for table org_branch: one branch-level node of the organisation
 * tree (cluster -> branch -> group -> team).
 */
@Entity
@Data
@Table(name = "org_branch")
public class OrgBranchEntity {
// Surrogate primary key (auto increment).
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
// Business id of this branch.
private String branchId;
private String branchName;
// Business id of the parent cluster.
private String clusterId;
private String address;
// Coordinate of the branch; presumably longitude/latitude stored as
// strings — TODO confirm against how x/y are consumed.
private String x;
private String y;
// City codes covered by this branch (delimited list, queried via LIKE).
private String citycodeList;
private Integer kind;
private String layerId;
private String memo;
// Timestamps default to object-creation time; not auto-updated by JPA here.
private LocalDateTime createTime = LocalDateTime.now();
private LocalDateTime updateTime = LocalDateTime.now();
public OrgBranchEntity() {
}
/**
 * Abbreviated department name.
 */
private String abbreviation;
/**
 * Department code.
 */
private String code;
/**
 * Contact phone of the department head.
 */
private String phone;
/**
 * Whether the peripheral warehouse is enabled (0 = disabled, 1 = enabled).
 */
private Integer warehouseEnabled = 0;
/**
 * Maximum retention time for spare parts, in days.
 */
private Integer reserveTimeMax = 0;
/**
 * Record status (0 = inactive, 1 = active).
 */
private Integer status = 1;
}
package com.dituhui.pea.dispatch.entity;
import lombok.Data;
import javax.persistence.*;
import java.time.LocalDateTime;
/**
 * JPA entity for table org_cluster: the top-level node of the organisation
 * tree (cluster -> branch -> group -> team).
 */
@Entity
@Data
@Table(name = "org_cluster")
public class OrgClusterEntity {
// Surrogate primary key (auto increment).
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
// Business id of this cluster.
private String clusterId;
private String name;
// City codes covered by this cluster (delimited list).
private String citycodeList;
private String address;
private String cityName;
// Record status (0 = inactive, 1 = active).
private Integer status = 1;
private String updateUser;
// Timestamps default to object-creation time; not auto-updated by JPA here.
private LocalDateTime createTime = LocalDateTime.now();
private LocalDateTime updateTime = LocalDateTime.now();
public OrgClusterEntity() {
}
/**
 * Department description (optional).
 */
private String memo;
/**
 * Abbreviated department name.
 */
private String abbreviation;
/**
 * Department code.
 */
private String code;
/**
 * Contact phone of the department head.
 */
private String phone;
/**
 * Whether the peripheral warehouse is enabled (0 = disabled, 1 = enabled).
 */
private Integer warehouseEnabled = 0;
/**
 * Maximum retention time for spare parts, in days.
 */
private Integer reserveTimeMax = 0;
}
package com.dituhui.pea.dispatch.quartz;
import com.dituhui.pea.dispatch.service.SchedulerService;
import lombok.extern.slf4j.Slf4j;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.springframework.scheduling.quartz.QuartzJobBean;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
/**
* 自动派工任务
*
* @author RenPing
* @date 2023/11/02
*/
@Component
@Slf4j
public class AutoDispatchJob extends QuartzJobBean {
public static final String TEAM_JOB_PREFIX="BOXI_TEAM_";
@Resource
private SchedulerService schedulerService;
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
try {
System.out.println(this);
JobKey jobKey = jobExecutionContext.getJobDetail().getKey();
String name = jobKey.getName();
String teamId = name.substring(TEAM_JOB_PREFIX.length());
long start = System.currentTimeMillis();
log.info(">>> 自动派工(teamId:{}) 自动任务开始", teamId);
schedulerService.dispatchRun2(teamId);
long end = System.currentTimeMillis();
log.info(">>> 自动派工(teamId:{}) 自动任务结束,耗时:{}", teamId, end - start);
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
\ No newline at end of file
package com.dituhui.pea.dispatch.quartz;
import org.quartz.Job;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.scheduling.quartz.AdaptableJobFactory;
import org.springframework.stereotype.Component;
/**
 * JobFactory that resolves job instances from the Spring container instead of
 * instantiating them reflectively, so jobs get their injected dependencies.
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Component
public class MyQuartzJobFactory extends AdaptableJobFactory implements ApplicationContextAware {
    private ApplicationContext ctx;
    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.ctx = applicationContext;
    }
    @Override
    protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
        // Look the job up as a Spring bean so @Resource/@Autowired fields are populated.
        return ctx.getBean(bundle.getJobDetail().getJobClass());
    }
}
package com.dituhui.pea.dispatch.quartz;
import org.quartz.spi.JobFactory;
import org.springframework.boot.autoconfigure.quartz.QuartzProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import javax.annotation.Resource;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Properties;
/**
 * Quartz scheduler configuration: builds a JDBC-backed
 * {@link SchedulerFactoryBean} wired with the Spring-aware job factory
 * (see MyQuartzJobFactory) so jobs are resolved from the application context.
 */
@Configuration
public class QuartzConfig {
    @Resource
    private JobFactory jobFactory;
    /** spring.quartz.* properties bound from the environment. */
    @Resource
    private QuartzProperties quartzProperties;
    /**
     * Scheduler factory backed by the application {@link DataSource}
     * (durable job store).
     *
     * @param dataSource JDBC store for jobs and triggers
     * @return configured scheduler factory bean
     */
    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource) throws IOException {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setDataSource(dataSource);
        // Copy every spring.quartz.properties entry in one call instead of
        // iterating keySet() and re-fetching each value.
        Properties properties = new Properties();
        properties.putAll(quartzProperties.getProperties());
        factory.setQuartzProperties(properties);
        factory.setJobFactory(jobFactory);
        return factory;
    }
}
package com.dituhui.pea.dispatch.quartz;
import cn.hutool.core.collection.CollectionUtil;
import com.dituhui.pea.dispatch.dao.OrgTeamDao;
import com.dituhui.pea.dispatch.entity.OrgTeamEntity;
import lombok.extern.slf4j.Slf4j;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationStartedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;
/**
 * Registers one cron-scheduled {@link AutoDispatchJob} per active team once
 * the application has started. Existing jobs with the same key are replaced
 * (scheduleJob(..., true)), so restarts re-sync the schedule.
 *
 * @author RenPing
 * @date 2023/11/01
 */
@Component
@Slf4j
public class QuartzTaskListener implements ApplicationListener<ApplicationStartedEvent> {
    @Resource
    private Scheduler scheduler;
    @Resource
    private OrgTeamDao orgTeamDao;
    /** Cron expression shared by all per-team dispatch jobs. */
    @Value("${dispatch.cron.expr}")
    private String dispatchCron;
    @Override
    public void onApplicationEvent(ApplicationStartedEvent applicationStartedEvent) {
        List<OrgTeamEntity> teamList = orgTeamDao.findAllTeam();
        teamList.forEach(orgTeamEntity -> {
            // Job and trigger share the key BOXI_TEAM_<teamId>; AutoDispatchJob
            // recovers the team id from this name.
            String jobName = AutoDispatchJob.TEAM_JOB_PREFIX + orgTeamEntity.getTeamId();
            JobDetail jobDetail = JobBuilder.newJob(AutoDispatchJob.class)
                    .withIdentity(jobName, jobName)
                    .storeDurably()
                    .build();
            Trigger trigger = TriggerBuilder.newTrigger()
                    .forJob(jobDetail)
                    .withIdentity(jobName, jobName)
                    .startNow()
                    .withSchedule(CronScheduleBuilder.cronSchedule(dispatchCron))
                    .build();
            try {
                scheduler.scheduleJob(jobDetail, CollectionUtil.newHashSet(trigger), true);
            } catch (SchedulerException e) {
                // Log (do not swallow silently) and keep scheduling the other teams.
                log.error("注册自动派工任务失败, jobName:{}", jobName, e);
            }
        });
    }
}
......@@ -26,7 +26,7 @@ import com.dituhui.pea.dispatch.utils.DispatchSolutionUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Component
//@Component
public class BatchScheduler {
@Value("${dispatch.cron.next-day-limit}")
......@@ -55,7 +55,7 @@ public class BatchScheduler {
/*
* 异步执行任务开始
*/
@Scheduled(cron = "${dispatch.cron.expr}")
//@Scheduled(cron = "${dispatch.cron.expr}")
public void dispatchRun2() {
String groupId = "gsuzhou";
log.info("dispatchRun group:{}", groupId);
......@@ -83,12 +83,12 @@ public class BatchScheduler {
DispatchSolution solution = solver.solve(problem);
DispatchSolutionUtils.removeHardConstraintCustomer(solution, solverFactory);
log.info("dispatchRun solve done, teamId:{}, day:{}, batch:{}, problemId:{}, score:{}", teamId, currDay, batchNo, problemId, solution.getScore().toShortString());
this.solveService.saveSolutionWrp(solution);
this.solveService.saveSolutionWrp2(solution);
this.extractService.extractDispatchToOrder2(teamId, batchNo, false);
log.info("dispatchRun done ------ teamId:{}, day:{}", teamId, currDay);
JacksonSolutionFileIO<DispatchSolution> exporter = new JacksonSolutionFileIO<DispatchSolution>(DispatchSolution.class);
exporter.write(solution, new File(String.format("dispatchSolution_%s_%s.json",groupId, currDay)));
exporter.write(solution, new File(String.format("dispatchSolution_%s_%s.json",teamId, currDay)));
}
}
......
package com.dituhui.pea.dispatch.service;
/**
 * Entry point of the automatic dispatch, invoked by the per-team Quartz jobs.
 */
public interface SchedulerService {
/**
 * Run the automatic dispatch using a single work team as the batch unit.
 *
 * @param teamId id of the work team to dispatch
 * @author RenPing
 * @date 2023/11/02
 */
void dispatchRun2(String teamId);
}
......@@ -35,6 +35,13 @@ public interface SolveService {
void saveSolutionWrp(DispatchSolution solution) throws RuntimeException;
/*
* 将计算结果回写到dispatch2个表
* 是下面两个方法的包装
* */
void saveSolutionWrp2(DispatchSolution solution) throws RuntimeException;
}
......@@ -5,10 +5,16 @@ import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.persistence.EntityManager;
import com.alibaba.fastjson.JSONObject;
import com.dituhui.pea.dispatch.dao.DispatchEngineerRepository;
import com.dituhui.pea.dispatch.dao.DispatchOrderRepository;
import com.dituhui.pea.dispatch.entity.DispatchEngineer;
import com.dituhui.pea.dispatch.entity.DispatchOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
......@@ -36,7 +42,13 @@ public class BatchServiceImpl implements BatchService {
@Autowired
DispatchBatchRepository batchRepository;
@Autowired
DispatchOrderRepository dispatchOrderRepository;
@Autowired
DispatchEngineerRepository dispatchEngineerRepository;
@Autowired
OrderInfoRepository orderInfoRepository;
......@@ -179,45 +191,49 @@ public class BatchServiceImpl implements BatchService {
return batchNo;
}
// 检查给定小队、日期是否有在运行的批次任务,没则返回,没有则创建
@Transactional(isolation = Isolation.READ_COMMITTED)
@Transactional(isolation = Isolation.READ_COMMITTED, rollbackFor = Exception.class)
@Override
public String buildBatchData2(String teamId, String day) {
entityManager.clear();
log.info("准备批次数据, teamId:{}, day:{}", teamId, day);
String batchNo = "";
String batchDay = "";
Optional<DispatchBatch> optional = batchRepository.findByTeamIdAndBatchDate(teamId, day);
if (!optional.isPresent()) {
// 创建batch
batchNo = calcBatchNo(day);
batchDay = day;
// 执行数据库操作
String sqlInsert = "INSERT INTO `dispatch_batch` ( `team_id`, `batch_no`, `batch_date`, `engineer_num`, `order_num`, `start_time`, `end_time`, `status`) " +
" VALUES(?, ?, ?, ?, ?, ?, ?, ?)";
jdbcTemplate.update(sqlInsert, teamId, batchNo, batchDay, 0, 0, LocalDateTime.now(), null, "RUNNING");
log.info("生成新批次, teamId:{}, day:{}", teamId, batchDay);
} else {
batchNo = optional.get().getBatchNo();
batchDay = optional.get().getBatchDate();
}
log.info("清理原批次数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
jdbcTemplate.update("delete from dispatch_engineer where team_id=? and batch_no=?", teamId, batchNo);
jdbcTemplate.update("delete from dispatch_order where team_id=? and batch_no=?", teamId, batchNo);
log.info("写入新批次技术员、工单数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
entityManager.clear();
log.info("准备批次数据, teamId:{}, day:{}", teamId, day);
String batchNo = "";
String batchDay = "";
Optional<DispatchBatch> optional = batchRepository.findByTeamIdAndBatchDate(teamId, day);
if (!optional.isPresent()) {
// 创建batch
batchNo = calcBatchNo(day);
batchDay = day;
// 执行数据库操作
String sqlInsert = "INSERT INTO `dispatch_batch` ( `team_id`, `batch_no`, `batch_date`, `engineer_num`, `order_num`, `start_time`, `end_time`, `status`) " +
" VALUES(?, ?, ?, ?, ?, ?, ?, ?)";
jdbcTemplate.update(sqlInsert, teamId, batchNo, batchDay, 0, 0, LocalDateTime.now(), null, "RUNNING");
log.info("生成新批次, teamId:{}, day:{}", teamId, batchDay);
} else {
batchNo = optional.get().getBatchNo();
batchDay = optional.get().getBatchDate();
}
log.info("清理原批次数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
//并发出现死锁
jdbcTemplate.update("delete from dispatch_engineer where team_id=? and batch_no=?", teamId, batchNo);
//dispatchEngineerRepository.deleteAllInBatch(dispatchEngineerRepository.findByTeamIdAndBatchNo(teamId,batchNo));
//并发出现死锁
jdbcTemplate.update("delete from dispatch_order where team_id=? and batch_no=?", teamId, batchNo);
//dispatchOrderRepository.deleteAllInBatch(dispatchOrderRepository.findByTeamIdAndBatchNo(teamId,batchNo));
log.info("写入新批次技术员、工单数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
String sqlEngineer = "INSERT INTO dispatch_engineer (team_id, batch_no, engineer_code, engineer_name, x, y, max_num, max_minute, max_distance, vehicle_type)\n"
+ "SELECT o.team_id,?,o.engineer_code, a.name , b.x, b.y , max_num, max_minute, max_distance, b.vehicle FROM `org_team_engineer` o,engineer_info a,engineer_business b \r\n"
+ " WHERE o.team_id=? AND `status`=1\r\n"
+ " AND o.engineer_code=a.engineer_code AND a.engineer_code = b.engineer_code \r\n"
+ " AND b.x IS NOT NULL AND b.x !=''" + " order by a.engineer_code asc";
int engCount = jdbcTemplate.update(sqlEngineer, batchNo, teamId);
// 未派过的工单(已派过PRE状态还可以再次派)
// 未派过的工单(已派过PRE状态还可以再次派)
String sqlOrder = "INSERT INTO dispatch_order (group_id, batch_no, team_id, order_id , dt, x, y, \n"
+ " expect_time_begin, expect_time_end, tags, priority , skills , take_time, status )\n"
+ " SELECT a.org_group_id, ?, a.org_team_id , a.order_id, ?, a.x, a.y , \r\n"
......@@ -228,8 +244,8 @@ public class BatchServiceImpl implements BatchService {
+ " AND order_status ='NORMAL' AND service_status='INIT'\r\n"
+ " ORDER BY a.expect_time_begin ASC \r\n";
int orderCount = jdbcTemplate.update(sqlOrder, batchNo, batchDay, teamId, batchDay);
// confirm的要做预占用,所以也加入进来
// confirm的要做预占用,所以也加入进来
String sqlOrderConfirm = "INSERT INTO dispatch_order (group_id, batch_no, team_id, order_id , dt, x, y, \n" +
" expect_time_begin, expect_time_end, tags, priority , skills , take_time, status, engineer_code, time_begin, time_end )\n" +
" select a.org_group_id, ?, a.org_team_id , a.order_id, a.dt, a.x, a.y , \n" +
......@@ -242,21 +258,21 @@ public class BatchServiceImpl implements BatchService {
" and order_status ='NORMAL' and service_status='INIT'\n" +
" order by a.expect_time_begin asc ";
int orderConfirmCount = jdbcTemplate.update(sqlOrderConfirm, batchNo, teamId, batchDay);
log.info("准备批次数据 engCount:{}, orderCount:{}, orderConfirmCount:{}", engCount, orderCount, orderConfirmCount);
if (orderCount + orderConfirmCount > 0) {
jdbcTemplate.update("update dispatch_batch set engineer_num=? , order_num=?, start_time=?, end_time=null, status='RUNNING' where team_id=? and batch_no=?",
engCount, orderCount + orderConfirmCount, LocalDateTime.now(), teamId, batchNo);
} else {
jdbcTemplate.update("update dispatch_batch set engineer_num=? , order_num=?, start_time=?, end_time=?, status='DONE' where team_id=? and batch_no=?",
engCount, 0, LocalDateTime.now(), LocalDateTime.now(), teamId, batchNo);
}
log.info("准备批次数据完成, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
return batchNo;
log.info("准备批次数据 engCount:{}, orderCount:{}, orderConfirmCount:{}", engCount, orderCount, orderConfirmCount);
if (orderCount + orderConfirmCount > 0) {
jdbcTemplate.update("update dispatch_batch set engineer_num=? , order_num=?, start_time=?, end_time=null, status='RUNNING' where team_id=? and batch_no=?",
engCount, orderCount + orderConfirmCount, LocalDateTime.now(), teamId, batchNo);
} else {
jdbcTemplate.update("update dispatch_batch set engineer_num=? , order_num=?, start_time=?, end_time=?, status='DONE' where team_id=? and batch_no=?",
engCount, 0, LocalDateTime.now(), LocalDateTime.now(), teamId, batchNo);
}
log.info("准备批次数据完成, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
return batchNo;
}
......@@ -270,16 +286,16 @@ public class BatchServiceImpl implements BatchService {
return "";
}
}
@Override
public String queryBatchNoByDay2(String teamId, String day) {
entityManager.clear();
Optional<DispatchBatch> optional = batchRepository.findByTeamIdAndBatchDate(teamId, day);
if (optional.isPresent()) {
return optional.get().getBatchNo();
} else {
return "";
}
entityManager.clear();
Optional<DispatchBatch> optional = batchRepository.findByTeamIdAndBatchDate(teamId, day);
if (optional.isPresent()) {
return optional.get().getBatchNo();
} else {
return "";
}
}
@Override
......@@ -288,12 +304,12 @@ public class BatchServiceImpl implements BatchService {
Optional<DispatchBatch> optional = batchRepository.findByGroupIdAndBatchDate(groupId, day);
return optional.orElseGet(DispatchBatch::new);
}
@Override
public DispatchBatch queryBatchInfoByDay2(String teamId, String day) {
entityManager.clear();
Optional<DispatchBatch> optional = batchRepository.findByTeamIdAndBatchDate(teamId, day);
return optional.orElseGet(DispatchBatch::new);
entityManager.clear();
Optional<DispatchBatch> optional = batchRepository.findByTeamIdAndBatchDate(teamId, day);
return optional.orElseGet(DispatchBatch::new);
}
@Override
......
package com.dituhui.pea.dispatch.service.impl;
import com.dituhui.pea.dispatch.dao.OrgTeamDao;
import com.dituhui.pea.dispatch.entity.OrgTeamEntity;
import com.dituhui.pea.dispatch.pojo.DispatchSolution;
import com.dituhui.pea.dispatch.service.BatchService;
import com.dituhui.pea.dispatch.service.ExtractService;
import com.dituhui.pea.dispatch.service.SchedulerService;
import com.dituhui.pea.dispatch.service.SolveService;
import com.dituhui.pea.dispatch.utils.DispatchSolutionUtils;
import lombok.extern.slf4j.Slf4j;
import org.optaplanner.core.api.solver.Solver;
import org.optaplanner.core.impl.solver.DefaultSolverFactory;
import org.optaplanner.persistence.jackson.impl.domain.solution.JacksonSolutionFileIO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Transactional;
import java.io.File;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.UUID;
/**
 * Automatic dispatch algorithm, batched per team: for each of the next
 * {@code nextDaysLimit} days it prepares a batch, solves it with OptaPlanner,
 * writes the result back and exports the solution as JSON.
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Service
@Slf4j
public class SchedulerServiceImpl implements SchedulerService {
    /** How many future days to dispatch (starting from tomorrow). */
    @Value("${dispatch.cron.next-day-limit}")
    int nextDaysLimit = 2;
    @Autowired
    BatchService batchService;
    @Autowired
    SolveService solveService;
    @Autowired
    ExtractService extractService;
    @Autowired
    OrgTeamDao orgTeamDao;
    private DefaultSolverFactory<DispatchSolution> solverFactory;
    private Solver<DispatchSolution> solver;
    public SchedulerServiceImpl() {
        // NOTE(review): this singleton bean shares one Solver across calls;
        // concurrent per-team Quartz jobs would invoke solve() on the same
        // instance — confirm OptaPlanner Solver re-entrancy or build one
        // solver per run.
        solverFactory = DispatchSolutionUtils.getSolverFactory(30, 60 * 5);
        solver = solverFactory.buildSolver();
    }
    /**
     * Dispatch all upcoming days for one team. Any failure is logged and
     * rethrown so the surrounding transaction rolls back.
     *
     * @param teamId id of the work team to dispatch
     */
    @Override
    @Transactional(isolation = Isolation.READ_COMMITTED, rollbackFor = Exception.class)
    public void dispatchRun2(String teamId) {
        OrgTeamEntity orgTeamEntity = orgTeamDao.findByTeamId(teamId);
        String groupId = orgTeamEntity.getGroupId();
        // Fixed log label: this is the start of the run, not the end.
        log.info("dispatchRun group:{}, team:{} start", groupId, teamId);
        try {
            // One batch per day, from tomorrow up to nextDaysLimit days ahead.
            for (int i = 1; i <= nextDaysLimit; i++) {
                String currDay = LocalDate.now().plusDays(i).format(DateTimeFormatter.ISO_LOCAL_DATE);
                log.info("dispatchRun begin----- teamId:{}, day:{}", teamId, currDay);
                String batchNo = batchService.buildBatchData2(teamId, currDay);
                UUID problemId = solveService.generateProblemId(teamId, batchNo);
                log.info("dispatchRun teamId:{}, day:{}, batch:{}, problemId:{}", teamId, currDay, batchNo, problemId);
                DispatchSolution problem = solveService.prepareSolution2(teamId, batchNo);
                if (problem.getCustomerList().isEmpty()) {
                    // Nothing to assign for this day; move on to the next one.
                    log.info("dispatchRun 当前批次没有待指派工单 , teamId:{}, day:{}, batch:{}, problemId:{}, order-size:{}", teamId, currDay, batchNo, problemId, problem.getCustomerList().size());
                    continue;
                }
                log.info("dispatchRun prepare done, teamId:{}, day:{}, batch:{}, problemId:{}", teamId, currDay, batchNo, problemId);
                DispatchSolution solution = solver.solve(problem);
                DispatchSolutionUtils.removeHardConstraintCustomer(solution, solverFactory);
                log.info("dispatchRun solve done, teamId:{}, day:{}, batch:{}, problemId:{}, score:{}", teamId, currDay, batchNo, problemId, solution.getScore().toShortString());
                this.solveService.saveSolutionWrp2(solution);
                this.extractService.extractDispatchToOrder2(teamId, batchNo, false);
                log.info("dispatchRun done ------ teamId:{}, day:{}", teamId, currDay);
                // Export the solved plan for offline inspection/debugging.
                JacksonSolutionFileIO<DispatchSolution> exporter = new JacksonSolutionFileIO<DispatchSolution>(DispatchSolution.class);
                exporter.write(solution, new File(String.format("dispatchSolution_%s_%s.json", teamId, currDay)));
            }
        } catch (Exception e) {
            log.error(">>> (teamId:{})自动排班失败:{}", teamId, e.getMessage(), e);
            throw e;
        }
    }
}
......@@ -390,6 +390,35 @@ public class SolveServiceImpl implements SolveService {
}
/**
 * Wrapper that persists a solved team batch: exports the solution to a JSON
 * file, marks the dispatch_batch row DONE, then writes per-order assignments
 * via saveSolutionToDispatch2.
 */
@Transactional(isolation = Isolation.READ_COMMITTED)
@Override
public void saveSolutionWrp2(DispatchSolution solution) throws RuntimeException {
String teamId = solution.getTeamId();
String batchNo = solution.getBatchNo();
log.info("算法结果回写包装方法, teamId:{}, batchNo:{}", teamId, batchNo);
// Export the solution JSON to a local file for inspection.
// NOTE(review): the file is never deleted — confirm whether it should be temporary.
JacksonSolutionFileIO<DispatchSolution> exporter = new JacksonSolutionFileIO<DispatchSolution>(DispatchSolution.class);
String fileName = String.format("dispatchSolution-%s-%s.json", teamId, batchNo);
File tempFile = new File(fileName);
exporter.write(solution, tempFile);
// The exported JSON used to be stored in dispatch_batch.ext; currently a
// placeholder "{}" is stored instead (reading the file back is disabled below).
String dispatchResultJson = "{}";
/*
try {
dispatchResultJson = FileUtil.readAsString(tempFile);
} catch (IOException e) {
log.error("json算法结果回写 error , teamId:{}, batchNo:{} ", teamId, batchNo, e);
}
*/
// Close the batch: status DONE, end time now, ext = (placeholder) result JSON.
Object[] paramBatch = {LocalDateTime.now(), dispatchResultJson, teamId, batchNo};
jdbcTemplate.update(" update dispatch_batch set status='DONE', end_time=? , ext=? where team_id=? and batch_no=? ", paramBatch);
saveSolutionToDispatch2(teamId, batchNo, solution);
}
/**
* 将计算结果回写到dispatch_order表(更新补充技术员工号、上门时间)
......@@ -442,4 +471,55 @@ public class SolveServiceImpl implements SolveService {
}
/**
 * Write the solver result back to dispatch_order for one team batch:
 * engineer code, visit sequence, arrival/departure time, travel time and
 * distance per assigned order. CONFIRM-ed orders are never touched.
 *
 * @param teamId   team whose batch was solved
 * @param batchNo  batch number being written back
 * @param solution solved OptaPlanner solution
 */
void saveSolutionToDispatch2(String teamId, String batchNo, DispatchSolution solution) throws RuntimeException {
    log.info("算法结果回写dispatch, teamId:{}, batchNo:{}", teamId, batchNo);
    // Step 1: clear any previous assignment of this batch (except CONFIRM rows).
    entityManager.clear();
    log.info("算法结果回写dispatch, step1-清除历史, teamId:{}, batchNo:{}", teamId, batchNo);
    Object[] paramClear = {teamId, batchNo};
    String sqlReset = "update dispatch_order set engineer_code='', seq=0, time_begin=null, time_end=null, path_time=0, path_distance=0 " + "where team_id=? and batch_no=? and status!='CONFIRM' ";
    jdbcTemplate.update(sqlReset, paramClear);
    log.info("算法结果回写dispatch, step2-开始回写, teamId:{}, batchNo:{}", teamId, batchNo);
    // Step 2: persist the new assignment, technician by technician.
    solution.getTechnicianList().forEach(technician -> {
        log.info("算法结果回写dispatch, step2.1-按技术员逐个回写, teamId:{}, batchNo:{}, technician: {}, max-minute:{}, customlist.size:{}", teamId, batchNo, technician.getCode(), technician.getMaxMinute(), technician.getCustomerList().size());
        // Visit order within this technician's route.
        AtomicInteger seq = new AtomicInteger();
        technician.getCustomerList().forEach(customer -> {
            int idx = seq.getAndIncrement();
            log.info("算法结果回写dispatch, step3-逐个客户处理, teamId:{}, batchNo:{}, employ: {}, customer:{}, service-duration:{} ", teamId, batchNo, technician.getCode(), customer.getCode(), customer.getServiceDuration());
            // Arrival/departure are minute offsets from midnight of the order's day.
            LocalDateTime customDateTime = LocalDateTime.parse(customer.getDt() + " 00:00:00", DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
            LocalDateTime arriveTime = customDateTime.plusMinutes(customer.getArrivalTime());
            LocalDateTime leaveTime = customDateTime.plusMinutes(customer.getDepartureTime());
            int pathTime = customer.getPathTimeFromPreviousStandstill();
            long pathDistance = customer.getDistanceFromPreviousStandstill();
            // Fixed: column is team_id (as in every other statement), not teamId.
            String sql = "update dispatch_order set engineer_code=?, seq=?, time_begin=? ,time_end=?, path_time=?, path_distance=? " + " where team_id=? and batch_no=? and order_id=? and dt=? and status!='CONFIRM' ";
            Object[] param = {technician.getCode(), idx, arriveTime, leaveTime, pathTime, pathDistance, teamId, batchNo, customer.getCode(), customer.getDt()};
            int rowUpdated = jdbcTemplate.update(sql, param);
            // Fixed: log the per-order index (idx), not the running AtomicInteger.
            log.info("算法结果回写dispatch, step3-逐个客户处理, order_id:{}, engineer_code:{}, seq: {}, begin:{}, end:{} ,rowUpdated:{}", customer.getCode(), technician.getCode(), idx, arriveTime, leaveTime, rowUpdated);
        });
    });
    log.info("算法结果回写dispatch完成, teamId:{}, batchNo:{}", teamId, batchNo);
}
}
......@@ -3,7 +3,7 @@ server:
dispatch:
cron:
expr: 0 43 8-18 * * ?
expr: 0 58 8-18 * * ?
next-day-limit: 2
# expr: 0 */10 8-18 * * ?
......@@ -28,7 +28,7 @@ spring:
enabled: false
datasource:
driver-class-name: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3388/saas_aftersale_test?serverTimezone=Asia/Shanghai
url: jdbc:mysql://127.0.0.1:3306/saas_aftersale_test?serverTimezone=Asia/Shanghai
username: root
password: 123456
type: com.alibaba.druid.pool.DruidDataSource
......
......@@ -32,10 +32,48 @@ spring:
password: boxi_dev_0725
type: com.alibaba.druid.pool.DruidDataSource
jpa:
show-sql: false
hibernate:
ddl-auto: none
# quartz 配置
quartz:
# 应用关闭时,是否等待定时任务执行完成。默认为 false,建议设置为 true
wait-for-jobs-to-complete-on-shutdown: true
# 是否覆盖已有 Job 的配置,注意为false时,修改已存在的任务调度cron,周期不生效
overwrite-existing-jobs: true
#相关属性配置
properties:
org:
quartz:
scheduler:
#调度标识名 集群中每一个实例都必须使用相同的名称
instanceName: Scheduler
#ID设置为自动获取 每一个必须不同
instanceId: AUTO
makeSchedulerThreadDaemon: false
jobStore:
#class: org.quartz.impl.jdbcjobstore.JobStoreTX
#spring-boot-starter-quartz V2.5.7及以上
class: org.springframework.scheduling.quartz.LocalDataSourceJobStore
driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate
#表的前缀,默认QRTZ_
tablePrefix: QRTZ_
#是否加入集群
isClustered: true
#调度实例失效的检查时间间隔
clusterCheckinInterval: 10000
useProperties: false
threadPool:
class: org.quartz.simpl.SimpleThreadPool
# 指定在线程池里面创建的线程是否是守护线程
makeThreadsDaemons: false
#指定线程数,至少为1(无默认值,一般设置为1-100)
threadCount: 5
#设置线程的优先级(最大为java.lang.Thread.MAX_PRIORITY 10,最小为Thread.MIN_PRIORITY 1,默认为5)
threadPriority: 5
threadsInheritContextClassLoaderOfInitializingThread: true
#数据库方式
job-store-type: jdbc
#初始化表结构,初始化:always,再次重启改为 embedded
jdbc:
initialize-schema: always
seata:
application-id: ${spring.application.name}
......
<?xml version="1.0" encoding="UTF-8"?>
<!--该日志将日志级别不同的log信息保存到不同的文件中 -->
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
<springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
<springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
<!-- 日志在工程中的输出位置 -->
<property name="LOG_FILE" value="logs/${spring.application.name:-}"/>
<!-- 控制台的日志输出样式 -->
    <property name="CONSOLE_LOG_PATTERN"
              value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!--日志文件输出格式-->
<property name="FILE_LOG_PATTERN"
value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${spring.application.name:-} %thread %logger %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<!-- 日志输出编码 -->
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!-- 按照每天生成常规日志文件 -->
<appender name="FileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_FILE}/info/${spring.application.name:-}.log</file>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 基于时间的分包策略 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_FILE}/info/${spring.application.name:-}.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<!--保留时间,单位:天-->
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 异常文件输出设置,将异常堆栈另外保存一份到单独的文件中,方便查找 -->
<appender name="FILE_ERROR"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/error/${spring.application.name:-}.error.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/error/${spring.application.name:-}.error-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志;NEUTRAL 时交给下一个过滤器判断 -->
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>NEUTRAL</onMismatch>
        </filter>
        <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印警告日志,其余全部拒绝 -->
            <level>WARN</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
</appender>
<appender name="FILE_DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_FILE}/debug/${spring.application.name:-}.log</file>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${FILE_LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
<!-- 基于时间的分包策略 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
<!--保留时间,单位:天-->
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>DEBUG</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
</appender>
<!-- 异步输出 -->
<appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
<!-- 0-不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
<queueSize>256</queueSize>
<!-- 添加附加的appender,最多只能添加一个 -->
<appender-ref ref="FileAppender"/>
</appender>
<logger name="org.optaplanner" level="info"/>
<!-- 日志输出级别 -->
<root level="debug">
<appender-ref ref="console"/>
<appender-ref ref="FileAppender"/>
<appender-ref ref="FILE_ERROR"/>
<appender-ref ref="FILE_DEBUG"/>
</root>
</configuration>
\ No newline at end of file
......@@ -12,9 +12,9 @@ import java.util.Objects;
* @date 2023/10/24
*/
public enum VehicleEnum {
CAR(1, "车"),
CAR(1, "车"),
ELECTRIC_CAR(2, "电动车"),
BICYCLE(3, "自行车"),
BICYCLE(3, "骑行"),
WALK(4, "步行");
private Integer code;
......
......@@ -242,6 +242,13 @@ public class EngineerServiceImpl implements EngineerService {
update.where(cb.equal(root.get("engineerCode"), engineerCode));
entityManager.createQuery(update).executeUpdate();
CriteriaBuilder cb2 = entityManager.getCriteriaBuilder();
CriteriaUpdate<EngineerInfoEntity> update2 = cb2.createCriteriaUpdate(EngineerInfoEntity.class);
Root<EngineerInfoEntity> root2 = update2.from(EngineerInfoEntity.class);
update2.set(root2.get("vehicle"), Integer.valueOf(transportMode));
update2.where(cb2.equal(root2.get("engineerCode"), engineerCode));
entityManager.createQuery(update2).executeUpdate();
return Result.success(null);
}
......
......@@ -7,6 +7,7 @@
<springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
<springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
<springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
<!-- 日志在工程中的输出位置 -->
<property name="LOG_FILE" value="logs/${spring.application.name:-}"/>
......
Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!