Commit 51c8cab8 authored Nov 03, 2023 by Ren Ping
feat:任务执行分布式 (feat: distributed task execution)
1 parent ae692a05
Showing 27 changed files with 786 additions and 21 deletions
.gitignore
project-dispatch/pom.xml
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/DispatchEngineerRepository.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/DispatchOrderRepository.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgBranchDao.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgClusterDao.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgGroupRepository.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgTeamDao.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/entity/OrgBranchEntity.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/entity/OrgClusterEntity.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/AutoDispatchJob.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/MyQuartzJobFactory.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/QuartzConfig.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/QuartzTaskListener.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/scheduler/BatchScheduler.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/SchedulerService.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/SolveService.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/impl/BatchServiceImpl.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/impl/SchedulerServiceImpl.java
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/impl/SolveServiceImpl.java
project-dispatch/src/main/resources/application-dev.yaml
project-dispatch/src/main/resources/application.yaml
project-dispatch/src/main/resources/logback-spring.xml
project-dispatch/src/main/resources/logback.xml → project-dispatch/src/main/resources/logback.xml.bak
project-order/src/main/java/com/dituhui/pea/order/enums/VehicleEnum.java
project-order/src/main/java/com/dituhui/pea/order/service/impl/EngineerServiceImpl.java
project-order/src/main/resources/logback-spring.xml
.gitignore
@@ -11,5 +11,5 @@
/logs/
/*/.gitignore
dispatchSolution-*.json
/project-order/src/main/resources/application-dev.yaml
*.idea
*.json
project-dispatch/pom.xml

@@ -46,10 +46,10 @@
            <version>${mysql.version}</version>
        </dependency>
        <!-- <dependency>-->
        <!-- <groupId>com.alibaba.cloud</groupId>-->
        <!-- <artifactId>spring-cloud-starter-alibaba-seata</artifactId>-->
        <!-- </dependency>-->
        <dependency>
            <groupId>com.alibaba.cloud</groupId>

@@ -141,7 +141,11 @@
            <artifactId>gson</artifactId>
        </dependency>
        <!-- quartz dependency -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-quartz</artifactId>
        </dependency>
    </dependencies>
    <build>
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/DispatchEngineerRepository.java

package com.dituhui.pea.dispatch.dao;

import com.dituhui.pea.dispatch.entity.DispatchEngineer;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.CrudRepository;

import java.util.List;

-public interface DispatchEngineerRepository extends CrudRepository<DispatchEngineer, Long> {
+public interface DispatchEngineerRepository extends CrudRepository<DispatchEngineer, Long>, JpaRepository<DispatchEngineer, Long> {

    List<DispatchEngineer> findByGroupId(String groupId);
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/DispatchOrderRepository.java

package com.dituhui.pea.dispatch.dao;

import com.dituhui.pea.dispatch.entity.DispatchOrder;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;

import java.util.List;
import java.util.Optional;

-public interface DispatchOrderRepository extends CrudRepository<DispatchOrder, Long> {
+public interface DispatchOrderRepository extends CrudRepository<DispatchOrder, Long>, JpaRepository<DispatchOrder, Long> {

    // Fetch unassigned, non-CONFIRM orders for the algorithm to work on

@@ -31,4 +32,6 @@ public interface DispatchOrderRepository extends CrudRepository<DispatchOrder, L
    List<DispatchOrder> findAllWithoutConfirm2(String teamId, String batchNo);

    Optional<DispatchOrder> findByGroupIdAndBatchNoAndOrderIdAndDt(String groupId, String batchNo, String orderId, String dt);

    List<DispatchOrder> findByTeamIdAndBatchNo(String teamId, String batchNo);
}
\ No newline at end of file
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgBranchDao.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.dao;

import com.dituhui.pea.dispatch.entity.OrgBranchEntity;
import org.hibernate.annotations.Where;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

import java.util.List;

@Repository
@Where(clause = "status = 1")
public interface OrgBranchDao extends JpaRepository<OrgBranchEntity, Integer> {

    List<OrgBranchEntity> findAllByClusterId(String clusterId);

    OrgBranchEntity getByBranchId(String branchId);

    public List<OrgBranchEntity> findByBranchIdIn(List<String> ids);

    public OrgBranchEntity findByCitycodeListLike(String citycodeList);
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgClusterDao.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.dao;

import com.dituhui.pea.dispatch.entity.OrgClusterEntity;
import org.hibernate.annotations.Where;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

import java.util.List;

@Repository
@Where(clause = "status = 1")
public interface OrgClusterDao extends JpaRepository<OrgClusterEntity, Integer> {

    OrgClusterEntity getByClusterId(String clusterId);

    public List<OrgClusterEntity> findByClusterIdIn(List<String> ids);
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgGroupRepository.java

@@ -12,4 +12,6 @@ import org.springframework.stereotype.Repository;
@Repository
public interface OrgGroupRepository extends CrudRepository<OrgGroup, Long> {

    Optional<OrgGroup> findByGroupId(String groupId);

    List<OrgGroup> findAllByBranchId(String branchId);
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/dao/OrgTeamDao.java

@@ -4,6 +4,7 @@ import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;

import com.dituhui.pea.dispatch.entity.OrgTeamEntity;

@@ -17,4 +18,10 @@ public interface OrgTeamDao extends JpaRepository<OrgTeamEntity, Integer>, JpaSp
    OrgTeamEntity findByTeamId(String teamId);

    @Query(value = "select t.* from org_team t\n" +
            "join org_group g on g.group_id=t.group_id and t.`status`=1 and g.`status`=1\n" +
            "join org_branch b on b.branch_id=g.branch_id and b.`status`=1\n" +
            "join org_cluster c on c.cluster_id=b.cluster_id and c.`status`=1", nativeQuery = true)
    List<OrgTeamEntity> findAllTeam();
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/entity/OrgBranchEntity.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.entity;

import lombok.Data;

import javax.persistence.*;
import java.time.LocalDateTime;

@Entity
@Data
@Table(name = "org_branch")
public class OrgBranchEntity {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Integer id;

    private String branchId;

    private String branchName;

    private String clusterId;

    private String address;

    private String x;

    private String y;

    private String citycodeList;

    private Integer kind;

    private String layerId;

    private String memo;

    private LocalDateTime createTime = LocalDateTime.now();

    private LocalDateTime updateTime = LocalDateTime.now();

    public OrgBranchEntity() {
    }

    /**
     * Department abbreviation
     */
    private String abbreviation;

    /**
     * Department code
     */
    private String code;

    /**
     * Contact number of the department head
     */
    private String phone;

    /**
     * Whether the peripheral warehouse is enabled: 0 = disabled, 1 = enabled
     */
    private Integer warehouseEnabled = 0;

    /**
     * Maximum parts retention period (days)
     */
    private Integer reserveTimeMax = 0;

    /**
     * Account status (0 = invalid, 1 = valid)
     */
    private Integer status = 1;
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/entity/OrgClusterEntity.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.entity;

import lombok.Data;

import javax.persistence.*;
import java.time.LocalDateTime;

@Entity
@Data
@Table(name = "org_cluster")
public class OrgClusterEntity {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Integer id;

    private String clusterId;

    private String name;

    private String citycodeList;

    private String address;

    private String cityName;

    private Integer status = 1;

    private String updateUser;

    private LocalDateTime createTime = LocalDateTime.now();

    private LocalDateTime updateTime = LocalDateTime.now();

    public OrgClusterEntity() {
    }

    /**
     * Department description (optional)
     */
    private String memo;

    /**
     * Department abbreviation
     */
    private String abbreviation;

    /**
     * Department code
     */
    private String code;

    /**
     * Contact number of the department head
     */
    private String phone;

    /**
     * Whether the peripheral warehouse is enabled: 0 = disabled, 1 = enabled
     */
    private Integer warehouseEnabled = 0;

    /**
     * Maximum parts retention period (days)
     */
    private Integer reserveTimeMax = 0;
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/AutoDispatchJob.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.quartz;

import com.dituhui.pea.dispatch.service.SchedulerService;
import lombok.extern.slf4j.Slf4j;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.springframework.scheduling.quartz.QuartzJobBean;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

/**
 * Auto-dispatch job
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Component
@Slf4j
public class AutoDispatchJob extends QuartzJobBean {

    public static final String TEAM_JOB_PREFIX = "BOXI_TEAM_";

    @Resource
    private SchedulerService schedulerService;

    @Override
    protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
        try {
            System.out.println(this);
            JobKey jobKey = jobExecutionContext.getJobDetail().getKey();
            String name = jobKey.getName();
            String teamId = name.substring(TEAM_JOB_PREFIX.length());
            long start = System.currentTimeMillis();
            log.info(">>> 自动派工(teamId:{}) 自动任务开始", teamId);
            schedulerService.dispatchRun2(teamId);
            long end = System.currentTimeMillis();
            log.info(">>> 自动派工(teamId:{}) 自动任务结束,耗时:{}", teamId, end - start);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
}
\ No newline at end of file
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/MyQuartzJobFactory.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.quartz;

import org.quartz.Job;
import org.quartz.spi.TriggerFiredBundle;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.scheduling.quartz.AdaptableJobFactory;
import org.springframework.stereotype.Component;

/**
 * Custom JobFactory that obtains singleton Job beans from the Spring container
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Component
public class MyQuartzJobFactory extends AdaptableJobFactory implements ApplicationContextAware {

    private ApplicationContext applicationContext;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        this.applicationContext = applicationContext;
    }

    @Override
    protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
        Job job = applicationContext.getBean(bundle.getJobDetail().getJobClass());
        return job;
    }
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/QuartzConfig.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.quartz;

import org.quartz.spi.JobFactory;
import org.springframework.boot.autoconfigure.quartz.QuartzProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

import javax.annotation.Resource;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Properties;

@Configuration
public class QuartzConfig {

    @Resource
    private JobFactory jobFactory;

    @Resource
    private QuartzProperties quartzProperties;

    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource) throws IOException {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setDataSource(dataSource);
        Properties properties = new Properties();
        for (String key : quartzProperties.getProperties().keySet()) {
            properties.put(key, quartzProperties.getProperties().get(key));
        }
        factory.setQuartzProperties(properties);
        factory.setJobFactory(jobFactory);
        return factory;
    }
}
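QuartzConfig builds its own SchedulerFactoryBean so that the JDBC data source, the spring.quartz properties and the Spring-aware MyQuartzJobFactory are wired together. For comparison only, a sketch of an alternative that keeps Spring Boot's auto-configured factory and only swaps in the job factory; the QuartzJobFactoryCustomizerConfig class is hypothetical and not part of this commit, and it assumes Spring Boot 2.x with spring-boot-starter-quartz on the classpath:

// Alternative sketch, not part of this commit: keep Spring Boot's auto-configured
// SchedulerFactoryBean (which already reads spring.quartz.* from application.yaml)
// and only install the Spring-aware job factory.
import org.springframework.boot.autoconfigure.quartz.SchedulerFactoryBeanCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class QuartzJobFactoryCustomizerConfig {

    @Bean
    public SchedulerFactoryBeanCustomizer jobFactoryCustomizer(MyQuartzJobFactory jobFactory) {
        // Called by Spring Boot before the auto-configured scheduler is started.
        return factory -> factory.setJobFactory(jobFactory);
    }
}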
project-dispatch/src/main/java/com/dituhui/pea/dispatch/quartz/QuartzTaskListener.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.quartz;

import cn.hutool.core.collection.CollectionUtil;
import com.dituhui.pea.dispatch.dao.OrgTeamDao;
import com.dituhui.pea.dispatch.entity.OrgTeamEntity;
import lombok.extern.slf4j.Slf4j;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationStartedEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.List;

/**
 * Registers the scheduled dispatch jobs when the application starts
 *
 * @author RenPing
 * @date 2023/11/01
 */
@Component
@Slf4j
public class QuartzTaskListener implements ApplicationListener<ApplicationStartedEvent> {

    @Resource
    private Scheduler scheduler;

    @Resource
    private OrgTeamDao orgTeamDao;

    @Value("${dispatch.cron.expr}")
    private String dispatchCron;

    @Override
    public void onApplicationEvent(ApplicationStartedEvent applicationStartedEvent) {
        List<OrgTeamEntity> teamList = orgTeamDao.findAllTeam();
        teamList.forEach(orgTeamEntity -> {
            String jobName = AutoDispatchJob.TEAM_JOB_PREFIX + orgTeamEntity.getTeamId();
            JobDetail jobDetail = JobBuilder.newJob(AutoDispatchJob.class)
                    .withIdentity(jobName, jobName)
                    .storeDurably()
                    .build();
            Trigger trigger = TriggerBuilder.newTrigger()
                    .forJob(jobDetail)
                    .withIdentity(jobName, jobName)
                    .startNow()
                    .withSchedule(CronScheduleBuilder.cronSchedule(dispatchCron))
                    .build();
            try {
                scheduler.scheduleJob(jobDetail, CollectionUtil.newHashSet(trigger), true);
            } catch (SchedulerException e) {
                //log.error(e.getMessage(), e);
            }
        });
    }
}
\ No newline at end of file
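Because QuartzTaskListener registers every job with name and group both equal to AutoDispatchJob.TEAM_JOB_PREFIX + teamId, a single team's dispatch can also be fired on demand by rebuilding that key. A minimal sketch; the ManualDispatchTrigger class is hypothetical and not part of this commit:

// Hypothetical helper: fire one team's AutoDispatchJob immediately, outside its cron schedule.
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

@Component
public class ManualDispatchTrigger {

    @Resource
    private Scheduler scheduler;

    public void triggerTeam(String teamId) throws SchedulerException {
        // QuartzTaskListener uses withIdentity(jobName, jobName), so name == group.
        String jobName = AutoDispatchJob.TEAM_JOB_PREFIX + teamId;
        JobKey jobKey = JobKey.jobKey(jobName, jobName);
        if (scheduler.checkExists(jobKey)) {
            scheduler.triggerJob(jobKey);
        }
    }
}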
project-dispatch/src/main/java/com/dituhui/pea/dispatch/scheduler/BatchScheduler.java

@@ -26,7 +26,7 @@ import com.dituhui.pea.dispatch.utils.DispatchSolutionUtils;
import lombok.extern.slf4j.Slf4j;

@Slf4j
-@Component
+//@Component
public class BatchScheduler {

    @Value("${dispatch.cron.next-day-limit}")

@@ -55,7 +55,7 @@ public class BatchScheduler {
    /*
     * Start of asynchronous task execution
     */
-    @Scheduled(cron = "${dispatch.cron.expr}")
+    //@Scheduled(cron = "${dispatch.cron.expr}")
    public void dispatchRun2() {
        String groupId = "gsuzhou";
        log.info("dispatchRun group:{}", groupId);

@@ -83,12 +83,12 @@ public class BatchScheduler {
            DispatchSolution solution = solver.solve(problem);
            DispatchSolutionUtils.removeHardConstraintCustomer(solution, solverFactory);
            log.info("dispatchRun solve done, teamId:{}, day:{}, batch:{}, problemId:{}, score:{}", teamId, currDay, batchNo, problemId, solution.getScore().toShortString());
-            this.solveService.saveSolutionWrp(solution);
+            this.solveService.saveSolutionWrp2(solution);
            this.extractService.extractDispatchToOrder2(teamId, batchNo, false);
            log.info("dispatchRun done ------ teamId:{}, day:{}", teamId, currDay);
            JacksonSolutionFileIO<DispatchSolution> exporter = new JacksonSolutionFileIO<DispatchSolution>(DispatchSolution.class);
-            exporter.write(solution, new File(String.format("dispatchSolution_%s_%s.json", groupId, currDay)));
+            exporter.write(solution, new File(String.format("dispatchSolution_%s_%s.json", teamId, currDay)));
        }
    }
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/SchedulerService.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.service;

public interface SchedulerService {

    /**
     * Runs dispatch with one batch per work team
     *
     * @param teamId work-team ID
     * @author RenPing
     * @date 2023/11/02
     */
    void dispatchRun2(String teamId);
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/SolveService.java

@@ -35,6 +35,13 @@ public interface SolveService {
    void saveSolutionWrp(DispatchSolution solution) throws RuntimeException;

    /*
     * Writes the computed result back to the two dispatch tables.
     * This is a wrapper around the two methods below.
     * */
    void saveSolutionWrp2(DispatchSolution solution) throws RuntimeException;
}
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/impl/BatchServiceImpl.java

@@ -5,10 +5,16 @@ import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import javax.persistence.EntityManager;

import com.alibaba.fastjson.JSONObject;
import com.dituhui.pea.dispatch.dao.DispatchEngineerRepository;
import com.dituhui.pea.dispatch.dao.DispatchOrderRepository;
import com.dituhui.pea.dispatch.entity.DispatchEngineer;
import com.dituhui.pea.dispatch.entity.DispatchOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;

@@ -38,6 +44,12 @@ public class BatchServiceImpl implements BatchService {
    DispatchBatchRepository batchRepository;

    @Autowired
    DispatchOrderRepository dispatchOrderRepository;

    @Autowired
    DispatchEngineerRepository dispatchEngineerRepository;

    @Autowired
    OrderInfoRepository orderInfoRepository;

    @Autowired

@@ -181,7 +193,7 @@ public class BatchServiceImpl implements BatchService {
    }

    // Check whether a batch task is already running for the given team and date; return it if so, otherwise create one
-    @Transactional(isolation = Isolation.READ_COMMITTED)
+    @Transactional(isolation = Isolation.READ_COMMITTED, rollbackFor = Exception.class)
    @Override
    public String buildBatchData2(String teamId, String day) {
        entityManager.clear();

@@ -206,8 +218,12 @@ public class BatchServiceImpl implements BatchService {
        }

        log.info("清理原批次数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
        // deadlocks occurred under concurrency, so delete with plain SQL
        jdbcTemplate.update("delete from dispatch_engineer where team_id=? and batch_no=?", teamId, batchNo);
        //dispatchEngineerRepository.deleteAllInBatch(dispatchEngineerRepository.findByTeamIdAndBatchNo(teamId,batchNo));
        // deadlocks occurred under concurrency, so delete with plain SQL
        jdbcTemplate.update("delete from dispatch_order where team_id=? and batch_no=?", teamId, batchNo);
        //dispatchOrderRepository.deleteAllInBatch(dispatchOrderRepository.findByTeamIdAndBatchNo(teamId,batchNo));

        log.info("写入新批次技术员、工单数据, teamId:{}, day:{}, batchNo:{}", teamId, batchDay, batchNo);
        String sqlEngineer = "INSERT INTO dispatch_engineer (team_id, batch_no, engineer_code, engineer_name, x, y, max_num, max_minute, max_distance, vehicle_type)\n"
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/impl/SchedulerServiceImpl.java (new file, 0 → 100644)

package com.dituhui.pea.dispatch.service.impl;

import com.dituhui.pea.dispatch.dao.OrgTeamDao;
import com.dituhui.pea.dispatch.entity.OrgTeamEntity;
import com.dituhui.pea.dispatch.pojo.DispatchSolution;
import com.dituhui.pea.dispatch.service.BatchService;
import com.dituhui.pea.dispatch.service.ExtractService;
import com.dituhui.pea.dispatch.service.SchedulerService;
import com.dituhui.pea.dispatch.service.SolveService;
import com.dituhui.pea.dispatch.utils.DispatchSolutionUtils;
import lombok.extern.slf4j.Slf4j;
import org.optaplanner.core.api.solver.Solver;
import org.optaplanner.core.impl.solver.DefaultSolverFactory;
import org.optaplanner.persistence.jackson.impl.domain.solution.JacksonSolutionFileIO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Transactional;

import java.io.File;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.UUID;

/**
 * Automatic scheduling algorithm
 *
 * @author RenPing
 * @date 2023/11/02
 */
@Service
@Slf4j
public class SchedulerServiceImpl implements SchedulerService {

    @Value("${dispatch.cron.next-day-limit}")
    int nextDaysLimit = 2;

    @Autowired
    BatchService batchService;

    @Autowired
    SolveService solveService;

    @Autowired
    ExtractService extractService;

    @Autowired
    OrgTeamDao orgTeamDao;

    private DefaultSolverFactory<DispatchSolution> solverFactory;

    private Solver<DispatchSolution> solver;

    public SchedulerServiceImpl() {
        solverFactory = DispatchSolutionUtils.getSolverFactory(30, 60 * 5);
        solver = solverFactory.buildSolver();
    }

    @Override
    @Transactional(isolation = Isolation.READ_COMMITTED, rollbackFor = Exception.class)
    public void dispatchRun2(String teamId) {
        OrgTeamEntity orgTeamEntity = orgTeamDao.findByTeamId(teamId);
        String groupId = orgTeamEntity.getGroupId();
        log.info("dispatchRun group:{}, team:{} done", groupId, teamId);
        try {
            for (int i = 1; i <= nextDaysLimit; i++) {
                String currDay = LocalDate.now().plusDays(i).format(DateTimeFormatter.ISO_LOCAL_DATE);
                log.info("dispatchRun begin----- teamId:{}, day:{}", teamId, currDay);
                String batchNo = batchService.buildBatchData2(teamId, currDay);
                UUID problemId = solveService.generateProblemId(teamId, batchNo);
                log.info("dispatchRun teamId:{}, day:{}, batch:{}, problemId:{}", teamId, currDay, batchNo, problemId);
                DispatchSolution problem = solveService.prepareSolution2(teamId, batchNo);
                if (problem.getCustomerList().size() <= 0) {
                    log.info("dispatchRun 当前批次没有待指派工单 , teamId:{}, day:{}, batch:{}, problemId:{}, order-size:{}", teamId, currDay, batchNo, problemId, problem.getCustomerList().size());
                    continue;
                }
                log.info("dispatchRun prepare done, teamId:{}, day:{}, batch:{}, problemId:{}", teamId, currDay, batchNo, problemId);
                DispatchSolution solution = solver.solve(problem);
                DispatchSolutionUtils.removeHardConstraintCustomer(solution, solverFactory);
                log.info("dispatchRun solve done, teamId:{}, day:{}, batch:{}, problemId:{}, score:{}", teamId, currDay, batchNo, problemId, solution.getScore().toShortString());
                this.solveService.saveSolutionWrp2(solution);
                this.extractService.extractDispatchToOrder2(teamId, batchNo, false);
                log.info("dispatchRun done ------ teamId:{}, day:{}", teamId, currDay);
                JacksonSolutionFileIO<DispatchSolution> exporter = new JacksonSolutionFileIO<DispatchSolution>(DispatchSolution.class);
                exporter.write(solution, new File(String.format("dispatchSolution_%s_%s.json", teamId, currDay)));
                //log.info("dispatchRun group:{}, team:{} done", groupId, teamId);
            }
        } catch (Exception e) {
            log.error(">>> (teamId:{})自动排班失败:{}", teamId, e.getMessage(), e);
            throw e;
        }
    }
}
\ No newline at end of file
project-dispatch/src/main/java/com/dituhui/pea/dispatch/service/impl/SolveServiceImpl.java

@@ -390,6 +390,35 @@ public class SolveServiceImpl implements SolveService {
    }

    @Transactional(isolation = Isolation.READ_COMMITTED)
    @Override
    public void saveSolutionWrp2(DispatchSolution solution) throws RuntimeException {
        String teamId = solution.getTeamId();
        String batchNo = solution.getBatchNo();
        log.info("算法结果回写包装方法, teamId:{}, batchNo:{}", teamId, batchNo);

        JacksonSolutionFileIO<DispatchSolution> exporter = new JacksonSolutionFileIO<DispatchSolution>(DispatchSolution.class);
        String fileName = String.format("dispatchSolution-%s-%s.json", teamId, batchNo);
        File tempFile = new File(fileName);
        exporter.write(solution, tempFile);
        String dispatchResultJson = "{}";
        /*
        try {
            dispatchResultJson = FileUtil.readAsString(tempFile);
        } catch (IOException e) {
            log.error("json算法结果回写 error , teamId:{}, batchNo:{} ", teamId, batchNo, e);
        }
        */

        Object[] paramBatch = {LocalDateTime.now(), dispatchResultJson, teamId, batchNo};
        jdbcTemplate.update(" update dispatch_batch set status='DONE', end_time=? , ext=? where team_id=? and batch_no=? ", paramBatch);

        saveSolutionToDispatch2(teamId, batchNo, solution);
    }

    /**
     * Writes the computed result back to the dispatch_order table (fills in the engineer code and visit time)

@@ -442,4 +471,55 @@ public class SolveServiceImpl implements SolveService {
    }

    /**
     * Writes the computed result back to the dispatch_order table (fills in the engineer code and visit time)
     */
    void saveSolutionToDispatch2(String teamId, String batchNo, DispatchSolution solution) throws RuntimeException {
        log.info("算法结果回写dispatch, teamId:{}, batchNo:{}", teamId, batchNo);

        // Clear the current batch's assignment results
        entityManager.clear();
        log.info("算法结果回写dispatch, step1-清除历史, groupId:{}, batchNo:{}", teamId, batchNo);
        Object[] paramClear = {teamId, batchNo};
        String sqlReset = "update dispatch_order set engineer_code='', seq=0, time_begin=null, time_end=null, path_time=0, path_distance=0 " +
                "where team_id=? and batch_no=? and status!='CONFIRM' ";
        jdbcTemplate.update(sqlReset, paramClear);

        log.info("算法结果回写dispatch, step2-开始回写, teamId:{}, batchNo:{}", teamId, batchNo);
        // Save the current batch's assignment results
        solution.getTechnicianList().forEach(technician -> {
            log.info("算法结果回写dispatch, step2.1-按技术员逐个回写, teamId:{}, batchNo:{}, technician: {}, max-minute:{}, customlist.size:{}",
                    teamId, batchNo, technician.getCode(), technician.getMaxMinute(), technician.getCustomerList().size());
            AtomicInteger seq = new AtomicInteger();
            technician.getCustomerList().forEach(customer -> {
                int idx = seq.getAndIncrement();
                // Time addition
                // LocalDateTime localExpectBegin = LocalDateTime.ofInstant(expectBegin[0].toInstant(), ZoneId.systemDefault());
                // LocalDateTime localEndTime = localExpectBegin.plusMinutes(dOrder.getTakeTime());
                // Date end = Date.from(localEndTime.atZone(ZoneId.systemDefault()).toInstant());
                log.info("算法结果回写dispatch, step3-逐个客户处理, teamId:{}, batchNo:{}, employ: {}, customer:{}, service-duration:{} ",
                        teamId, batchNo, technician.getCode(), customer.getCode(), customer.getServiceDuration());
                log.info(customer.toString());
                LocalDateTime customDateTime = LocalDateTime.parse(customer.getDt() + " 00:00:00", DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
                LocalDateTime arriveTime = customDateTime.plusMinutes(customer.getArrivalTime());
                LocalDateTime leaveTime = customDateTime.plusMinutes(customer.getDepartureTime());
                int pathTime = customer.getPathTimeFromPreviousStandstill();
                long pathDistance = customer.getDistanceFromPreviousStandstill();
                String sql = "update dispatch_order set engineer_code=?, seq=?, time_begin=? ,time_end=?, path_time=?, path_distance=? " +
                        " where team_id=? and batch_no=? and order_id=? and dt=? and status!='CONFIRM' ";
                Object[] param = {technician.getCode(), idx, arriveTime, leaveTime, pathTime, pathDistance, teamId, batchNo, customer.getCode(), customer.getDt()};
                int rowUpdated = jdbcTemplate.update(sql, param);
                log.info("算法结果回写dispatch, step3-逐个客户处理, order_id:{}, engineer_code:{}, seq: {}, begin:{}, end:{} ,rowUpdated:{}",
                        customer.getCode(), technician.getCode(), seq, arriveTime, leaveTime, rowUpdated);
            });
        });
        log.info("算法结果回写dispatch完成, teamId:{}, batchNo:{}", teamId, batchNo);
    }
}
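The write-back above converts the solver's minute offsets into timestamps by parsing the order's date at midnight and adding customer.getArrivalTime() / customer.getDepartureTime(). A small worked example with made-up values, for illustration only (540 minutes after midnight is 09:00):

// Illustrative only: the same date arithmetic used in saveSolutionToDispatch2, with hypothetical values.
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class ArrivalTimeExample {
    public static void main(String[] args) {
        String dt = "2023-11-04";        // stands in for customer.getDt()
        int arrivalMinutes = 540;        // stands in for customer.getArrivalTime()
        LocalDateTime dayStart = LocalDateTime.parse(dt + " 00:00:00",
                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
        System.out.println(dayStart.plusMinutes(arrivalMinutes)); // prints 2023-11-04T09:00
    }
}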
project-dispatch/src/main/resources/application-dev.yaml

@@ -3,7 +3,7 @@ server:
dispatch:
  cron:
-    expr: 0 43 8-18 * * ?
+    expr: 0 58 8-18 * * ?
    next-day-limit: 2
#    expr: 0 */10 8-18 * * ?

@@ -28,7 +28,7 @@ spring:
      enabled: false
  datasource:
    driver-class-name: com.mysql.cj.jdbc.Driver
-    url: jdbc:mysql://127.0.0.1:3388/saas_aftersale_test?serverTimezone=Asia/Shanghai
+    url: jdbc:mysql://127.0.0.1:3306/saas_aftersale_test?serverTimezone=Asia/Shanghai
    username: root
    password: 123456
    type: com.alibaba.druid.pool.DruidDataSource
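The updated expression 0 58 8-18 * * ? fires at minute 58 of every hour from 08:00 through 18:58, every day. To double-check a Quartz cron locally, a quick sketch (illustrative only, not part of this commit; the DispatchCronPreview class is hypothetical):

// Illustrative only: print the next few fire times of the dispatch cron.
import org.quartz.CronExpression;

import java.text.ParseException;
import java.util.Date;

public class DispatchCronPreview {
    public static void main(String[] args) throws ParseException {
        CronExpression cron = new CronExpression("0 58 8-18 * * ?");
        Date next = new Date();
        for (int i = 0; i < 3; i++) {
            next = cron.getNextValidTimeAfter(next);
            System.out.println(next);
        }
    }
}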
project-dispatch/src/main/resources/application.yaml

@@ -32,10 +32,48 @@ spring:
    password: boxi_dev_0725
    type: com.alibaba.druid.pool.DruidDataSource
  jpa:
    show-sql: false
    hibernate:
      ddl-auto: none
  # quartz configuration
  quartz:
    # Whether to wait for running jobs to finish when the application shuts down. Defaults to false; true is recommended
    wait-for-jobs-to-complete-on-shutdown: true
    # Whether to overwrite the configuration of existing jobs. Note: if false, changing the cron of an already registered job will not take effect
    overwrite-existing-jobs: true
    # Related Quartz properties
    properties:
      org:
        quartz:
          scheduler:
            # Scheduler name; every instance in the cluster must use the same name
            instanceName: Scheduler
            # Instance ID obtained automatically; each instance must be different
            instanceId: AUTO
            makeSchedulerThreadDaemon: false
          jobStore:
            #class: org.quartz.impl.jdbcjobstore.JobStoreTX
            # spring-boot-starter-quartz V2.5.7 and above
            class: org.springframework.scheduling.quartz.LocalDataSourceJobStore
            driverDelegateClass: org.quartz.impl.jdbcjobstore.StdJDBCDelegate
            # Table prefix, defaults to QRTZ_
            tablePrefix: QRTZ_
            # Whether to join the cluster
            isClustered: true
            # Check-in interval for detecting failed scheduler instances
            clusterCheckinInterval: 10000
            useProperties: false
          threadPool:
            class: org.quartz.simpl.SimpleThreadPool
            # Whether threads created in the pool are daemon threads
            makeThreadsDaemons: false
            # Number of threads, at least 1 (no default; typically 1-100)
            threadCount: 5
            # Thread priority (max java.lang.Thread.MAX_PRIORITY 10, min Thread.MIN_PRIORITY 1, default 5)
            threadPriority: 5
            threadsInheritContextClassLoaderOfInitializingThread: true
    # Store jobs in the database (JDBC)
    job-store-type: jdbc
    # Initialize the table schema: use always for the first start, then change to embedded on later restarts
    jdbc:
      initialize-schema: always

seata:
  application-id: ${spring.application.name}
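With job-store-type: jdbc and isClustered: true, each application instance persists its jobs and triggers in the QRTZ_-prefixed tables and competes for them across the cluster, which is what allows the per-team dispatch jobs to run distributed. To see what ended up in the job store, a quick sketch (illustrative only, not part of this commit; the RegisteredJobsDump class is hypothetical):

// Illustrative only: dump the job keys that QuartzTaskListener registered in the JDBC job store.
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.matchers.GroupMatcher;

import java.util.Set;

public class RegisteredJobsDump {
    public static void dump(Scheduler scheduler) throws SchedulerException {
        Set<JobKey> keys = scheduler.getJobKeys(GroupMatcher.anyJobGroup());
        for (JobKey key : keys) {
            System.out.println(key.getGroup() + " / " + key.getName());
        }
    }
}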
project-dispatch/src/main/resources/logback-spring.xml (new file, 0 → 100644)

<?xml version="1.0" encoding="UTF-8"?>
<!-- This configuration writes log records of different levels to different files -->
<configuration>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
    <springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
    <springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
    <!-- Log output location in the project -->
    <property name="LOG_FILE" value="logs/${spring.application.name:-}"/>
    <!-- Console log output pattern -->
    <property name="CONSOLE_LOG_PATTERN" value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
    <!-- Log file output pattern -->
    <property name="FILE_LOG_PATTERN" value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${spring.application.name:-} %thread %logger %msg%n"/>

    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <!-- Log output encoding -->
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Regular log file, rolled daily -->
    <appender name="FileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_FILE}/info/${spring.application.name:-}.log</file>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <!-- Time-based rolling policy -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_FILE}/info/${spring.application.name:-}.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- Retention period, in days -->
            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- Error output: keep an extra copy of exception stacks in a separate file for easier lookup -->
    <appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <File>${LOG_FILE}/error/${spring.application.name:-}.error.log</File>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_FILE}/error/${spring.application.name:-}.error-%d{yyyy-MM-dd}-%i.zip</FileNamePattern>
            <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
            <totalSizeCap>500MB</totalSizeCap>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Only log ERROR -->
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <!-- Only log WARN -->
            <level>WARN</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <appender name="FILE_DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${LOG_FILE}/debug/${spring.application.name:-}.log</file>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
        <!-- Time-based rolling policy -->
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
            <!-- Retention period, in days -->
            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>DEBUG</level>
        </filter>
    </appender>

    <!-- Asynchronous output -->
    <appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <!-- 0 = never drop logs. By default, TRACE/DEBUG/INFO logs are dropped when the queue is 80% full -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Queue depth; affects performance. Default is 256 -->
        <queueSize>256</queueSize>
        <!-- Attach an appender; at most one can be added -->
        <appender-ref ref="FileAppender"/>
    </appender>

    <logger name="org.optaplanner" level="info"/>

    <!-- Root log level -->
    <root level="debug">
        <appender-ref ref="console"/>
        <appender-ref ref="FileAppender"/>
        <appender-ref ref="FILE_ERROR"/>
        <appender-ref ref="FILE_DEBUG"/>
    </root>
</configuration>
\ No newline at end of file
project-dispatch/src/main/resources/logback.xml → project-dispatch/src/main/resources/logback.xml.bak
File moved
project-order/src/main/java/com/dituhui/pea/order/enums/VehicleEnum.java

@@ -12,9 +12,9 @@ import java.util.Objects;
 * @date 2023/10/24
 */
public enum VehicleEnum {
-    CAR(1, "汽车"),
+    CAR(1, "驾车"),
    ELECTRIC_CAR(2, "电动车"),
-    BICYCLE(3, "自行车"),
+    BICYCLE(3, "骑行"),
    WALK(4, "步行");

    private Integer code;
project-order/src/main/java/com/dituhui/pea/order/service/impl/EngineerServiceImpl.java

@@ -242,6 +242,13 @@ public class EngineerServiceImpl implements EngineerService {
        update.where(cb.equal(root.get("engineerCode"), engineerCode));
        entityManager.createQuery(update).executeUpdate();

        CriteriaBuilder cb2 = entityManager.getCriteriaBuilder();
        CriteriaUpdate<EngineerInfoEntity> update2 = cb2.createCriteriaUpdate(EngineerInfoEntity.class);
        Root<EngineerInfoEntity> root2 = update2.from(EngineerInfoEntity.class);
        update2.set(root2.get("vehicle"), Integer.valueOf(transportMode));
        update2.where(cb2.equal(root2.get("engineerCode"), engineerCode));
        entityManager.createQuery(update2).executeUpdate();

        return Result.success(null);
    }
project-order/src/main/resources/logback-spring.xml

@@ -7,6 +7,7 @@
    <springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
    <springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
    <springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
    <!-- Log output location in the project -->
    <property name="LOG_FILE" value="logs/${spring.application.name:-}"/>