Commit e89c42fb by 刘鑫

Merge branch 'develop' of https://gitlab.dituhui.com/bsh/project/project into develop

2 parents 7c6f2663 e1647731
@@ -113,6 +113,8 @@ services:
     env_file:
       - .env
     container_name: project-gateway
+    volumes:
+      - /home/dev_boxi/logs:/logs
     ports:
       - ${GATEWAY_PORT}:${GATEWAY_PORT}
     depends_on:
@@ -129,6 +131,7 @@ services:
     container_name: project-order
     volumes:
       - /usr/local/src:/usr/local/src
+      - /home/dev_boxi/logs:/logs
     ports:
       - ${ORDER_PORT}:${ORDER_PORT}
       - "62001:62001"
@@ -147,6 +150,8 @@ services:
     env_file:
       - .env
     container_name: project-user
+    volumes:
+      - /home/dev_boxi/logs:/logs
     ports:
       - ${USER_PORT}:${USER_PORT}
       - "62000:5005"
@@ -213,7 +218,7 @@ services:
       - .env
     container_name: project-dispatch
     volumes:
-      - /home/dev_boxi/project-dispatch/logs:/logs
+      - /home/dev_boxi/logs:/logs
     ports:
       - ${DISPATCH_PORT}:${DISPATCH_PORT}
     depends_on:
......
@@ -27,7 +27,7 @@ import org.springframework.scheduling.annotation.EnableScheduling;
 */
@SpringBootApplication
@EnableScheduling
-@EnableFeignClients(basePackages = {"com.dituhui.pea.user"})
+@EnableFeignClients(basePackages = {"com.dituhui.pea.user", "com.dituhui.pea.msg"})
@Import(cn.hutool.extra.spring.SpringUtil.class)
public class DispatchServiceApplication {
......
@@ -3,7 +3,7 @@ server:
dispatch:
  cron:
-    expr: 0 20 8-23 * * ?
+    expr: 0 58 8-23 * * ?
  next-day-limit: 2
scheduler:
......
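Review note: the cron change moves the dispatch job from minute 20 to minute 58 of every hour between 08:00 and 23:00 (six Quartz-style fields: second, minute, hour, day-of-month, month, day-of-week), presumably consumed by Spring's scheduler since the application enables @EnableScheduling. A minimal sketch to sanity-check the new expression, assuming Spring Framework 5.3+ is on the classpath; the class name CronCheck is hypothetical:

import org.springframework.scheduling.support.CronExpression;

import java.time.LocalDateTime;

public class CronCheck {
    public static void main(String[] args) {
        // Parse the new dispatch trigger: second 0, minute 58, hours 8-23, every day.
        CronExpression expr = CronExpression.parse("0 58 8-23 * * ?");
        // Compute the next firing time after "now", e.g. today at HH:58 during working hours.
        LocalDateTime next = expr.next(LocalDateTime.now());
        System.out.println("next dispatch run: " + next);
    }
}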
<?xml version="1.0" encoding="UTF-8"?>
<!-- This configuration writes log output of different levels to separate files -->
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
<springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
<springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
<!-- Where log files are written inside the project -->
<property name="LOG_FILE" value="logs/${spring.application.name:-}"/>
<!-- Console log output pattern -->
<property name="CONSOLE_LOG_PATTERN"
value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!-- Log file output pattern -->
<property name="FILE_LOG_PATTERN"
value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${spring.application.name:-} %thread %logger %msg%n"/>
<!-- Console output -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>DEBUG</level>
</filter>
<!-- Log output encoding -->
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<appender name="FILE_DEBUG"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/debug/${spring.application.name:-}.debug.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.debug-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志 -->
<level>DEBUG</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="FILE_INFO"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/info/${spring.application.name:-}.info.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/info/${spring.application.name:-}.info-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志 -->
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Error file output: keep a separate copy of exception stack traces in their own file for easier lookup -->
<appender name="FILE_ERROR"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/error/${spring.application.name:-}.error.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/error/${spring.application.name:-}.error-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志 -->
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Warning file output: keep warnings in their own file for easier lookup -->
<appender name="FILE_WARN"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/warn/${spring.application.name:-}.warn.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/warn/${spring.application.name:-}.warn-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印警告日志 -->
<level>WARN</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- Asynchronous output -->
<appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
<!-- 0 = never discard events; by default, once the queue is 80% full, TRACE/DEBUG/INFO events are dropped -->
<discardingThreshold>0</discardingThreshold>
<!-- Queue depth; affects performance. Default is 256 -->
<queueSize>256</queueSize>
<!-- Attach the downstream appender; at most one can be attached -->
<appender-ref ref="FILE_INFO"/>
</appender>
<!-- Root log level -->
<root level="info">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE_INFO"/>
<appender-ref ref="FILE_ERROR"/>
<appender-ref ref="FILE_WARN"/>
</root>
</configuration>
\ No newline at end of file
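Review note: with the root level set to "info" above, DEBUG events are dropped before reaching any appender, so FILE_DEBUG and ASYNC are defined but unused unless a more specific logger element lowers the level. A small sketch of where each level ends up under this configuration; SLF4J is assumed on the classpath and the class name is hypothetical:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSmokeTest {
    private static final Logger log = LoggerFactory.getLogger(LoggingSmokeTest.class);

    public static void main(String[] args) {
        log.debug("dropped at the root level, written nowhere");     // root level is info
        log.info("CONSOLE + logs/<app>/info/<app>.info.log");        // FILE_INFO accepts only INFO
        log.warn("CONSOLE + logs/<app>/warn/<app>.warn.log");        // FILE_WARN accepts only WARN
        log.error("CONSOLE + logs/<app>/error/<app>.error.log");     // FILE_ERROR accepts only ERROR
    }
}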
package com.dituhui.pea.msg;

import com.dituhui.pea.common.Result;
import com.dituhui.pea.pojo.*;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.*;

import javax.validation.Valid;

/**
 * Message-related interface
 */
@FeignClient(value = "project-order", contextId = "msg")
public interface IMsg {

    /**
     * Add a message
     *
     * @param dto the message payload
     * @return whether the message was created
     */
    @PostMapping(value = "/pea-order/msg/add")
    Result<Boolean> add(@RequestBody @Valid MsgDTO dto);
}
package com.dituhui.pea.pojo;

import lombok.Data;

@Data
public class MsgDTO {

    /**
     * Region (cluster) ID; only one of clusterId / branchId / groupId needs to be supplied
     */
    private String clusterId;

    /**
     * Branch ID
     */
    private String branchId;

    /**
     * Group ID
     */
    private String groupId;

    /**
     * Message type: 0 = dispatch, 1 = service, 2 = capacity
     */
    private Integer type;

    /**
     * Message content
     */
    private String content;

    /**
     * Order numbers, joined with "、"
     */
    private String orderIds;

    /**
     * Tag type: 0 = urgent, 1 = normal
     */
    private Integer tag;

    /**
     * Layer ID the capacity belongs to
     */
    private String capacityLayerId;

    /**
     * Date the capacity belongs to
     */
    private String capacityDate;
}
\ No newline at end of file
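Review note: IMsg is a Feign contract against the project-order service, so callers only need the interface plus the com.dituhui.pea.msg package in @EnableFeignClients (as added to DispatchServiceApplication above). A hedged usage sketch from the dispatch side; the caller class, method, and field values below are hypothetical, not part of this commit:

import com.dituhui.pea.common.Result;
import com.dituhui.pea.msg.IMsg;
import com.dituhui.pea.pojo.MsgDTO;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;

@Service
@RequiredArgsConstructor
public class DispatchMsgPublisher {          // hypothetical caller

    private final IMsg msgClient;            // Feign proxy generated from the IMsg interface

    public void notifyDispatchDelay(String groupId, String orderIds) {
        MsgDTO dto = new MsgDTO();
        dto.setGroupId(groupId);             // only one of clusterId/branchId/groupId is needed
        dto.setType(0);                      // 0 = dispatch-related message
        dto.setTag(0);                       // 0 = urgent
        dto.setOrderIds(orderIds);           // order numbers joined with "、"
        dto.setContent("Order dispatch is delayed, please follow up");
        Result<Boolean> result = msgClient.add(dto);   // POST /pea-order/msg/add on project-order
    }
}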
@@ -8,7 +8,9 @@ public enum ServiceStatusEnum {
    STARTED("STARTED", "已开始"),
    CANCELED("CANCELED", "已取消"),
    FINISHED("FINISHED", "已完成"),
-    UNFINISHED("UNFINISHED", "已上门未完成");
+    // overdue
+    UNFINISHED("UNFINISHED", "已上门未完成"),
+    DELAY("DELAY", "已延误");
    private final String code;
......
@@ -20,6 +20,7 @@ import com.dituhui.pea.order.service.DispatchService;
import com.dituhui.pea.order.service.EngineerCalendarService;
import com.dituhui.pea.order.service.OrderInfoService;
import com.dituhui.pea.order.utils.CommonUtil;
+import com.dituhui.pea.order.utils.OrderUtil;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
@@ -293,7 +294,7 @@ public class DispatchServiceImpl implements DispatchService {
            item.setAppointmentType(o.getAppointmentMethod());
            item.setAppointmentStatus(o.getAppointmentStatus());
            item.setOrderStatus(o.getOrderStatus()); // order status
-            item.setServiceStatus(o.getServiceStatus());
+            item.setServiceStatus(OrderUtil.handleServiceStatus(o));
            item.setOrderStatusDesc(OrderStatusEnum.valueOf(o.getOrderStatus()).getDescription());
            item.setCreateTime(TimeUtils.IsoLocalDateTime2String(o.getCreateTime()));
......
@@ -73,7 +73,7 @@ public class EngineerGanttServiceImpl implements EngineerGanttService {
            EngineersGanttDTO.Slot slot = new EngineersGanttDTO.Slot();
            // TODO: choose expectXXXX or actualXXXX based on the order's time and status
            slot.setOrderId(order.getOrderId());
-            slot.setServiceStatus(order.getServiceStatus());
+            slot.setServiceStatus(OrderUtil.handleServiceStatus(order));
            String startServiceStr = null;
            if (checkOrderIsFinish(order.getServiceStatus())) {
                slot.setBtime(ObjectUtil.isNull(order.getActualStartTime()) ? null : getHourMinute(order.getActualStartTime()));
......
@@ -621,6 +621,10 @@ public class OrderInfoServiceImpl implements OrderInfoService {
                nestOrder.setPlanStartTime(nestOrder.getPlanStartTime().plusMinutes(minutes));
                nestOrder.setPlanEndTime(nestOrder.getPlanEndTime().plusMinutes(minutes));
                orderInfoDao.save(nestOrder);
+                // If the next order is the one being inserted and it is the last one, return directly
+                if (i == orderSegments.size() - 2) {
+                    return Result.success(insertOrder);
+                }
                moveTime = (int) minutes;
                isMove = true;
            }
@@ -687,7 +691,7 @@ public class OrderInfoServiceImpl implements OrderInfoService {
            return Result.success(expectTimeBegin);
        }
        // If the expected start falls inside the leave period, postpone it
-        if (expectTimeBegin.compareTo(occupyInfoDetail.getBeginTime()) >= 0 || expectTimeBegin.compareTo(occupyInfoDetail.getEndTime()) <= 0) {
+        if (expectTimeBegin.compareTo(occupyInfoDetail.getBeginTime()) >= 0 && expectTimeBegin.compareTo(occupyInfoDetail.getEndTime()) <= 0) {
            return Result.success(occupyInfoDetail.getEndTime());
        }
        if (expectTimeEnd.compareTo(occupyInfoDetail.getEndTime().plusMinutes(takeTime)) >= 0) {
......
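Review note on the && fix: with ||, the leave-window test was true for virtually every order (any instant is either after the window start or before its end), so appointments were pushed past a leave they never overlapped. A minimal sketch of the corrected containment check, with hypothetical times:

import java.time.LocalDateTime;

public class LeaveWindowCheckDemo {
    public static void main(String[] args) {
        LocalDateTime leaveBegin = LocalDateTime.of(2023, 8, 1, 13, 0);  // hypothetical leave window
        LocalDateTime leaveEnd   = LocalDateTime.of(2023, 8, 1, 15, 0);
        LocalDateTime expectTimeBegin = LocalDateTime.of(2023, 8, 1, 9, 0);

        // Old condition (||): true here, so a 09:00 appointment was wrongly moved to 15:00.
        boolean oldCheck = expectTimeBegin.compareTo(leaveBegin) >= 0 || expectTimeBegin.compareTo(leaveEnd) <= 0;
        // New condition (&&): only true when the expected start actually falls inside the leave.
        boolean newCheck = expectTimeBegin.compareTo(leaveBegin) >= 0 && expectTimeBegin.compareTo(leaveEnd) <= 0;

        System.out.println(oldCheck + " vs " + newCheck);   // prints "true vs false"
    }
}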
package com.dituhui.pea.order.utils;

+import cn.hutool.core.util.ObjectUtil;
import com.dituhui.pea.order.entity.OrderInfoEntity;
import com.dituhui.pea.order.enums.OrderPeaTagsEnum;
+import com.dituhui.pea.order.enums.ServiceStatusEnum;
import org.apache.commons.lang3.StringUtils;

+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -40,6 +43,57 @@ public class OrderUtil {
    /**
+     * Resolve the work-order service status for display, mainly mapping delayed and overdue orders
+     *
+     * @param order the order entity
+     * @return the resolved service status code
+     */
+    public static String handleServiceStatus(OrderInfoEntity order) {
+        if (isDelay(order)) {
+            return ServiceStatusEnum.DELAY.getCode();
+        }
+        if (isOvertime(order)) {
+            return ServiceStatusEnum.UNFINISHED.getCode();
+        }
+        return order.getServiceStatus();
+    }
+
+    /**
+     * Check whether the order is delayed (still INIT/CONTACTED more than 10 minutes after the expected start time)
+     *
+     * @param order the order entity
+     * @return true if the order counts as delayed
+     */
+    public static Boolean isDelay(OrderInfoEntity order) {
+        if (!order.getServiceStatus().equals(ServiceStatusEnum.INIT.getCode()) && !order.getServiceStatus().equals(ServiceStatusEnum.CONTACTED.getCode())) {
+            return false;
+        }
+        if (order.getExpectTimeBegin().plusMinutes(10).isBefore(LocalDateTime.now())) {
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Check whether the order is overdue (STARTED, not yet finished, more than 10 minutes after the planned end time)
+     *
+     * @param order the order entity
+     * @return true if the order counts as overdue
+     */
+    public static Boolean isOvertime(OrderInfoEntity order) {
+        if (!order.getServiceStatus().equals(ServiceStatusEnum.STARTED.getCode()) || ObjectUtil.isNotNull(order.getActualEndTime())) {
+            return false;
+        }
+        if (order.getPlanEndTime().plusMinutes(10).isBefore(LocalDateTime.now())) {
+            return true;
+        }
+        return false;
+    }
+
+    /**
     * Handle order assignment data (engineer, branch, etc.) for split orders, over-dispatch, special time windows, and similar cases
     *
     * @param order
......
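Review note: handleServiceStatus only rewrites the status that is displayed to callers; nothing is persisted. A short restatement of the two rules as a standalone sketch, assuming the INIT/CONTACTED/STARTED codes match ServiceStatusEnum; the helper class and sample values are hypothetical:

import java.time.LocalDateTime;

public class ServiceStatusRulesDemo {   // hypothetical mirror of OrderUtil's rules
    static String resolve(String storedStatus, LocalDateTime expectTimeBegin,
                          LocalDateTime planEndTime, LocalDateTime actualEndTime, LocalDateTime now) {
        boolean notStarted = storedStatus.equals("INIT") || storedStatus.equals("CONTACTED");
        if (notStarted && expectTimeBegin.plusMinutes(10).isBefore(now)) {
            return "DELAY";        // more than 10 minutes past the expected start and still not started
        }
        boolean inProgress = storedStatus.equals("STARTED") && actualEndTime == null;
        if (inProgress && planEndTime.plusMinutes(10).isBefore(now)) {
            return "UNFINISHED";   // started, not finished, more than 10 minutes past the planned end
        }
        return storedStatus;       // otherwise show the stored status unchanged
    }

    public static void main(String[] args) {
        LocalDateTime now = LocalDateTime.of(2023, 8, 1, 10, 0);
        System.out.println(resolve("INIT", LocalDateTime.of(2023, 8, 1, 9, 30),
                LocalDateTime.of(2023, 8, 1, 11, 0), null, now));   // prints DELAY
    }
}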
@@ -4,9 +4,8 @@
<configuration>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
-    <springProperty name="LOG_FILEMAXDAY" scope="contexmt" source="logback.filemaxday" defaultValue="30"/>
+    <springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
    <springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
    <!-- Where log files are written inside the project -->
@@ -19,9 +18,9 @@
    <property name="FILE_LOG_PATTERN"
              value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${spring.application.name:-} %thread %logger %msg%n"/>
    <!-- Console output -->
-    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
-            <level>INFO</level>
+            <level>DEBUG</level>
        </filter>
        <!-- Log output encoding -->
        <encoder>
@@ -30,23 +29,44 @@
        </encoder>
    </appender>
-    <!-- Roll an ordinary log file each day -->
-    <appender name="FileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
-        <file>${LOG_FILE}/info/${spring.application.name:-}.info.log</file>
-        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-            <pattern>${FILE_LOG_PATTERN}</pattern>
-            <charset>UTF-8</charset>
-        </encoder>
-        <!-- Time-based rolling policy -->
-        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${LOG_FILE}/info/${spring.application.name:-}.info.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
-            <!-- Retention, in days -->
-            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
-            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
-                <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
-            </timeBasedFileNamingAndTriggeringPolicy>
-        </rollingPolicy>
-        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+    <appender name="FILE_DEBUG"
+              class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <File>${LOG_FILE}/debug/${spring.application.name:-}.debug.log</File>
+        <rollingPolicy
+                class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+            <FileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.debug-%d{yyyy-MM-dd}-%i.zip
+            </FileNamePattern>
+            <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
+            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
+            <totalSizeCap>500MB</totalSizeCap>
+        </rollingPolicy>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- accept only DEBUG events -->
+            <level>DEBUG</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+    <appender name="FILE_INFO"
+              class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <File>${LOG_FILE}/info/${spring.application.name:-}.info.log</File>
+        <rollingPolicy
+                class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+            <FileNamePattern>${LOG_FILE}/info/${spring.application.name:-}.info-%d{yyyy-MM-dd}-%i.zip
+            </FileNamePattern>
+            <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
+            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
+            <totalSizeCap>500MB</totalSizeCap>
+        </rollingPolicy>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- accept only INFO events -->
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
@@ -74,30 +94,28 @@
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
-        <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- accept only WARN events -->
-            <level>WARN</level>
-            <onMatch>ACCEPT</onMatch>
-            <onMismatch>DENY</onMismatch>
-        </filter>
    </appender>
-    <appender name="FILE_DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
-        <file>${LOG_FILE}/debug/${spring.application.name:-}.debug.log</file>
-        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
-            <pattern>${FILE_LOG_PATTERN}</pattern>
-            <charset>UTF-8</charset>
-        </encoder>
-        <!-- Time-based rolling policy -->
-        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.debug.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
-            <!-- Retention, in days -->
-            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
-            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
-                <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
-            </timeBasedFileNamingAndTriggeringPolicy>
-        </rollingPolicy>
-        <filter class="ch.qos.logback.classic.filter.LevelFilter">
-            <level>DEBUG</level>
-        </filter>
-    </appender>
+    <!-- Warning file output: keep warnings in their own file for easier lookup -->
+    <appender name="FILE_WARN"
+              class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <File>${LOG_FILE}/warn/${spring.application.name:-}.warn.log</File>
+        <rollingPolicy
+                class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
+            <FileNamePattern>${LOG_FILE}/warn/${spring.application.name:-}.warn-%d{yyyy-MM-dd}-%i.zip
+            </FileNamePattern>
+            <maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
+            <maxHistory>${LOG_FILEMAXDAY}</maxHistory>
+            <totalSizeCap>500MB</totalSizeCap>
+        </rollingPolicy>
+        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- accept only WARN events -->
+            <level>WARN</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
@@ -108,15 +126,17 @@
        <!-- Queue depth; affects performance. Default is 256 -->
        <queueSize>256</queueSize>
        <!-- Attach the downstream appender; at most one can be attached -->
-        <appender-ref ref="FileAppender"/>
+        <appender-ref ref="FILE_INFO"/>
    </appender>
+    <logger name="org.springframework.jdbc.core.JdbcTemplate" level="debug" />
    <!-- Root log level -->
-    <root level="debug">
-        <appender-ref ref="console"/>
-        <appender-ref ref="FileAppender"/>
+    <root level="info">
+        <appender-ref ref="CONSOLE"/>
+        <appender-ref ref="FILE_INFO"/>
        <appender-ref ref="FILE_ERROR"/>
-        <!--<appender-ref ref="FILE_DEBUG"/>-->
+        <appender-ref ref="FILE_WARN"/>
    </root>
</configuration>
\ No newline at end of file
server:
  port: 8000
spring:
  application:
    name: project-user
  cloud:
    nacos:
      discovery:
        server-addr: nacos-server:8848
        group: project
      config:
        server-addr: nacos-server:8848
        group: project
        file-extension: yaml
  config:
    import:
      - optional:nacos:redis-config.yaml?group=project&refreshEnabled=true
  main:
    allow-bean-definition-overriding: true
  datasource:
    driver-class-name: com.mysql.cj.jdbc.Driver
    #url: jdbc:mysql://10.10.0.116:32306/saas_aftersale_test?serverTimezone=Asia/Shanghai
    url: jdbc:mysql://localhost:32306/saas_aftersale_test?serverTimezone=Asia/Shanghai
    username: boxi
    password: boxi_dev_0725
    type: com.alibaba.druid.pool.DruidDataSource
  redis:
    database: 0
    host: redis
    port: 6379
    password: 123456
    jedis:
      pool:
        max-active: 32
        min-idle: 0
        max-idle: 8
        max-wait: -1
  kafka:
    bootstrap-servers: http://kafka:9092
    consumer:
      group-id: mpGroup
#seata:
#  application-id: ${spring.application.name}
#  tx-service-group: ${spring.application.name}-group
#  service:
#    vgroup-mapping:
#      project-user-group: default
#    grouplist:
#      default: seata-server:8091
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<!--该日志将日志级别不同的log信息保存到不同的文件中 -->
<configuration> <configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml"/> <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<!-- Example for logging into the build folder of your project -->
<property name="LOG_FILE" value="logs/${springAppName}"/>
<!-- You can override this to have a custom pattern --> <springProperty name="LOG_MAXFILESIZE" scope="context" source="logback.filesize" defaultValue="100MB"/>
<property name="CONSOLE_LOG_PATTERN" <springProperty name="LOG_FILEMAXDAY" scope="context" source="logback.filemaxday" defaultValue="30"/>
value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/> <springProperty name="spring.application.name" scope="context" source="spring.application.name" defaultValue="spring-boot-fusion"/>
<!-- Appender to log to console --> <!-- 日志在工程中的输出位置 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender"> <property name="LOG_FILE" value="logs/${spring.application.name:-}"/>
<!-- 控制台的日志输出样式 -->
<property name="CONSOLE_LOG_PATTERN"
value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
<!--日志文件输出格式-->
<property name="FILE_LOG_PATTERN"
value="%d{yyyy-MM-dd HH:mm:ss} %-5level ${spring.application.name:-} %thread %logger %msg%n"/>
<!-- 控制台输出 -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- Minimum logging level to be presented in the console logs-->
<level>DEBUG</level> <level>DEBUG</level>
</filter> </filter>
<!-- 日志输出编码 -->
<encoder> <encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern> <pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>utf8</charset> <charset>UTF-8</charset>
</encoder> </encoder>
</appender> </appender>
<!-- Appender to log to file --> <appender name="FILE_DEBUG"
<appender name="flatfile" class="ch.qos.logback.core.rolling.RollingFileAppender"> class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_FILE}</file> <File>${LOG_FILE}/debug/${spring.application.name:-}.debug.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> <rollingPolicy
<fileNamePattern>${LOG_FILE}.%d{yyyy-MM-dd}.gz</fileNamePattern> class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<maxHistory>7</maxHistory> <FileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.debug-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy> </rollingPolicy>
<encoder> <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>${CONSOLE_LOG_PATTERN}</pattern> <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>utf8</charset> <charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志 -->
<level>DEBUG</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="FILE_INFO"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/info/${spring.application.name:-}.info.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/debug/${spring.application.name:-}.info-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志 -->
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- 异常文件输出设置,将异常堆栈另外保存一份到单独的文件中,方便查找 -->
<appender name="FILE_ERROR"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_FILE}/error/${spring.application.name:-}.error.log</File>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/error/${spring.application.name:-}.error-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<charset>UTF-8</charset>
</encoder> </encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印错误日志 -->
<level>ERROR</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender> </appender>
<!-- Appender to log to file in a JSON format
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender"> <!-- 异常文件输出设置,将异常堆栈另外保存一份到单独的文件中,方便查找 -->
<file>${LOG_FILE}.json</file> <appender name="FILE_WARN"
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"> class="ch.qos.logback.core.rolling.RollingFileAppender">
<fileNamePattern>${LOG_FILE}.json.%d{yyyy-MM-dd}.gz</fileNamePattern> <File>${LOG_FILE}/warn/${spring.application.name:-}.warn.log</File>
<maxHistory>7</maxHistory> <rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_FILE}/warn/${spring.application.name:-}.warn-%d{yyyy-MM-dd}-%i.zip
</FileNamePattern>
<maxFileSize>${LOG_MAXFILESIZE}</maxFileSize>
<maxHistory>${LOG_FILEMAXDAY}</maxHistory>
<totalSizeCap>500MB</totalSizeCap>
</rollingPolicy> </rollingPolicy>
<encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder"> <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<providers> <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level - %msg%n</pattern>
<timestamp> <charset>UTF-8</charset>
<timeZone>UTC</timeZone>
</timestamp>
<pattern>
<pattern>
{
"timestamp": "@timestamp",
"userId": "%X{userId:-}",
"severity": "%level",
"service": "${springAppName:-}",
"trace": "%X{traceId:-}",
"span": "%X{spanId:-}",
"pid": "${PID:-}",
"thread": "%thread",
"class": "%logger{40}",
"rest": "%message"
}
</pattern>
</pattern>
</providers>
</encoder> </encoder>
</appender> --> <filter class="ch.qos.logback.classic.filter.LevelFilter"><!-- 只打印警告日志 -->
<root level="INFO"> <level>WARN</level>
<appender-ref ref="console"/> <onMatch>ACCEPT</onMatch>
<!-- uncomment this to have also JSON logs --> <onMismatch>DENY</onMismatch>
<!--<appender-ref ref="logstash"/>--> </filter>
<!--<appender-ref ref="flatfile"/>--> </appender>
<!-- 异步输出 -->
<appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
<!-- 0-不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
<queueSize>256</queueSize>
<!-- 添加附加的appender,最多只能添加一个 -->
<appender-ref ref="FILE_INFO"/>
</appender>
<logger name="org.springframework.jdbc.core.JdbcTemplate" level="debug" />
<!-- 日志输出级别 -->
<root level="info">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE_INFO"/>
<appender-ref ref="FILE_ERROR"/>
<appender-ref ref="FILE_WARN"/>
</root> </root>
</configuration> </configuration>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<springProperty scope="context" name="springAppName" source="spring.application.name"/>
<!-- Example for logging into the build folder of your project -->
<property name="LOG_FILE" value="logs/${springAppName}"/>
<!-- You can override this to have a custom pattern -->
<property name="CONSOLE_LOG_PATTERN"
value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>
<!-- Appender to log to console -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- Minimum logging level to be presented in the console logs-->
<level>DEBUG</level>
</filter>
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>utf8</charset>
</encoder>
</appender>
<!-- Appender to log to file -->
<appender name="flatfile" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_FILE}</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_FILE}.%d{yyyy-MM-dd}.gz</fileNamePattern>
<maxHistory>7</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
<charset>utf8</charset>
</encoder>
</appender>
<!-- Appender to log to file in a JSON format
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_FILE}.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_FILE}.json.%d{yyyy-MM-dd}.gz</fileNamePattern>
<maxHistory>7</maxHistory>
</rollingPolicy>
<encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>
<timeZone>UTC</timeZone>
</timestamp>
<pattern>
<pattern>
{
"timestamp": "@timestamp",
"userId": "%X{userId:-}",
"severity": "%level",
"service": "${springAppName:-}",
"trace": "%X{traceId:-}",
"span": "%X{spanId:-}",
"pid": "${PID:-}",
"thread": "%thread",
"class": "%logger{40}",
"rest": "%message"
}
</pattern>
</pattern>
</providers>
</encoder>
</appender> -->
<root level="INFO">
<appender-ref ref="console"/>
<!-- uncomment this to have also JSON logs -->
<!--<appender-ref ref="logstash"/>-->
<!--<appender-ref ref="flatfile"/>-->
</root>
</configuration>
\ No newline at end of file