First, the directory layout: logback-spring.xml sits at the classpath root (src/main/resources in a typical project), and the three included files below (logback-base.xml, logback-dev.xml, logback-prod.xml) live in a logback/ sub-directory, matching the resource paths used in the <include> elements.
logback-spring.xml
<configuration scan="true" scanPeriod="10 seconds" debug="false">
    <include resource="logback/logback-base.xml"/>
    <springProfile name="dev">
        <include resource="logback/logback-dev.xml"/>
    </springProfile>
    <springProfile name="prod">
        <include resource="logback/logback-prod.xml"/>
    </springProfile>
</configuration>
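Which of the two includes takes effect is decided by the active Spring profile. A minimal sketch of how that is usually selected, assuming a standard application.yml (the value shown is only an example):
application.yml
spring:
  profiles:
    active: dev   # "dev" pulls in logback-dev.xml, "prod" pulls in logback-prod.xml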
logback-base.xml
<included>
    <springProperty name="LOGGER_FILE_MAX_SIZE" scope="context" source="logback.filesize" defaultValue="5MB"/>
    <springProperty name="LOGGER_FILE_MAX_DAY" scope="context" source="logback.filemaxday" defaultValue="2"/>
    <property name="LOGGER_FILE_PATH" value="logs"/>
    <property name="CHARSET" value="utf-8"/>
    <property name="TOTAL_SIZE_CAP" value="1GB"/>
    <property name="LOG_PATTERN"
              value="%d{yyyy-MM-dd HH:mm:ss.SSS} %level ${PID} [%thread] %logger %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wex}"/>
    <conversionRule conversionWord="wex"
                    converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>
</included>
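The two springProperty entries read their values from the Spring environment and fall back to defaultValue when the keys are absent, so the rolling limits can be tuned per deployment without editing the XML. A sketch of the matching keys, assuming the same application.yml (the override values are illustrative):
application.yml
logback:
  filesize: 10MB   # bound to LOGGER_FILE_MAX_SIZE (default 5MB)
  filemaxday: 7    # bound to LOGGER_FILE_MAX_DAY (default 2 days)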
logback-dev.xml
<included>
    <property name="DEV_LOGGER_LEVEL" value="DEBUG"/>
    <property name="CONSOLE-LOG-PATTERN"
              value="%red(%date{yyyy-MM-dd HH:mm:ss}) %highlight(%-5level) %red([%thread]) %boldMagenta(%logger{50}) %cyan(%msg%n)"/>
    <appender name="stdoutAppender" class="ch.qos.logback.core.ConsoleAppender">
        <withJansi>true</withJansi>
        <encoder>
            <pattern>${CONSOLE-LOG-PATTERN}</pattern>
            <charset>${CHARSET}</charset>
        </encoder>
    </appender>
    <appender name="console-async-appender" class="ch.qos.logback.classic.AsyncAppender">
        <discardingThreshold>0</discardingThreshold>
        <appender-ref ref="stdoutAppender"/>
    </appender>
    <root level="${DEV_LOGGER_LEVEL}">
        <appender-ref ref="console-async-appender"/>
    </root>
</included>
logback-prod.xml
<included>
    <property name="PROD_LOGGER_LEVEL" value="INFO"/>

    <appender name="data-appender-error" class="ch.qos.logback.classic.sift.SiftingAppender">
        <discriminator>
            <key>jobName</key>
            <defaultValue>system</defaultValue>
        </discriminator>
        <sift>
            <appender name="${jobName}-error" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <encoder>
                    <pattern>${LOG_PATTERN}</pattern>
                    <charset>${CHARSET}</charset>
                </encoder>
                <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
                    <fileNamePattern>${LOGGER_FILE_PATH}/${jobName}.%d{yyyy-MM-dd}.%i.error</fileNamePattern>
                    <maxHistory>${LOGGER_FILE_MAX_DAY}</maxHistory>
                    <maxFileSize>${LOGGER_FILE_MAX_SIZE}</maxFileSize>
                    <totalSizeCap>${TOTAL_SIZE_CAP}</totalSizeCap>
                    <cleanHistoryOnStart>true</cleanHistoryOnStart>
                </rollingPolicy>
                <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
                    <level>ERROR</level>
                </filter>
            </appender>
        </sift>
    </appender>

    <appender name="data-appender-info" class="ch.qos.logback.classic.sift.SiftingAppender">
        <discriminator>
            <key>jobName</key>
            <defaultValue>system</defaultValue>
        </discriminator>
        <sift>
            <appender name="${jobName}-info" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <encoder>
                    <pattern>${LOG_PATTERN}</pattern>
                    <charset>${CHARSET}</charset>
                </encoder>
                <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
                    <fileNamePattern>${LOGGER_FILE_PATH}/${jobName}.%d{yyyy-MM-dd}.%i.log</fileNamePattern>
                    <maxHistory>${LOGGER_FILE_MAX_DAY}</maxHistory>
                    <maxFileSize>${LOGGER_FILE_MAX_SIZE}</maxFileSize>
                    <totalSizeCap>${TOTAL_SIZE_CAP}</totalSizeCap>
                    <cleanHistoryOnStart>true</cleanHistoryOnStart>
                </rollingPolicy>
                <filter class="ch.qos.logback.classic.filter.LevelFilter">
                    <level>ERROR</level>
                    <onMatch>DENY</onMatch>
                    <onMismatch>ACCEPT</onMismatch>
                </filter>
            </appender>
        </sift>
    </appender>

    <appender name="file_async" class="ch.qos.logback.classic.AsyncAppender">
        <discardingThreshold>0</discardingThreshold>
        <appender-ref ref="data-appender-info"/>
    </appender>

    <appender name="error_file_async" class="ch.qos.logback.classic.AsyncAppender">
        <discardingThreshold>0</discardingThreshold>
        <appender-ref ref="data-appender-error"/>
    </appender>

    <!-- Optional per-package loggers: replace "your.root.package" with your application's root package. -->
    <logger name="your.root.package" level="ERROR" additivity="false">
        <appender-ref ref="error_file_async"/>
    </logger>
    <logger name="your.root.package" level="${PROD_LOGGER_LEVEL}" additivity="false">
        <appender-ref ref="file_async"/>
    </logger>

    <root level="${PROD_LOGGER_LEVEL}">
        <appender-ref ref="file_async"/>
        <appender-ref ref="error_file_async"/>
    </root>
</included>
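With the prod profile active, the SiftingAppender opens one set of rolling files per distinct jobName value found in the MDC; events logged without the key fall back to the system files. A minimal sketch of the effect (class name and job name are hypothetical):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class SiftDemo {
    private static final Logger log = LoggerFactory.getLogger(SiftDemo.class);

    public static void main(String[] args) {
        log.info("no jobName in MDC");      // -> logs/system.<date>.0.log
        MDC.put("jobName", "orderSync");    // key must match the discriminator <key> above
        log.info("order sync running");     // -> logs/orderSync.<date>.0.log
        log.error("order sync failed");     // -> logs/orderSync.<date>.0.error
        MDC.remove("jobName");
    }
}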
Finally, the annotation-based part. The underlying mechanism is logback's MDC: as long as a job runs entirely on one thread, its log output can be separated from everything else. (MDC values are stored per thread, so they are not carried over automatically when work is handed off to a thread pool or a new thread.)
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface MdcLogger {
    String value();
}
The AOP aspect:
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.MDC;
import org.springframework.stereotype.Component;

@Aspect
@Component
public class MdcLoggerAspect {

    // Pointcut marker method: matches any method carrying @MdcLogger; its body is never executed.
    @Pointcut("@annotation(com.lz.bigdata.synchronizer.biz.dy.domain.annotation.MdcLogger)")
    public void mdcLoggerCutPoint() {
    }

    @Before("mdcLoggerCutPoint()")
    public void mdcLoggerBefore(JoinPoint joinPoint) {
        MdcLogger mdcLoggerAnnotation = ((MethodSignature) joinPoint.getSignature()).getMethod().getDeclaredAnnotation(MdcLogger.class);
        String value = mdcLoggerAnnotation.value();
        // "jobName" must match the discriminator <key> defined in logback-prod.xml
        MDC.put("jobName", value);
    }

    @After("mdcLoggerCutPoint()")
    public void mdcLoggerAfter() {
        MDC.clear();
    }
}
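A job method then only needs the annotation. The hypothetical scheduled job below (class, method, job name and cron expression are illustrative; @Scheduled also assumes @EnableScheduling is configured elsewhere) would have all of its log output on that thread routed to the orderSync log files under the prod profile:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import com.lz.bigdata.synchronizer.biz.dy.domain.annotation.MdcLogger;

@Component
public class OrderSyncJob {

    private static final Logger log = LoggerFactory.getLogger(OrderSyncJob.class);

    @MdcLogger("orderSync")            // the value becomes the MDC jobName for this invocation
    @Scheduled(cron = "0 0/5 * * * ?") // example trigger: every five minutes
    public void run() {
        log.info("order sync started"); // written to logs/orderSync.<date>.*.log instead of the system files
    }
}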