Logging Components
1 slf4j
- Simple Logging Facade for Java
- Not a concrete logging implementation itself
- Like JDBC, it is a specification: application code is written against the facade only
- Binds the real logging library statically: SLF4J 1.x resolves whichever single binding it finds on the classpath
- To route to a particular implementation you must put the matching set of SLF4J jars (the various binding/bridge jars) on the classpath; see the usage sketch below
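A minimal usage sketch of the facade (the class name Slf4jDemo and the messages are illustrative); code talks only to the SLF4J API, and the binding found on the classpath decides where the output actually goes:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Slf4jDemo {
    // Obtained through the facade; the concrete backend (log4j, log4j2, logback, ...) comes from the classpath
    private static final Logger log = LoggerFactory.getLogger(Slf4jDemo.class);

    public static void main(String[] args) {
        // {} placeholders defer message construction until the level is known to be enabled
        log.info("service started on port {}", 8080);
        log.debug("debug detail for {}", "demo");
    }
}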
1.1 pom - the API
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
1.2 pom - binding for log4j 1.2
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
</dependency>
1.3 pom - binding for log4j2
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-slf4j-impl -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.18.0</version>
</dependency>
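Note: log4j-slf4j-impl targets the SLF4J 1.7.x API; for SLF4J 2.x the log4j-slf4j2-impl artifact is used instead. Keep exactly one binding on the classpath, otherwise SLF4J warns about multiple bindings and picks one arbitrarily.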
2 slf4j-simple
SLF4J's own minimal implementation: no configuration required; by default it writes messages at INFO and above to System.err.
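To try it out, pair slf4j-api with just this one dependency (version picked to match the slf4j-api above; adjust if needed):
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-simple -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.25</version>
</dependency>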
3 log4j
3.1 pom
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
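Note: log4j 1.2 reached end of life in 2015 and has known vulnerabilities that will not be fixed; it appears here for legacy systems only.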
3.2 Minimal properties (console only)
log4j.rootLogger=INFO,CONSOLE
#-------------------------------------------------------------------------------
# Console Appender
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.Target=System.out
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern= %-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n
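Saved as log4j.properties on the classpath, this file is picked up automatically. A minimal sketch of direct log4j usage (the class name is illustrative):
import org.apache.log4j.Logger;

public class Log4jDemo {
    private static final Logger log = Logger.getLogger(Log4jDemo.class);

    public static void main(String[] args) {
        log.info("reaches the CONSOLE appender");
        log.debug("dropped: the root logger level is INFO");
    }
}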
3.3 Per-level file output (properties)
log4j.rootLogger=debug,I,D,E,W
log4j.logger.org.apache.kafka.clients=off
#log4j.logger.org.apache.kafka.clients.consumer=off
log4j.appender.I = org.apache.log4j.DailyRollingFileAppender
log4j.appender.I.File = logs/info.log
log4j.appender.I.DatePattern ='_'yyyy-MM-dd'.log'
log4j.appender.I.Append = true
log4j.appender.I.Threshold = INFO
log4j.appender.I.layout = org.apache.log4j.PatternLayout
log4j.appender.I.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n
log4j.appender.D = org.apache.log4j.DailyRollingFileAppender
log4j.appender.D.File = logs/debug.log
log4j.appender.D.DatePattern ='_'yyyy-MM-dd'.log'
log4j.appender.D.Append = true
log4j.appender.D.Threshold = DEBUG
log4j.appender.D.filter.F1=org.apache.log4j.varia.LevelRangeFilter
log4j.appender.D.filter.F1.LevelMin=DEBUG
log4j.appender.D.filter.F1.LevelMax=DEBUG
log4j.appender.D.layout = org.apache.log4j.PatternLayout
log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n
log4j.appender.E = org.apache.log4j.DailyRollingFileAppender
log4j.appender.E.File = logs/error.log
log4j.appender.E.DatePattern ='_'yyyy-MM-dd'.log'
log4j.appender.E.Append = true
log4j.appender.E.Threshold = ERROR
log4j.appender.E.layout = org.apache.log4j.PatternLayout
log4j.appender.E.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n
log4j.appender.W = org.apache.log4j.DailyRollingFileAppender
log4j.appender.W.File = logs/warn.log
log4j.appender.W.DatePattern ='_'yyyy-MM-dd'.log'
log4j.appender.W.Append = true
log4j.appender.W.Threshold = WARN
log4j.appender.W.layout = org.apache.log4j.PatternLayout
log4j.appender.W.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n
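Note that Threshold is only a lower bound, so info.log also receives WARN and ERROR events; only the D appender pins itself to a single level via LevelRangeFilter. If the file does not sit at the classpath default, it can be loaded explicitly; a sketch with an illustrative path:
import org.apache.log4j.PropertyConfigurator;

public class Bootstrap {
    public static void main(String[] args) {
        // Load the configuration from an explicit path instead of the classpath default
        PropertyConfigurator.configure("conf/log4j.properties");
    }
}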
3.4 Per-level file output (XML)
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration PUBLIC "-//log4j/log4j Configuration//EN" "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
<!-- Log to the console -->
<appender name="console" class="org.apache.log4j.ConsoleAppender">
<!-- Output format -->
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n"/>
</layout>
<!-- Filter restricting the range of output levels -->
<filter class="org.apache.log4j.varia.LevelRangeFilter">
<!-- Minimum level written -->
<param name="levelMin" value="INFO"/>
<!-- Maximum level written -->
<param name="levelMax" value="ERROR"/>
</filter>
</appender>
<appender name="D" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="logs/debug.log" />
<param name="Append" value="true"/>
<param name="DatePattern" value="'_'yyyy-MM-dd'.log'"/>
<param name="Threshold" value="DEBUG"/>
<layout class="org.apache.log4j.PatternLayout">
<!--<param name="ConversionPattern" value="%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n"/>-->
<param name="ConversionPattern" value="%-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n"/>
</layout>
<filter class="org.apache.log4j.varia.LevelRangeFilter">
<param name="levelMin" value="DEBUG" />
<param name="levelMax" value="DEBUG" />
<param name="AcceptOnMatch" value="true" />
</filter>
</appender>
<appender name="W" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="logs/warn.log" />
<param name="Append" value="true"/>
<param name="DatePattern" value="'_'yyyy-MM-dd'.log'"/>
<param name="Threshold" value="WARN"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n"/>
</layout>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="TGT renewal thread has been interrupted and will exit." />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="com.newtouch.syncserver.entity.KafkaLoadingLog" />
<param name="AcceptOnMatch" value="false" />
</filter>
</appender>
<appender name="E" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="logs/error.log" />
<param name="Append" value="true"/>
<param name="DatePattern" value="'_'yyyy-MM-dd'.log'"/>
<param name="Threshold" value="ERROR"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n"/>
</layout>
</appender>
<appender name="I" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="logs/info.log" />
<param name="Append" value="true"/>
<param name="DatePattern" value="'_'yyyy-MM-dd'.log'"/>
<param name="Threshold" value="INFO"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%-d{yyyy-MM-dd HH:mm:ss} - [ %p ] %m%n"/>
</layout>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="TGT renewal thread has been interrupted and will exit." />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="TGT refresh thread started." />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="TGT valid starting at:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="TGT expires:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="TGT refresh sleeping" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Supplied authorities:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Resolved authority:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Successfully logged in." />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Kafka version:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Kafka commitId:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Kafka startTimeMs:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Client environment:" />
<param name="AcceptOnMatch" value="false" />
</filter>
<filter class="org.apache.log4j.varia.StringMatchFilter">
<param name="StringToMatch" value="Found no committed offset for partition" />
<param name="AcceptOnMatch" value="false" />
</filter>
</appender>
<!-- Silence org.apache.kafka.clients.consumer.ConsumerConfig output -->
<logger name="org.apache.kafka.clients.consumer.ConsumerConfig">
<level value="off" />
</logger>
<logger name="org.apache.kafka.clients.admin">
<level value="off" />
</logger>
<!-- Emit logs at INFO level and above -->
<root>
<priority value="info" />
<appender-ref ref="console"/>
<appender-ref ref="I" />
<appender-ref ref="D" />
<appender-ref ref="E" />
<appender-ref ref="W" />
</root>
</log4j:configuration>
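The XML variant is picked up automatically when named log4j.xml on the classpath, or it can be loaded explicitly; a sketch with an illustrative path:
import org.apache.log4j.xml.DOMConfigurator;

public class Bootstrap {
    public static void main(String[] args) {
        // Load the XML configuration from an explicit path
        DOMConfigurator.configure("conf/log4j.xml");
    }
}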
4 log4j2
4.1 pom - pinning versions (when there is no Spring Boot parent tag)
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-log4j2 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-log4j2</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.18.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.18.0</version>
</dependency>
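The log4j2 starter only takes effect if the default Logback starter stays off the classpath; without a parent managing this, the usual approach is to exclude spring-boot-starter-logging from whichever starter pulls it in (spring-boot-starter-web below is only an example):
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>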
4.2 log4j2.xml
<?xml version="1.0" encoding="UTF-8"?>
<!-- Level priority order: OFF > FATAL > ERROR > WARN > INFO > DEBUG > TRACE > ALL -->
<!-- The status attribute on Configuration controls log4j2's own internal logging and can be omitted; set it to "trace" to see detailed internal output -->
<!-- monitorInterval: log4j2 can detect changes to this file and reconfigure itself; the value is the check interval in seconds -->
<configuration status="WARN" monitorInterval="1800">
<Properties>
<!-- Default log location; relative to the project root or an absolute path -->
<!-- Option 1: generic path -->
<property name="basePath">./logs</property>
<!-- Name of the log directory -->
<!-- <property name="logFileName">log4j2DemoLog</property> -->
<!-- Option 2: web projects only; plain Java projects lack this lookup and must remove it or an exception is thrown. Puts logs under the web application root. -->
<!-- <property name="basePath">${web:rootDir}/${logFileName}</property> -->
<!-- Option 3: web projects only (same caveat); puts logs under Tomcat's logs directory -->
<!-- <property name="basePath">${sys:catalina.home}/logs/${logFileName}</property> -->
<!-- Size at which a log file is rolled -->
<property name="every_file_size">2MB</property>
<!-- Default output level -->
<property name="output_log_level">INFO</property>
<!-- Minimum level shown on the console -->
<property name="console_print_level">INFO</property>
<!-- Default console pattern: "%-5level" is the level; "%l" (lowercase L) would print the full caller location, which includes the line number and therefore hurts logging performance -->
<property name="console_log_pattern">%d{yyyy-MM-dd HH:mm:ss} [ %-5level] - %m%n</property>
<!-- <property name="console_log_pattern">%d{yyyy-MM-dd HH:mm:ss.SSS} [%-5level] %l - %m%n</property> -->
<!-- Default file pattern (date, level, message only); %C = class name, %M = method name, %L = line number, %m = message, %n = newline are available, but location info costs performance -->
<property name="log_pattern">%d{yyyy-MM-dd HH:mm:ss} [ %-5level] - %m%n</property>
<!-- Variant with caller info but no line number: -->
<!-- <property name="log_pattern">%d{yyyy-MM-dd HH:mm:ss.SSS} [%-5level] %C.%M - %m%n</property> -->
<!-- =========================================== DEBUG log configuration =========================================== -->
<!-- File for this level -->
<property name="debug_fileName">${basePath}/debug.log</property>
<!-- Archive pattern: rolled files are moved into a per-month folder and gzip-compressed -->
<property name="debug_filePattern">${basePath}/%d{yyyy-MM}/debug-%d{yyyy-MM-dd}-%i.log.gz</property>
<!-- Max number of archives of one type kept per folder; defaults to 7 if unset, and filePattern must end in %i for this to take effect -->
<property name="debug_max">500</property>
<!-- How often a new file is started; interpreted against the most specific date unit in filePattern:
     with interval 1 and %d{yyyy-MM-dd}, one file per day;
     with interval 12 and %d{yyyy-MM-dd-HH}, one file every 12 hours -->
<property name="debug_timeInterval">1</property>
<!-- Whether to align the rollover boundary (modulate). If true, with interval 4 and current time 3am,
     the first rollover happens at 4am, then 8am, noon, ... rather than 7am -->
<property name="debug_timeModulate">true</property>
<!-- ============================================ INFO level logs ============================================ -->
<property name="info_fileName">${basePath}/info.log</property>
<property name="info_filePattern">${basePath}/%d{yyyy-MM}/info-%d{yyyy-MM-dd}-%i.log.gz</property>
<property name="info_max">100</property>
<property name="info_timeInterval">1</property>
<property name="info_timeModulate">true</property>
<!-- ============================================ WARN level logs ============================================ -->
<property name="warn_fileName">${basePath}/warn.log</property>
<property name="warn_filePattern">${basePath}/%d{yyyy-MM}/warn-%d{yyyy-MM-dd}-%i.log.gz</property>
<property name="warn_max">100</property>
<property name="warn_timeInterval">1</property>
<property name="warn_timeModulate">true</property>
<!-- ============================================ ERROR level logs ============================================ -->
<property name="error_fileName">${basePath}/error.log</property>
<property name="error_filePattern">${basePath}/%d{yyyy-MM}/error-%d{yyyy-MM-dd}-%i.log.gz</property>
<property name="error_max">100</property>
<property name="error_timeInterval">1</property>
<property name="error_timeModulate">true</property>
</Properties>
<!-- Appender definitions -->
<appenders>
<!-- ======================================= Console output ======================================= -->
<Console name="Console" target="SYSTEM_OUT">
<!-- Pass events at the configured level and above (onMatch); reject the rest (onMismatch) -->
<ThresholdFilter level="${console_print_level}" onMatch="ACCEPT" onMismatch="DENY"/>
<!-- Output pattern; defaults to %m%n if unset -->
<PatternLayout pattern="${console_log_pattern}"/>
</Console>
<!-- ================================ DEBUG events to file (the filters below pin this appender to DEBUG only) ================================ -->
<RollingFile name="DebugFile" fileName="${debug_fileName}" filePattern="${debug_filePattern}">
<PatternLayout pattern="${log_pattern}"/>
<Policies>
<TimeBasedTriggeringPolicy interval="${debug_timeInterval}" modulate="${debug_timeModulate}"/>
<SizeBasedTriggeringPolicy size="${every_file_size}"/>
</Policies>
<DefaultRolloverStrategy max="${debug_max}">
<!-- Delete: remove matching expired archives -->
<!-- maxDepth: archives live under ${basePath}/yyyy-MM, so the directory depth is 2 -->
<Delete basePath="${basePath}" maxDepth="2">
<!-- IfFileName glob: match .log.gz archives one directory level down -->
<IfFileName glob="*/*.log.gz" />
<!-- IfLastModified age: match files older than 365 days; units D, H, M, S = days, hours, minutes, seconds -->
<IfLastModified age="365D" />
</Delete>
</DefaultRolloverStrategy>
<Filters>
<ThresholdFilter level="INFO" onMatch="DENY" onMismatch="NEUTRAL"/>
<ThresholdFilter level="DEBUG" onMatch="ACCEPT" onMismatch="DENY"/>
</Filters>
</RollingFile>
<!-- ======================================= INFO events to file ======================================= -->
<RollingFile name="InfoFile" fileName="${info_fileName}" filePattern="${info_filePattern}">
<PatternLayout pattern="${log_pattern}"/>
<Policies>
<TimeBasedTriggeringPolicy interval="${info_timeInterval}" modulate="${info_timeModulate}"/>
<SizeBasedTriggeringPolicy size="${every_file_size}"/>
</Policies>
<DefaultRolloverStrategy max="${info_max}" />
<Filters>
<StringMatchFilter text="TGT renewal thread has been interrupted and will exit." onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="TGT refresh thread started." onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="TGT valid starting at:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="TGT expires:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="TGT refresh sleeping" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Supplied authorities:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Resolved authority:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Successfully logged in." onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Kafka version:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Kafka commitId:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Kafka startTimeMs:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Client environment:" onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="Found no committed offset for partition" onMatch="DENY" onMismatch="NEUTRAL"/>
<ThresholdFilter level="WARN" onMatch="DENY" onMismatch="NEUTRAL"/>
<ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>
</Filters>
</RollingFile>
<!-- ======================================= WARN events to file ======================================= -->
<RollingFile name="WarnFile" fileName="${warn_fileName}" filePattern="${warn_filePattern}">
<PatternLayout pattern="${log_pattern}"/>
<Policies>
<TimeBasedTriggeringPolicy interval="${warn_timeInterval}" modulate="${warn_timeModulate}"/>
<SizeBasedTriggeringPolicy size="${every_file_size}"/>
</Policies>
<DefaultRolloverStrategy max="${warn_max}" />
<Filters>
<StringMatchFilter text="TGT renewal thread has been interrupted and will exit." onMatch="DENY" onMismatch="NEUTRAL"/>
<StringMatchFilter text="com.newtouch.syncserver.entity.KafkaLoadingLog" onMatch="DENY" onMismatch="NEUTRAL"/>
<ThresholdFilter level="ERROR" onMatch="DENY" onMismatch="NEUTRAL"/>
<ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY"/>
</Filters>
</RollingFile>
<!-- ======================================= ERROR events to file ======================================= -->
<RollingFile name="ErrorFile" fileName="${error_fileName}" filePattern="${error_filePattern}">
<PatternLayout pattern="${log_pattern}"/>
<Policies>
<TimeBasedTriggeringPolicy interval="${error_timeInterval}" modulate="${error_timeModulate}"/>
<SizeBasedTriggeringPolicy size="${every_file_size}"/>
</Policies>
<DefaultRolloverStrategy max="${error_max}" />
<Filters>
<ThresholdFilter level="FATAL" onMatch="DENY" onMismatch="NEUTRAL"/>
<ThresholdFilter level="ERROR" onMatch="ACCEPT" onMismatch="DENY"/>
</Filters>
</RollingFile>
</appenders>
<!-- Logger definitions: an appender only takes effect once a logger (or the root) references it -->
<loggers>
<logger name="org.apache.kafka.clients.admin" level="OFF" additivity="false"/>
<logger name="org.apache.kafka.clients.consumer" level="OFF" additivity="false"/>
<!-- Default root logger -->
<root level="${output_log_level}">
<appender-ref ref="DebugFile"/>
<appender-ref ref="Console"/>
<appender-ref ref="InfoFile"/>
<appender-ref ref="WarnFile"/>
<appender-ref ref="ErrorFile"/>
</root>
</loggers>
</configuration>
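Log4j2 finds log4j2.xml on the classpath automatically. A file elsewhere can be pointed to with the log4j.configurationFile system property, for example:
java -Dlog4j.configurationFile=conf/log4j2.xml -jar app.jar
In a Spring Boot application the same is usually done with logging.config=classpath:log4j2.xml in application.properties.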