<?xml version="1.0" encoding="UTF-8"?>
<!--
    Logback logging configuration (practical example).

    Overall structure of a logback configuration file:
        <configuration>
            <appender> ... </appender>   one or more; each defines a log output destination
            <root> ... </root>           at most one; the root logger
            <logger> ... </logger>       zero or more; named loggers
        </configuration>
-->
<configuration>

    <!-- Read the project name from the Spring environment (spring.application.name) -->
    <springProperty scope="context" name="PROJECT_NAME" source="spring.application.name"/>

    <!-- Log line format used by the file appenders -->
    <property name="FILE_PATTERN"
              value="[%level] [%date{ISO8601}] [%logger] [%thread] [%X{clientIp}] [%X{requestId}] [%X{serverIp}] [%X{X-B3-TraceId}] %msg%n"/>

    <!-- Log line format used by the console appender (with ANSI colors) -->
    <property name="CONSOLE_PATTERN"
              value="[%highlight(%-5level)] [%date{ISO8601}] [%boldGreen(%logger)] [%boldYellow(%thread)] [%X{clientIp}] [%cyan(%X{requestId})] [%X{serverIp}] %msg%n"/>

    <!-- Print logs to the console -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>${CONSOLE_PATTERN}</pattern>
        </encoder>
    </appender>

    <!-- Size-based rollover: keep at most 20 archive files of up to 100 MB each -->
    <appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${user.home}/logs/${PROJECT_NAME}/${PROJECT_NAME}.log</file>
        <append>true</append>
        <!-- Route only INFO-level events to this file -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <!-- When the active file exceeds maxFileSize, roll to a new indexed file (window 1..20) -->
        <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
            <fileNamePattern>${user.home}/logs/${PROJECT_NAME}/${PROJECT_NAME}.%i.log</fileNamePattern>
            <minIndex>1</minIndex>
            <maxIndex>20</maxIndex>
        </rollingPolicy>
        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
            <!-- Archive size threshold (original said 1kB, contradicting the stated 100 MB policy) -->
            <maxFileSize>100MB</maxFileSize>
        </triggeringPolicy>
        <encoder>
            <pattern>${FILE_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- ERROR-level events only, in a separate directory -->
    <appender name="error_file" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${user.home}/logs/${PROJECT_NAME}/error/${PROJECT_NAME}.log</file>
        <append>true</append>
        <!-- Route only ERROR-level events to this file; a custom filter could
             be substituted here to split out other categories (e.g. SQL logs) -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
            <fileNamePattern>${user.home}/logs/${PROJECT_NAME}/${PROJECT_NAME}_error.%i.log</fileNamePattern>
            <minIndex>1</minIndex>
            <maxIndex>20</maxIndex>
        </rollingPolicy>
        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
            <!-- Archive size threshold (original said 1KB, contradicting the stated 100 MB policy) -->
            <maxFileSize>100MB</maxFileSize>
        </triggeringPolicy>
        <encoder>
            <pattern>${FILE_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Time-based rollover: one archive per day, compressed, pruned after the retention window -->
    <appender name="info_file" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${user.home}/logs/${PROJECT_NAME}/info/${PROJECT_NAME}.log</file>
        <append>true</append>
        <!-- Route only INFO-level events to this file -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- %i is the within-day index once the size cap is hit (required by
                 SizeAndTimeBasedFNATP); the .zip suffix enables compression of archives -->
            <fileNamePattern>${user.home}/logs/${PROJECT_NAME}/info/${PROJECT_NAME}-%d{yyyy-MM-dd}.%i.log.zip</fileNamePattern>
            <!-- Roll within the day once the active file reaches this size -->
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- Number of days of history to keep -->
            <maxHistory>2</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>${FILE_PATTERN}</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <!-- Per-environment wiring: in the "test" Spring profile, log DEBUG and above
         to the console and to all three file appenders -->
    <springProfile name="test">
        <root level="DEBUG">
            <appender-ref ref="console"/>
            <appender-ref ref="file"/>
            <appender-ref ref="error_file"/>
            <appender-ref ref="info_file"/>
        </root>
    </springProfile>

</configuration>

 

Source: www.cnblogs.com/love-htt/p/12692467.html