33°

基于ELK收集业务日志的初步体验

elasticsearch - 7.5.1

logstash - 7.5.1

kibana - 7.5.1

logback.properties

# 应用名称
appName=data-center
# 日志级别
logLevel=debug
# logstash服务器ip和TCP输入端口(需与logstash配置文件中tcp input的端口5044一致)
logstash=localhost:5044
# 注意:9600是logstash自身的监控API端口,并非日志输入端口;下面两项在本logback.xml中并未被引用
logstash.host=127.0.0.1
logstash.port=9600

logback.xml

<?xml version="1.0" encoding="UTF-8"?>

<configuration scan="true" scanPeriod="1 seconds">
    <property scope="context" resource="config/logback.properties"/>
    <contextName>${appName}</contextName>

&lt;!-- 文件输出 --&gt;
&lt;appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender"&gt;

    &lt;file&gt;d:/logs/${appName}-all.log&lt;/file&gt;
    &lt;rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"&gt;
        &lt;FileNamePattern&gt;
            d:/logs/%d{yyyy-MM,aux}/${appName}-all.%d.%i.gz
        &lt;/FileNamePattern&gt;
        &lt;timeBasedFileNamingAndTriggeringPolicy
                class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"&gt;
            &lt;maxFileSize&gt;1024MB&lt;/maxFileSize&gt;
        &lt;/timeBasedFileNamingAndTriggeringPolicy&gt;
        &lt;maxHistory&gt;30&lt;/maxHistory&gt;
    &lt;/rollingPolicy&gt;
    &lt;encoder&gt;
        &lt;pattern&gt;%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n&lt;/pattern&gt;
    &lt;/encoder&gt;
&lt;/appender&gt;

&lt;!--错误文件输出--&gt;
&lt;appender name="errorFile" class="ch.qos.logback.core.rolling.RollingFileAppender"&gt;
    &lt;file&gt;${user.home}/logs/${appName}-error.log&lt;/file&gt;
    &lt;filter class="ch.qos.logback.classic.filter.LevelFilter"&gt;
        &lt;level&gt;ERROR&lt;/level&gt;
        &lt;onMatch&gt;ACCEPT&lt;/onMatch&gt;
        &lt;onMismatch&gt;DENY&lt;/onMismatch&gt;
    &lt;/filter&gt;
    &lt;rollingPolicy
            class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"&gt;
&lt;FileNamePattern&gt;
           &lt;!-- 使用SizeAndTimeBasedFNATP时文件名模式必须包含%i,否则logback启动时会报错 --&gt;
           d:/logs/error/${appName}-error.%d{yyyy-MM-dd}.%i.log
        &lt;/FileNamePattern&gt;
        &lt;timeBasedFileNamingAndTriggeringPolicy
                class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"&gt;
            &lt;maxFileSize&gt;1024MB&lt;/maxFileSize&gt;
        &lt;/timeBasedFileNamingAndTriggeringPolicy&gt;
        &lt;maxHistory&gt;30&lt;/maxHistory&gt;
    &lt;/rollingPolicy&gt;
    &lt;encoder&gt;
        &lt;pattern&gt;
            %d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n
        &lt;/pattern&gt;
    &lt;/encoder&gt;
&lt;/appender&gt;

&lt;!-- SQL文件输出 --&gt;
&lt;appender name="sql" class="ch.qos.logback.core.rolling.RollingFileAppender"&gt;
    &lt;file&gt;${user.home}/logs/${appName}-sql.log&lt;/file&gt;
    &lt;rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy"&gt;
        &lt;FileNamePattern&gt;
           d:/logs/%d{yyyy-MM,aux}-sql/${appName}-sql.%d.%i.gz
        &lt;/FileNamePattern&gt;
        &lt;timeBasedFileNamingAndTriggeringPolicy
                class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP"&gt;
            &lt;maxFileSize&gt;1024MB&lt;/maxFileSize&gt;
        &lt;/timeBasedFileNamingAndTriggeringPolicy&gt;
        &lt;maxHistory&gt;30&lt;/maxHistory&gt;
    &lt;/rollingPolicy&gt;
    &lt;encoder&gt;
        &lt;pattern&gt;%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n&lt;/pattern&gt;
    &lt;/encoder&gt;
&lt;/appender&gt;

&lt;!--日志输出到logstash--&gt;
&lt;appender name="logstash"
          class="net.logstash.logback.appender.LogstashTcpSocketAppender"&gt;
    &lt;destination&gt;${logstash}&lt;/destination&gt;
    &lt;!-- encoder必须配置且只能有一个;原文写了两个encoder,后者会覆盖前者导致customFields丢失,此处合并为一个 --&gt;
    &lt;encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder"&gt;
        &lt;customFields&gt;{"appname":"${appName}"}&lt;/customFields&gt;
        &lt;!--无用堆栈帧过滤--&gt;
        &lt;throwableConverter class="net.logstash.logback.stacktrace.ShortenedThrowableConverter"&gt;
            &lt;!--&lt;exclude&gt;sun\.reflect\..*\.invoke.*&lt;/exclude&gt;--&gt;
            &lt;exclude&gt;o\.s\.w\.s\.h\.*&lt;/exclude&gt;
            &lt;exclude&gt;o\.s\.b\.f\.s\.*&lt;/exclude&gt;
            &lt;rootCauseFirst&gt;true&lt;/rootCauseFirst&gt;
            &lt;inlineHash&gt;true&lt;/inlineHash&gt;
        &lt;/throwableConverter&gt;
    &lt;/encoder&gt;
    &lt;connectionStrategy&gt;
        &lt;roundRobin&gt;
            &lt;connectionTTL&gt;5 minutes&lt;/connectionTTL&gt;
        &lt;/roundRobin&gt;
    &lt;/connectionStrategy&gt;

&lt;/appender&gt;

&lt;!--控制台输出(下方ASYNCConsole引用了名为console的appender,原文缺失其定义,此处补充)--&gt;
&lt;appender name="console" class="ch.qos.logback.core.ConsoleAppender"&gt;
    &lt;encoder&gt;
        &lt;pattern&gt;%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n&lt;/pattern&gt;
    &lt;/encoder&gt;
&lt;/appender&gt;

&lt;!--日志的异步输出配置--&gt;
&lt;appender name="ASYNCConsole" class="ch.qos.logback.classic.AsyncAppender"&gt;
    &lt;includeCallerData&gt;true&lt;/includeCallerData&gt;
    &lt;appender-ref ref="console"/&gt;
&lt;/appender&gt;
&lt;appender name="ASYNCFile" class="ch.qos.logback.classic.AsyncAppender"&gt;
    &lt;includeCallerData&gt;true&lt;/includeCallerData&gt;
    &lt;appender-ref ref="file"/&gt;
&lt;/appender&gt;
&lt;appender name="ASYNCErrorFile" class="ch.qos.logback.classic.AsyncAppender"&gt;
    &lt;includeCallerData&gt;true&lt;/includeCallerData&gt;
    &lt;appender-ref ref="errorFile"/&gt;
&lt;/appender&gt;
&lt;appender name="ASYNCSql" class="ch.qos.logback.classic.AsyncAppender"&gt;
    &lt;includeCallerData&gt;true&lt;/includeCallerData&gt;
    &lt;appender-ref ref="sql"/&gt;
&lt;/appender&gt;
&lt;appender name="ASYNCLogstash" class="ch.qos.logback.classic.AsyncAppender"&gt;
    &lt;includeCallerData&gt;true&lt;/includeCallerData&gt;
    &lt;appender-ref ref="logstash"/&gt;
&lt;/appender&gt;

&lt;logger name="org.apache" level="${logLevel}"/&gt;
&lt;logger name="org.springframework" level="${logLevel}"/&gt;
&lt;logger name="org.springframework.web" level="${logLevel}"/&gt;
&lt;logger name="dao" level="${logLevel}" additivity="false"&gt;
    &lt;appender-ref ref="ASYNCSql"/&gt;
    &lt;appender-ref ref="ASYNCConsole"/&gt;
&lt;/logger&gt;

&lt;root level="${logLevel}"&gt;
    &lt;appender-ref ref="ASYNCConsole"/&gt;
    &lt;appender-ref ref="ASYNCFile"/&gt;
    &lt;appender-ref ref="ASYNCErrorFile"/&gt;
    &lt;appender-ref ref="ASYNCLogstash"/&gt;
&lt;/root&gt;

</configuration>

依赖

<dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>5.3</version>
        </dependency>
    &lt;!-- jsonevent-layout是log4j的JSON布局组件,在logback+logstash-logback-encoder方案中并不需要,可省略 --&gt;
    &lt;dependency&gt;
        &lt;groupId&gt;net.logstash.log4j&lt;/groupId&gt;
        &lt;artifactId&gt;jsonevent-layout&lt;/artifactId&gt;
        &lt;version&gt;1.6&lt;/version&gt;
    &lt;/dependency&gt;

    &lt;dependency&gt;
        &lt;groupId&gt;ch.qos.logback&lt;/groupId&gt;
        &lt;artifactId&gt;logback-classic&lt;/artifactId&gt;
        &lt;exclusions&gt;
            &lt;exclusion&gt;
                &lt;groupId&gt;ch.qos.logback&lt;/groupId&gt;
                &lt;artifactId&gt;logback-core&lt;/artifactId&gt;
            &lt;/exclusion&gt;
        &lt;/exclusions&gt;
        &lt;version&gt;1.1.8&lt;/version&gt;
    &lt;/dependency&gt;
    &lt;dependency&gt;
        &lt;groupId&gt;ch.qos.logback&lt;/groupId&gt;
        &lt;artifactId&gt;logback-core&lt;/artifactId&gt;
        &lt;version&gt;1.1.8&lt;/version&gt;
    &lt;/dependency&gt;

添加logstash配置文件

input {
    tcp {
        host => "127.0.0.1"
        port => 5044
        # LogstashTcpSocketAppender(LogstashEncoder)发送的是JSON行,
        # 使用json_lines解码,否则整条JSON会作为原始字符串进入message字段
        codec => json_lines
    }
}
output {
     stdout {
      codec => rubydebug
     }
     elasticsearch {
        action => "index"
        hosts => ["127.0.0.1:9200"]
        index => "data-center"
     }

}

本文由【大大的微笑】发布于开源中国,原文链接:https://my.oschina.net/u/2486137/blog/3159193

全部评论: 0

    我有话说: