Filebeat (8.9.0) collects logs and ships them to Logstash, which forwards them to Elasticsearch

Filebeat collects application logs and ships them to Logstash, and Logstash forwards them to Elasticsearch (ES).

  • Download and configure filebeat
    • download
    • Configuration
    • logback.xml
  • logstash configuration

Download and configure filebeat

Download

Refer to the official Filebeat download page
(the example below uses version 8.9.0)

Configuration

filebeat.inputs:
- type: filestream
  enabled: true
  paths:
    # Log file directory (one subdirectory per application: <app>/<file>.log)
    - D:\modellog\elkdemo\*\*.log
  parsers:
    # Multi-line log merging (e.g. Java exception stack traces): any line that does
    # NOT start with '[' is appended to the previous event.
  - multiline:
      type: pattern
      # Lines that begin a new event start with '['
      pattern: '^\['
      negate: true
      match: after

# Custom log format: split the raw line into named fields. The fields land under the
# default "dissect" target prefix, which the Logstash filter later renames to top level.
processors:
- dissect:
    # fix: was "[% {thread}]" - the stray space after '%' broke the %{thread} dissect key
    tokenizer: '[%{timestamp}] -%{parentTraceId} -%{traceId} -%{host} -%{port} -%{appName} -%{pid} -[%{level}] -[%{thread}] -%{class} -%{line} - %{message}'
    field: "message"

# Output to logstash
output.logstash:
  enabled: true
  # NOTE(review): the Logstash beats input in this article listens on 5044, not 21069 -
  # confirm 21069 is a port mapping/proxy in front of it, or make the two match.
  hosts: ["192.168.0.100:21069"]

logback.xml

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Colored console logging -->
    <!-- Converter classes (from Spring Boot) that the colored console pattern relies on -->
    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter"/>
    <conversionRule conversionWord="wex"
                    converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter"/>
    <conversionRule conversionWord="wEx"
                    converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter"/>

    <!-- hostAddress is computed by the project-local IPLogDefiner; appName and port
         are pulled from Spring environment properties -->
    <define name="hostAddress" class="top.mingempty.commons.definer.IPLogDefiner"/>
    <springProperty scope="context" name="appName" source="spring.application.name"/>
    <springProperty scope="context" name="port" source="server.port"/>

    <!-- Colored console log format; logger name output is indented/aligned.
         (fix: the original contained "$ {PID:- }" and "$ {LOG_EXCEPTION_CONVERSION_WORD...}"
         with a stray space after '$', so logback printed the text literally instead of
         substituting the variable) -->
    <property name="CONSOLE_LOG_PATTERN"
              value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr([${LOG_LEVEL_PATTERN:-%5p}]) %clr(${PID:- }){magenta} %clr(-){faint} %clr([%t]){faint} %clr(%logger){cyan} %clr(:){magenta} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>

    <!-- File log format consumed by Filebeat.
         NOTE(review): Filebeat's dissect tokenizer expects "-parentTraceId -traceId"
         between the timestamp and the host, which this pattern does not emit - confirm
         which side is authoritative before deploying. -->
    <property name="FILE_LOG_PATTERN"
              value="[%d{yyyy-MM-dd HH:mm:ss.SSS}] -${hostAddress} -${port} -${appName} -${PID:-} -[%-5level] -[%thread] -%logger -%-3L - %msg%n"/>


    <contextName>fanxlxs</contextName>

    <!-- Log root directory (at the root of the current disk/partition).
         NOTE(review): concatenated below as ${LOG_PATH}/${APPDIR}, which yields
         "/modellog//filebeat" (double slash - harmless on most systems); Filebeat above
         tails D:\modellog\elkdemo\*\*.log - confirm the two paths actually line up. -->
    <property name="LOG_PATH" value="/modellog"/>
    <!-- Per-system subdirectory for this application's log files -->
    <property name="APPDIR" value="/filebeat"/>

    <!-- RollingFileAppender with date/size-based rollover - ERROR level -->
    <appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Rollover policy controlling the path and name of the log files -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern; the date is required, and %i is required when
                 more than one file may be produced in a single day -->
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/error-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Optional retention (commented out): MaxHistory = days of history to keep,
                 excluding today; must be used together with cleanHistoryOnStart.
                        <MaxHistory>3</MaxHistory>
                        <cleanHistoryOnStart>true</cleanHistoryOnStart>-->
            <!-- Maximum size of a single log file within one day -->
            <maxFileSize>10MB</maxFileSize>
            <!-- Optional (commented out): caps the total size of one day's logs; the %i
                 index in the file name then keeps only the last value.
                 <totalSizeCap>10MB</totalSizeCap>-->
        </rollingPolicy>
        <!-- Append to existing files instead of truncating on restart -->
        <append>true</append>
        <!-- Log line format -->
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <!-- This file records the ERROR level only -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>error</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>


    <!-- RollingFileAppender with date/size-based rollover - WARN level -->
    <appender name="WARN" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Rollover policy controlling the path and name of the log files -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern; the date is required, and %i is required when
                 more than one file may be produced in a single day -->
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/warn-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Optional retention (commented out): MaxHistory = days of history to keep,
                 excluding today; must be used together with cleanHistoryOnStart.
                        <MaxHistory>3</MaxHistory>
                        <cleanHistoryOnStart>true</cleanHistoryOnStart>-->
            <!-- Maximum size of a single log file within one day -->
            <maxFileSize>10MB</maxFileSize>
            <!-- Optional (commented out): caps the total size of one day's logs; the %i
                 index in the file name then keeps only the last value.
                 <totalSizeCap>10MB</totalSizeCap>-->
        </rollingPolicy>
        <!-- Append to existing files instead of truncating on restart -->
        <append>true</append>
        <!-- Log line format -->
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <!-- This file records the WARN level only -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>warn</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>


    <!-- RollingFileAppender with date/size-based rollover - INFO level -->
    <appender name="INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Rollover policy controlling the path and name of the log files -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern; the date is required, and %i is required when
                 more than one file may be produced in a single day -->
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/info-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Optional retention (commented out): MaxHistory = days of history to keep,
                 excluding today; must be used together with cleanHistoryOnStart.
                        <MaxHistory>3</MaxHistory>
                        <cleanHistoryOnStart>true</cleanHistoryOnStart>-->
            <!-- Maximum size of a single log file within one day -->
            <maxFileSize>10MB</maxFileSize>
            <!-- Optional (commented out): caps the total size of one day's logs; the %i
                 index in the file name then keeps only the last value.
                 <totalSizeCap>10MB</totalSizeCap>-->
        </rollingPolicy>
        <!-- Append to existing files instead of truncating on restart -->
        <append>true</append>
        <!-- Log line format -->
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <!-- This file records the INFO level only -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>info</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>


    <!-- RollingFileAppender with date/size-based rollover - DEBUG level -->
    <appender name="DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- Rollover policy controlling the path and name of the log files -->
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- Output file name pattern; the date is required, and %i is required when
                 more than one file may be produced in a single day -->
            <FileNamePattern>${LOG_PATH}/${APPDIR}/%d{yyyy-MM-dd}/debug-%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <!-- Optional retention (commented out): MaxHistory = days of history to keep,
                 excluding today; must be used together with cleanHistoryOnStart.
                        <MaxHistory>3</MaxHistory>
                        <cleanHistoryOnStart>true</cleanHistoryOnStart>-->
            <!-- Maximum size of a single log file within one day -->
            <maxFileSize>10MB</maxFileSize>
            <!-- Optional (commented out): caps the total size of one day's logs; the %i
                 index in the file name then keeps only the last value.
                 <totalSizeCap>10MB</totalSizeCap>-->
        </rollingPolicy>
        <!-- Append to existing files instead of truncating on restart -->
        <append>true</append>
        <!-- Log line format -->
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>${FILE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <!-- This file records the DEBUG level only -->
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>debug</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
    </appender>

    <!-- ConsoleAppender: writes log output to the console -->
    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <!-- encoder defaults to PatternLayoutEncoder -->
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf-8</charset>
        </encoder>
        <!-- Development-only appender. ThresholdFilter passes events at or above the
             configured (lowest) level, so the console shows everything >= debug. -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>debug</level>
        </filter>
    </appender>
    
    <!-- Spring framework logs -->
    <logger name="org.springframework" level="WARN"/>

    <!-- Nacos client logs -->
    <logger name="com.alibaba.nacos" level="WARN"/>

    <!-- Project logs -->
    <logger name="top.mingempty.metadata" level="INFO"/>

    <!-- In production, set this level appropriately to avoid excessive log volume or
         performance impact.
         NOTE(review): with the root at INFO, no DEBUG events ever reach the DEBUG
         appender (whose LevelFilter only ACCEPTs debug), so the debug file stays empty
         unless the root or a specific logger is lowered to DEBUG. -->
    <root level="INFO">
        <appender-ref ref="ERROR"/>
        <appender-ref ref="WARN"/>
        <appender-ref ref="INFO"/>
        <appender-ref ref="DEBUG"/>
        <!-- Remove STDOUT in production -->
        <appender-ref ref="STDOUT"/>
    </root>


</configuration>

logstash configuration

# Beats listener that receives events from Filebeat.
# (fix: removed the "<!-- -->" HTML-escaping residue after each "{" left over from
# the blog rendering - Logstash cannot parse it)
input {
  beats {
    # NOTE(review): Filebeat above is configured to send to 192.168.0.100:21069 while
    # this listener is on 5044 - confirm a port mapping/proxy exists, or align the two.
    port => 5044
  }
}

# Parse, rename, and timestamp the incoming events.
# (fix: removed the "<!-- -->" rendering residue after each "{"; repaired
# "%{DATA :appName}" whose stray space broke the grok pattern; repaired the garbled
# remove_field entry "log\ " to "log")
filter {
  grok {
    # Matches the line layout produced by logback's FILE_LOG_PATTERN.
    # NOTE(review): Filebeat's dissect processor already splits this line and also
    # expects -parentTraceId -traceId after the timestamp; this grok does not - confirm
    # which parser (dissect or grok) is meant to be authoritative.
    match => { "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\] -%{IP:host} -%{NUMBER:port} -%{DATA:appName} -%{NUMBER:pid} -\[%{DATA:level}\] -\[%{DATA:thread}\] -%{DATA:class} -%{NUMBER:line} - %{GREEDYDATA:message}" }
  }

  mutate {
    # Drop the ingestion timestamp; it is rebuilt from the log's own timestamp below.
    remove_field => [ "[@timestamp]"]
    # Promote the fields produced by Filebeat's dissect processor to top level.
    rename => {
        "[dissect][parentTraceId]" => "parentTraceId"
        "[dissect][traceId]" => "traceId"
        "[dissect][host]" => "host"
        "[dissect][port]" => "port"
        "[dissect][appName]" => "appName"
        "[dissect][pid]" => "pid"
        "[dissect][level]" => "level"
        "[dissect][thread]" => "thread"
        "[dissect][class]" => "class"
        "[dissect][line]" => "line"
        "[dissect][message]" => "message"
        "[dissect][timestamp]" => "timestamp"
        "[log][file][path]" => "path"
      }
  }

  date {
    match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSS" ] # Format of the timestamp in the log line
    target => "@timestamp"        # Store the parsed timestamp into the @timestamp field
    timezone => "Asia/Shanghai"   # Time zone of the source timestamps (China Standard Time)
  }

  mutate {
    # Remove bookkeeping fields that are no longer needed downstream.
    remove_field => [ "timestamp", "event", "agent", "ecs", "input", "tags", "flags", "log", "dissect", "@version" ]
  }
}


# Ship the processed events to Elasticsearch over TLS.
# (fix: removed the "<!-- -->" rendering residue; repaired the index date reference -
# "%{ + YYYY.MM.dd}" is not valid sprintf syntax, it must be "%{+YYYY.MM.dd}")
output {
  elasticsearch {
    hosts => ["https://172.27.0.2:9200"]
    # One index per day
    index => "beats3-mingempty-top-%{+YYYY.MM.dd}"
    # X-Pack security is enabled on this ES cluster; replace with real credentials
    user => "Account"
    password => "password"
    ssl_enabled => true
    ssl_certificate_authorities => ["/usr/share/logstash/config/logstash.pem"]
  }
}

See also: deploying ELK (8.9.0) with docker-compose and enabling SSL authentication.