Filebeat installation


    wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-6.5.4-linux-x86_64.tar.gz

    tar -zxvf filebeat-6.5.4-linux-x86_64.tar.gz

    chmod +x filebeat
    nohup ./filebeat run > nohup.out 2>&1 &    # equivalent: nohup ./filebeat > nohup.out 2>&1 &
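    Before backgrounding it, the configuration and the output connection can be checked first. A minimal sketch, assuming filebeat.yml sits in the extracted directory:

    cd filebeat-6.5.4-linux-x86_64
    ./filebeat test config -c filebeat.yml     # validate the configuration syntax
    ./filebeat test output -c filebeat.yml     # check the connection to the configured output
    tail -f nohup.out                          # follow the startup log once it is running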

     

     

    filebeat.yml

     

    ###################### Filebeat Configuration Example #########################

    # This file is an example configuration file highlighting only the most common
    # options. The filebeat.reference.yml file from the same directory contains all the
    # supported options with more comments. You can use it as a reference.
    #
    # You can find the full configuration reference here:
    # https://www.elastic.co/guide/en/beats/filebeat/index.html

    # For more available modules and options, please see the filebeat.reference.yml sample
    # configuration file.

    #=========================== Filebeat prospectors =============================

    filebeat.prospectors:

    # Each - is a prospector. Most options can be set at the prospector level, so
    # you can use different prospectors for various configurations.
    # Below are the prospector specific configurations.

    - type: log

      # Change to true to enable this prospector configuration.
      enabled: true
      tail_files: true

      # Paths that should be crawled and fetched. Glob based paths.
      paths:
        - /raid/logs/*
        #- c:\programdata\elasticsearch\logs\*

      multiline.pattern: '^\[[0-9]{4}-[0-9]{2}-[0-9]{2}'
      multiline.negate: true
      multiline.match: after
      multiline.timeout: 10s

      # Exclude lines. A list of regular expressions to match. It drops the lines that are
      # matching any regular expression from the list.
      #exclude_lines: ['^DBG']

      # Include lines. A list of regular expressions to match. It exports the lines that are
      # matching any regular expression from the list.
      #include_lines: ['^ERR', '^WARN']

      # Exclude files. A list of regular expressions to match. Filebeat drops the files that
      # are matching any regular expression from the list. By default, no files are dropped.
      exclude_files: ['error.*$']
      encoding: utf-8

      # Optional additional fields. These fields can be freely picked
      # to add additional information to the crawled log files for filtering
      fields:
      #  level: debug
      #  review: 1
        host: ip
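      # Illustration (an assumption, not part of the shipped example): with the default
      # fields_under_root setting, the extra field above appears in each published event
      # nested under "fields", e.g. "fields": { "host": "ip" }, rather than as a top-level key.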

      ### Multiline options

      # Multiline can be used for log messages spanning multiple lines. This is common
      # for Java Stack Traces or C-Line Continuation

      # The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
      #multiline.pattern: ^\[

      # Defines if the pattern set under pattern should be negated or not. Default is false.
      #multiline.negate: false

      # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
      # that was (not) matched before or after or as long as a pattern is not matched based on negate.
      # Note: After is the equivalent to previous and before is the equivalent to next in Logstash
      #multiline.match: after

      ignore_older: 5m
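      # Worked illustration (an assumption, not part of the shipped example): with the
      # active settings above (pattern '^\[[0-9]{4}-[0-9]{2}-[0-9]{2}', negate: true,
      # match: after), only lines starting with a bracketed date match, so a stack trace like
      #   [2022-07-10 12:00:01] ERROR unhandled exception
      #   java.lang.NullPointerException
      #       at com.example.Foo.bar(Foo.java:42)
      # is shipped as one event: the non-matching continuation lines are appended
      # to the preceding timestamped line.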

    #================================ General =====================================

    # The name of the shipper that publishes the network data. It can be used to group
    # all the transactions sent by a single shipper in the web interface.
    name: $FILEBEAT_NAME

    # The tags of the shipper are included in their own field with each
    # transaction published.
    tags: ["500_cc"]

    #----------------------------- Logstash output --------------------------------
    output.logstash:
      # The Logstash hosts
      hosts: ["kafka.log.com:5044"]
      loadbalance: true
      worker: 2

      # Optional SSL. By default is off.
      # List of root certificates for HTTPS server verifications
      ssl.certificate_authorities: ["./filebeat.crt"]

      # Certificate for SSL client authentication
      #ssl.certificate: "/etc/pki/client/cert.pem"

      # Client Certificate Key
      #ssl.key: "/etc/pki/client/cert.key"
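    The SSL lines above assume the Logstash side terminates TLS on port 5044 with a certificate that the local filebeat.crt can verify. A minimal sketch of such a beats input; the certificate and key paths here are assumptions, not taken from this setup:

    input {
      beats {
        port => 5044
        ssl => true
        ssl_certificate => "/etc/logstash/certs/filebeat.crt"   # assumed path to the server certificate
        ssl_key => "/etc/logstash/certs/filebeat.key"           # assumed path to the private key
      }
    }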
