0415 Collecting data with Filebeat, Logstash, ES, and Kibana

How to collect data with Filebeat, Logstash, Elasticsearch, and Kibana
General reference: https://www.olinux.org.cn/elk/1157.html
Official documentation: https://www.elastic.co/guide/en/beats/filebeat/6.2/filebeat-getting-started.html

Step 1: Start Elasticsearch. Installing ES itself is not covered here; look it up online.
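Before moving on, it is worth confirming that Elasticsearch is actually reachable. A minimal check, assuming ES listens on 192.168.2.105:9200 as in the rest of this walkthrough:

curl http://192.168.2.105:9200
# A running node answers with a JSON document showing the node name, cluster name and version.
curl http://192.168.2.105:9200/_cluster/health?pretty
# The reported "status" should be green or yellow before pointing Logstash at it.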
Step 2: Start Logstash. Installing Logstash is not covered here; look it up online.
Start command: ../bin/logstash -f logstash.slowquery.conf
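Once the logstash.slowquery.conf shown below has been written, the pipeline definition can be syntax-checked before starting it for real (a small sketch, assuming a Logstash 5.x/6.x install where the --config.test_and_exit flag is available):

../bin/logstash -f logstash.slowquery.conf --config.test_and_exit
# Validates the configuration file and exits instead of starting the pipeline.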
Configure the logstash.slowquery.conf file as follows:
input {
  beats {
    host => "192.168.2.105"
    port => 5044
  }
}
filter {

  grok {
    match => [ "message", "(?m)^# User@Host: %{USER:query_user}\[[^\]]+\] @ (?:(?<query_host>\S*) )?\[(?:%{IP:query_ip})?\]\s*Id: %{NUMBER:id:int}\s+# Query_time: %{NUMBER:query_time:float}\s+Lock_time: %{NUMBER:lock_time:float}\s+Rows_sent: %{NUMBER:rows_sent:int}\s+Rows_examined: %{NUMBER:rows_examined:int}\s*(?:use %{DATA:database};\s*)?SET timestamp=%{NUMBER:timestamp};\s*(?<query>(?<action>\w+)\s+.*)" ]
  }
  grok {
    match => { "message" => "# Time: " }
    add_tag => [ "drop" ]
    tag_on_failure => []
  }
  if "drop" in [tags] {
    drop {}
  }
  date {
    match => [ "timestamp", "UNIX", "YYYY-MM-dd HH:mm:ss" ]
    remove_field => [ "timestamp" ]
  }

}
output {
  elasticsearch {
    hosts => "192.168.2.105:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{[type]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
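For reference, the grok pattern above is written against MySQL slow-query-log entries of roughly the following shape (an illustrative entry, not taken from the original post). The second grok plus the drop filter discard the separate "# Time:" header lines, and the date filter converts the SET timestamp value into the event's @timestamp:

# Time: 2018-04-15T10:20:30.123456Z
# User@Host: appuser[appuser] @  [192.168.2.50]  Id: 42
# Query_time: 3.000245  Lock_time: 0.000068 Rows_sent: 1  Rows_examined: 5000000
use testdb;
SET timestamp=1523787630;
SELECT COUNT(*) FROM big_table WHERE status = 'open';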

Step 3: Start Filebeat with the command ./filebeat -e -c filebeat.yml -d "publish"
Edit filebeat.yml first. The important settings are type: log, enabled (which must be true), and document_type: mysqlslowquerylog.
-- Configure the filebeat.prospectors section as follows
filebeat.prospectors:

# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.

- type: log

  # Change to true to enable this prospector configuration.
  enabled: true

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/lib/mysql/*.log
    #- c:\programdata\elasticsearch\logs\*
  document_type: mysqlslowquerylog

  # Exclude lines. A list of regular expressions to match. It drops the lines that are
  # matching any regular expression from the list.
  #exclude_lines: ['^DBG']

  # Include lines. A list of regular expressions to match. It exports the lines that are
  # matching any regular expression from the list.
  #include_lines: ['^ERR', '^WARN']

  # Exclude files. A list of regular expressions to match. Filebeat drops the files that
  # are matching any regular expression from the list. By default, no files are dropped.
  #exclude_files: ['.gz$']

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering
  #fields:
  #  level: debug
  #  review: 1

  ### Multiline options

  # Multiline can be used for log messages spanning multiple lines. This is common
  # for Java Stack Traces or C-Line Continuation

  # The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
  #multiline.pattern: ^\[

  multiline.pattern: "^# User@Host: "

  # Defines if the pattern set under pattern should be negated or not. Default is false.
  multiline.negate: true

  # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
  # that was (not) matched before or after or as long as a pattern is not matched based on negate.
  # Note: After is the equivalent to previous and before is the equivalent to next in Logstash
  multiline.match: after
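To make the three multiline settings concrete: with this pattern, negate: true, and match: after, any line that does not begin with "# User@Host: " is appended to the most recent line that did, so a complete slow-query entry (header lines plus the SQL statement, like the sample shown in step 2) reaches Logstash as a single event instead of one event per line. Roughly:

# User@Host: appuser[appuser] @  [192.168.2.50]  Id: 42    <- matches the pattern, starts a new event
# Query_time: 3.000245  Lock_time: 0.000068 ...            <- does not match, appended
SET timestamp=1523787630;                                   <- appended
SELECT COUNT(*) FROM big_table WHERE status = 'open';      <- appended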

-- Configure the Outputs section as follows
#================================ Outputs =====================================

# Configure what output to use when sending the data collected by the beat.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["192.168.2.105:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"
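Before starting Filebeat it can be useful to validate the edited file and the connection to Logstash. Filebeat 6.x ships test subcommands for this (a quick sanity check, assuming filebeat.yml is in the current directory):

./filebeat test config -c filebeat.yml    # checks that the YAML parses and the settings are valid
./filebeat test output -c filebeat.yml    # checks that the Logstash endpoint 192.168.2.105:5044 is reachable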

Step 4: Start Kibana by running ./kibana

Step 5: Open 192.168.2.105:5602 in a browser and complete the configuration in the Kibana web UI.
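Before building the index pattern in Kibana, you can confirm that events actually reached Elasticsearch. A quick check (the index name below is only an example of what the %{[@metadata][beat]}-%{[type]}-%{+YYYY.MM.dd} template from the Logstash output might produce):

curl 'http://192.168.2.105:9200/_cat/indices?v'
# Expect an index such as filebeat-mysqlslowquerylog-2018.04.15 with a non-zero docs.count;
# then create a matching index pattern (for example filebeat-*) on Kibana's Management page.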

Original post: https://www.cnblogs.com/qcfeng/p/8846804.html