zoukankan      html  css  js  c++  java
  • ELK6+filebeat、kafka、zookeeper搭建文档

    系统:centos 6.5

    JDK:1.8

    Elasticsearch-6.0.0
    Logstash-6.0.0
    kibana-6.0.0
    zookeeper-3.5.3
    kafka_2.12-1.0.0
    filebeat-6.0.0

    wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-6.0.0-linux-x86_64.tar.gz
    wget http://mirrors.hust.edu.cn/apache/kafka/1.0.0/kafka_2.12-1.0.0.tgz
    wget http://mirror.bit.edu.cn/apache/zookeeper/zookeeper-3.5.3-beta/zookeeper-3.5.3-beta.tar.gz
    wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.0.0.tar.gz
    wget https://artifacts.elastic.co/downloads/kibana/kibana-6.0.0-linux-x86_64.tar.gz
    wget https://artifacts.elastic.co/downloads/logstash/logstash-6.0.0.tar.gz

    工作流程
    filebeat 收集日志out到kafka, kafka再out到logstash,logstash再out到elasticsearch,最后通过kibana展示到web页面

    filebeat 节点配置文件

    [root@centos199 filebeat-6.0.0-linux-x86_64]# cat filebeat.yml
    filebeat.prospectors:
    - input_type: log
    paths:
    - /home/test/backup/mysql-*.log
    document_type: mysql
    tail_files: true
    multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'

    multiline.negate: true
    multiline.match: after

    output.kafka:
    hosts: ["192.168.1.99:9092"]
    topic: guo
    partition.round_robin:
    reachable_only: false

    required_acks: 1
    compression: gzip
    max_message_bytes: 1000000

    启动filebeat   nohup ./filebeat -c filebeat.yml &

    kafka 配置文件

    [root@centos199 config]# cat server.properties |grep -E -v "^#|^$"
    broker.id=0   如果是两个kafka,另一台的id改为其他就行了
    port = 9092
    host.name = 192.168.1.99
    num.network.threads=3
    num.io.threads=8
    socket.send.buffer.bytes=102400
    socket.receive.buffer.bytes=102400
    socket.request.max.bytes=104857600
    log.dirs=/tmp/kafka-logs
    num.partitions=1
    num.recovery.threads.per.data.dir=1
    offsets.topic.replication.factor=1
    transaction.state.log.replication.factor=1
    transaction.state.log.min.isr=1
    log.retention.hours=168
    log.segment.bytes=1073741824
    log.retention.check.interval.ms=300000
    zookeeper.connect=192.168.1.99:2181
    zookeeper.connection.timeout.ms=6000
    group.initial.rebalance.delay.ms=0

    启动kafka ./bin/kafka-server-start.sh -daemon config/server.properties

    创建消息主题:./bin/kafka-topics.sh --create --zookeeper 192.168.1.99:2181 --replication-factor 1 --partitions 2 --topic ecplogs
    输出测试消息生产与消费:./bin/kafka-console-consumer.sh --zookeeper 192.168.1.99:2181 --topic ecplogs --from-beginning
    写入测试消息:./bin/kafka-console-producer.sh --broker-list 192.168.1.99:9092 --topic ecplogs

    zookeeper 配置文件

    [root@centos199 conf]# cat zoo.cfg
    tickTime=2000
    initLimit=10
    syncLimit=5
    dataDir=/home/test/zookeeper
    clientPort=2181

    启动zookeeper  ./bin/zkServer.sh start

    logstash 配置文件

    [root@centos199 config]# cat logstash.conf
    input {
    kafka {
    bootstrap_servers => "192.168.1.99:9092"
    topics => ["guo"]
    codec => "json"
    }
    }
    output {
    elasticsearch {
    hosts => ["192.168.1.99:9200"]
    index => "mysql-%{+YYYY.MM.dd}"
    template_overwrite => true
    }
    }

    启动logstash  ./bin/logstash -f config/logstash.conf

    elasticsearch 配置文件

    [root@centos199 config]# cat elasticsearch.yml |grep -E -v "^#|^$"
    path.data: /home/test/elk/elastic/data
    path.logs: /home/test/elk/elastic/logs
    bootstrap.memory_lock: false
    bootstrap.system_call_filter: false
    network.host: 192.168.1.99
    http.port: 9200

    [elk@centos199 elasticsearch-6.0.0]$./bin/elasticsearch -d

    kibana 配置文件

    [root@centos199 config]# cat kibana.yml |grep -E -v "^#|^$"
    server.port: 5601
    server.host: "192.168.1.99"
    elasticsearch.url: "http://192.168.1.99:9200"

    nohup ./bin/kibana &

  • 相关阅读:
    hdu 母牛的故事 递推题
    并查集
    又是矩阵 Uva上的一道 经典题目
    poj 3233 矩阵幂取模
    electronvue + elementui构建桌面应用
    主板cmos 映射表
    高级配置与电源接口 acpi 简介
    警告不能读取 AppletViewer 属性文件的解决方法
    高级 Synth(转载)
    vbs 查看硬件信息代码
  • 原文地址:https://www.cnblogs.com/haoge92/p/8659224.html
Copyright © 2011-2022 走看看