Start one Filebeat process per log source and have each output directly to Kafka, each process writing to its own topic (a sketch of running a second instance follows the config below).
cat filebeat.yml
filebeat.prospectors:
- input_type: log
  paths:
    - /home/test/job/logs/job-20*.log
  document_type: job
  tail_files: true
  multiline.pattern: '^[[0-9]{4}-[0-9]{2}-[0-9]{2}'
  multiline.negate: true
  multiline.match: after

output.kafka:
  hosts: ["192.168.1.99:9092"]
  topic: job
  partition.round_robin:
    reachable_only: false
  required_acks: 1
  compression: gzip
  max_message_bytes: 1000000
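To collect a second log source, a straightforward approach under this setup is a copy of the config that points at a different path and sets its own topic (for example a hypothetical filebeat-order.yml with topic: order; the name, path, and data directories below are placeholders, not from the original setup). Each instance needs its own -path.data so the registries do not collide:

# Hypothetical example: run two Filebeat instances, one per log source.
nohup ./filebeat -c filebeat.yml       -path.data /var/lib/filebeat-job   >/dev/null 2>&1 &
nohup ./filebeat -c filebeat-order.yml -path.data /var/lib/filebeat-order >/dev/null 2>&1 &

# Optional check: confirm events are arriving on each topic.
bin/kafka-console-consumer.sh --bootstrap-server 192.168.1.99:9092 --topic job   --from-beginning
bin/kafka-console-consumer.sh --bootstrap-server 192.168.1.99:9092 --topic order --from-beginning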
A Logstash node then pulls data from the Kafka broker cluster in real time and forwards it to Elasticsearch:
[root@centos199 config]# cat job.conf
input {
  kafka {
    bootstrap_servers => "192.168.1.99:9092"
    topics => ["job"]
    codec => "json"
  }
}

output {
  elasticsearch {
    hosts => ["192.168.1.99:9200"]
    index => "job-%{+YYYY.MM.dd}"
    template_overwrite => true
  }
}
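A quick way to run and sanity-check this pipeline (a sketch assuming a tarball install of Logstash with the conf file under config/; adjust paths to your layout):

# Start the pipeline with this conf file.
bin/logstash -f config/job.conf

# Once events flow, the daily job-* indices should appear in Elasticsearch.
curl -s '192.168.1.99:9200/_cat/indices/job-*?v'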