The tricky part of this process was getting Logstash to massage the data. The configuration below lives in logstash-sample.conf; its input listens on port 5044, where Filebeat ships its events.
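For context, the Filebeat side only needs to point its Logstash output at that port. A minimal sketch of the relevant filebeat.yml section (the localhost address is an assumption for a Logstash instance running on the same machine):

output.logstash:
  hosts: ["localhost:5044"]

The Logstash pipeline itself: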
# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.
input {
  beats {
    port => 5044
  }
}
filter {
  mutate {
    gsub => ["message", "#", " "]   # replace every "#" with a space
    split => ["message", " "]       # split the message on spaces into an array of tokens
  }
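  # After the two mutates above, "message" is an array of space-separated
  # tokens, and %{[message][N]} below picks out the Nth token (0-based).
  # E.g. a hypothetical line starting "01-Jan-2021 12:00:00.000 ..." would
  # give [message][0] = "01-Jan-2021" and [message][1] = "12:00:00.000".
  # The indices used here depend entirely on the exact DNS log format.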
  mutate {
    add_field => {
      "website"      => "%{[message][10]}"
      "websiteGroup" => "%{[message][10]}"
      "route"        => "%{[message][8]}"
      "rrData"       => "%{[message][14]}"
      "createTime"   => "%{[message][0]} %{[message][1]} "   # tokens can be combined into one field like this
    }
  }
  mutate {
    gsub => ["rrData", "[()]", ""]   # strip the parentheses around rrData
    gsub => ["route", ":", ""]       # strip the colon from route
  }
  date {
    match => ["createTime", "dd-MMM-yyyy HH:mm:ss.SSS ", "yyyy-MM-dd'T'HH:mm:ss.SSS"]   # patterns the timestamp may arrive in
    target => "createTime"
    locale => "cn"   # parsing locale (note: this sets the locale, not the time zone)
  }
  ruby {
    # the parsed time ends up 8 hours behind local time (UTC vs UTC+8), so add the offset back
    code => "event.set('createTime', event.get('createTime').time.localtime + 8*60*60)"
  }
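  # (the date filter also offers a built-in "timezone" option, e.g.
  # timezone => "Asia/Shanghai", for declaring the zone of the source
  # timestamps; here the stored value is simply shifted by hand instead)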
}
output {
  elasticsearch {
    #action => "update"
    hosts => ["es-ip:9200", "es-ip:9200", "es-ip:9200"]
    index => "log_band_dns"
    user => "es"
    password => "es-user-password"
  }
}
Once that is done, just start Logstash, and the data you see in Kibana (Elasticsearch's visualization UI) will already be in this processed form.
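For reference, a minimal way to start the pipeline and spot-check the index (assuming Logstash's standard directory layout, and with the es-ip and password placeholders above filled in with real values):

bin/logstash -f logstash-sample.conf
curl -u es:es-user-password "http://es-ip:9200/log_band_dns/_search?size=1&pretty"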