[root@log-node1 ~]# cobbler repo add --name=logstash-2.3 --mirror=http://packages.elastic.co/logstash/2.3/centos --arch=x86_64 --breed=yum
[root@log-node1 ~]# cobbler repo add --name=elasticsearch2 --mirror=http://packages.elastic.co/ela ... entos --arch=x86_64 --breed=yum
[root@log-node1 ~]# cobbler repo add --name=kibana4.5 --mirror=http://packages.elastic.co/kibana/4.5/centos --arch=x86_64 --breed=yum
[root@log-node1 ~]# cobbler reposync
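With the mirrors synced, the packages can be installed from the cobbler-served repos on each node. A minimal sketch, assuming a client-side .repo file already points at the cobbler server:
yum install -y elasticsearch logstash kibana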
[root@node1 /etc/elasticsearch]# grep '^[a-Z]' elasticsearch.yml
cluster.name: myes
node.name: node1
path.data: /data/es-data
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: true
network.host: 192.168.3.3
http.port: 9200
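Before starting the service, the directory from path.data has to exist and be writable by the elasticsearch user; otherwise startup usually fails with the "Failed to created node environment" exception shown in the log excerpt further down. A minimal sketch:
mkdir -p /data/es-data
chown -R elasticsearch:elasticsearch /data/es-data
/etc/init.d/elasticsearch start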
[root@node1 /etc/elasticsearch]# curl -i -XGET 'http://192.168.3.3:9200/_count?';echo
HTTP/1.1 200 OK
Content-Type: application/json; charset=UTF-8
Content-Length: 59
{"count":0,"_shards":{"total":0,"successful":0,"failed":0}}
[root@node1 /etc/elasticsearch]# /usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head
Access the head plugin at:
http://192.168.3.3:9200/_plugin/head/
Search GitHub for plugins and install them directly, e.g. bigdesk:
/usr/share/elasticsearch/bin/plugin install lukas-vlcek/bigdesk
Change the discovery mode to unicast. Only node2 needs the change; node1 can keep the default, because it is enough for one node to know about the other:
discovery.zen.ping.unicast.hosts: ["192.168.3.3", "192.168.3.4"]
https://www.elastic.co/learn
[root@node1 /data]# curl http://192.168.3.3:9200/_cluster/health?pretty=true
{
"cluster_name" : "myes",
"status" : "green",
"timed_out" : false,
"number_of_nodes" : 2,
"number_of_data_nodes" : 2,
"active_primary_shards" : 7,
"active_shards" : 14,
"relocating_shards" : 0,
"initializing_shards" : 0,
"unassigned_shards" : 0,
"delayed_unassigned_shards" : 0,
"number_of_pending_tasks" : 0,
"number_of_in_flight_fetch" : 0,
"task_max_waiting_in_queue_millis" : 0,
"active_shards_percent_as_number" : 100.0
}
[root@node2 elasticsearch]# /opt/logstash/bin/logstash -e 'input{ stdin{} } output{ stdout{} }'
[root@node2 elasticsearch]# /opt/logstash/bin/logstash -e 'input{ stdin{} } output{ stdout{ codec => rubydebug } }'
Settings: Default pipeline workers: 4
Pipeline main started
hello world
{
"message" => "hello world",
"@version" => "1",
"@timestamp" => "2017-01-28T11:06:23.310Z",
"host" => "node2.com"
}
/opt/logstash/bin/logstash -e 'input{ stdin{} } output{ elasticsearch { hosts => ["192.168.3.3:9200"] index => "logstash-%{+YYYY.MM.dd}" } }'
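A quick check (not from the original session) that the one-liner actually created an index on the cluster:
curl 'http://192.168.3.3:9200/_cat/indices?v'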
[root@node1 ~]# cat /etc/logstash/conf.d/demo.conf
input {
    stdin{}
}
filter{
}
output{
    elasticsearch {
        hosts => ["192.168.3.3:9200"]
        index => "logstash-%{+YYYY.MM.dd}"
    }
    stdout {
        codec => rubydebug
    }
}
Collecting system logs, three ways to get data into es:
rsyslog -> es
file -> es
tcp -> es (see the sketch after this list)
1. One line of a log file becomes one event.
2. input and output are the only required config sections; filter is optional.
3. Event flow: event -> input -> codec -> filter -> codec -> output
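A minimal sketch of the tcp -> es path; the port number and index name are arbitrary choices for this example:
input {
    tcp {
        port => 6666
        type => "tcp-log"
        mode => "server"
    }
}
output {
    elasticsearch {
        hosts => ["192.168.3.3:9200"]
        index => "tcp-log-%{+YYYY.MM}"
    }
}
It can be fed for testing with something like: echo "hello tcp" | nc 192.168.3.3 6666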
https://es.xiaoleilu.com/
/opt/logstash/bin/logstash -f /etc/logstash/conf.d/demo.conf
[root@node1 /opt/kibana/config]# egrep -v "#|^$" kibana.yml
server.port: 5601
server.host: "0.0.0.0"
elasticsearch.url: "http://192.168.3.3:9200"
kibana.index: ".kibana"
[root@node1 /opt/kibana/config]# /etc/init.d/kibana start
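A quick check (not from the original session) that Kibana is listening on the port from kibana.yml, before opening http://192.168.3.3:5601 in the browser:
netstat -lntp | grep 5601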
/var/log/elasticsearch/myes.log
input {
    file {
        path => ["/var/log/messages","/var/log/secure"]
        type => "system-log"
        start_position => "beginning"
    }
    file {
        path => "/var/log/elasticsearch/myes.log"
        type => "es-log"
        start_position => "beginning"
    }
    file {
        path => "/var/log/elasticsearch/myes.log.2017-01-27"
        type => "es1-log"
        start_position => "beginning"
    }
}
filter{
}
output{
    if [type] == "system-log" {
        elasticsearch {
            hosts => ["192.168.3.3:9200"]
            index => "system-log-%{+YYYY.MM}"
        }
    }
    if [type] == "es-log" {
        elasticsearch {
            hosts => ["192.168.3.3:9200"]
            index => "es-log-%{+YYYY.MM}"
        }
    }
    if [type] == "es1-log" {
        elasticsearch {
            hosts => ["192.168.3.3:9200"]
            index => "es1-log-%{+YYYY.MM}"
        }
    }
}
[root@node1 ~]# for i in `ls .since*`; do echo $i;cat $i; done
.sincedb_1fb922e15ccea4ac0d028d33639ba3ea
86446130 0 64768 54548
86446131 0 64768 924
.sincedb_2a52db197011b7a611fb7594c513ff67
0 0 0
.sincedb_a9b9fed7edff6fd888ffe131a05b5397
210651098 0 64768 4520
210651086 0 64768 4973
.sincedb_b5712b028c2d902c97f521ccf91d1ea8
210651087 0 64768 10086
.sincedb_ec411afaed82c6e15509db4e6d8d51e3
[root@node1 ~]# ls -li /var/log/messages
86446130 -rw------- 1 root root 58431 Feb 3 06:57 /var/log/messages
[root@node1 ~]# ls -li /var/log/elasticsearch/myes.log.2017-01-27
210651087 -rw-r--r-- 1 elasticsearch elasticsearch 10086 Feb 3 06:39 /var/log/elasticsearch/myes.log.2017-01-27
[root@node1 ~]# rm -f .sincedb_*
[root@node1 ~]# pwd
/root
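Each sincedb line is "inode major_device minor_device byte_offset", which is why the inodes above line up with the ls -li output: myes.log.2017-01-27 (size 10086) has been read to its end (offset 10086), while /var/log/messages is at offset 54548 of 58431 bytes. Removing the sincedb files as above forces logstash to re-read from start_position on its next run. A quick way to compare on your own files (not from the original session):
stat -c 'inode=%i size=%s %n' /var/log/messages /var/log/elasticsearch/myes.log.2017-01-27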
[2017-01-27 23:53:54,741][INFO ][plugins ] [node1] modules [reindex, lang-expression, lang-groovy], plugins [], sites []
[2017-01-27 23:53:54,762][ERROR][bootstrap ] Exception
java.lang.IllegalStateException: Failed to created node environment
at org.elasticsearch.node.Node.<init>(Node.java:167)
at org.elasticsearch.node.Node.<init>(Node.java:140)
... 5 more
[2017-01-27 23:56:29,132][INFO ][node ] [node1] version[2.3.5], pid[6215], build[90f439f/2016-07-27T10:36:52Z]
[2017-01-27 23:56:29,133][INFO ][node ] [node1] initializing ...
[2017-01-27 23:56:30,066][INFO ][plugins ] [node1] modules [reindex, lang-expression, lang-groovy], plugins [head], sites [
Multiline matching: merge stack-trace style continuation lines (like the Java trace in the log excerpt above) into the event they belong to.
file {
    path => "/var/log/elasticsearch/myes.log.2017-01-27"
    type => "es1-log"
    start_position => "beginning"
    codec => multiline {
        pattern => "^\["
        negate => true
        what => "previous"
    }
}
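The multiline codec can be tried out interactively before touching the file input, in the same style as the -e one-liners earlier; pasting a few log lines on stdin shows how lines not starting with "[" get merged into the previous event:
/opt/logstash/bin/logstash -e 'input{ stdin{ codec => multiline { pattern => "^\[" negate => true what => "previous" } } } output{ stdout{ codec => rubydebug } }'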
nginx access-log collection. The default plain-text log_format:
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                '$status $body_bytes_sent "$http_referer" '
                '"$http_user_agent" "$http_x_forwarded_for"';
1. Change the nginx log format to JSON.
2. Either collect the file directly (sketch below), or go through redis: a python script reads from redis, converts to JSON, and writes into es.
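A hedged sketch of the direct-file approach. The JSON log_format is an example, not a fixed nginx format; the field names (including http_code, chosen to match the Kibana query further down) and the access_json.log path are assumptions. The matching logstash file input uses the nginx-access-log-%{+YYYY.MM.dd} index naming that the Kibana URL below expects:
log_format json '{"@timestamp":"$time_iso8601",'
                '"client":"$remote_addr",'
                '"url":"$uri",'
                '"domain":"$host",'
                '"size":$body_bytes_sent,'
                '"responsetime":$request_time,'
                '"referer":"$http_referer",'
                '"ua":"$http_user_agent",'
                '"http_code":"$status"}';
access_log /var/log/nginx/access_json.log json;

input {
    file {
        path => "/var/log/nginx/access_json.log"
        codec => json
        type => "nginx-access-log"
        start_position => "beginning"
    }
}
output {
    elasticsearch {
        hosts => ["192.168.3.3:9200"]
        index => "nginx-access-log-%{+YYYY.MM.dd}"
    }
}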
[root@node2 logstash]# cat /var/lib/logstash/.sincedb_0ba90fec979d14f3e8e5ab1191218736
68231552 0 64768 202989
http://192.168.3.3:5601/app/kibana#/discover?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-30d,mode:quick,to:now))&_a=(columns:!(_index,host,http_code),index:%5Bnginx-access-log-%5DYYYY.MM.DD,interval:auto,query:(query_string:(analyze_wildcard:!t,query:'http_code:404')),sort:!('@timestamp',desc),uiState:())
http_code:404