  • ELK Setup

    Environment preparation

    #https://www.elastic.co
    1. Install the Java environment (Java 1.8 or later is required)
    wget http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.rpm
    rpm -ivh jdk-8u131-linux-x64.rpm
    Verify that Java was installed successfully
    java -version    # note: JDK 8 uses a single dash
    
    ------------------------------------------------
    Manual JDK installation (environment variables)
    export JAVA_HOME=/usr/java/jdk
    export JRE_HOME=$JAVA_HOME/jre
    export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
    export PATH=$JAVA_HOME/bin:$JRE_HOME/bin:$PATH
    -----------------------------------------------
    
    vi /etc/security/limits.conf
    * soft nofile 655350
    * hard nofile 655350
    root             soft    nproc           65535
    root             hard    nproc           65535
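    These limits only take effect for new login sessions; a minimal check after re-logging in (nothing ES-specific, just confirming the values set above):
    ulimit -n    # open files, should report 655350
    ulimit -u    # max user processes (nproc)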
    

    Install elasticsearch

    2. Install elasticsearch
    yum install -y elasticsearch
    Create the data and log directories
    mkdir -p /db/es-data /db/es-log
    # elasticsearch runs as the elasticsearch user, so it must own the data/log directories
    chown -R elasticsearch:elasticsearch /db/es-data /db/es-log
    chown -R elasticsearch:elasticsearch /var/log/elasticsearch/
    
    Java heap tuning
    # The official docs recommend setting the heap to half of physical memory, 32G at most
    # vi /usr/lib/systemd/system/elasticsearch.service to raise the memory-lock limit (needed for bootstrap.memory_lock)
    LimitMEMLOCK=infinity
    # run systemctl daemon-reload after editing the unit file
    vim /etc/elasticsearch/jvm.options
    -Xms1g
    -Xmx1g

    Edit the elasticsearch config file: vim /etc/elasticsearch/elasticsearch.yml
    Find cluster.name in the config file, uncomment it and set the cluster name
    cluster.name: elk-cluster1
    
    Find node.name in the config file, uncomment it and set the node name
    node.name: elk-node1
    
    Set the data path
    path.data: /db/es-data
    
    Set the log path
    path.logs: /db/es-log
    
    Lock the process memory so it cannot be swapped out (important once the data volume grows)
    bootstrap.memory_lock: true
    
    Network address to listen on
    network.host: 0.0.0.0
    
    Listening port (9200 serves client/HTTP requests; 9300 is the transport port used for node-to-node communication)
    http.port: 9200
    
    # Unicast discovery: list the addresses of the cluster nodes
    discovery.zen.ping.unicast.hosts: ["192.168.168.33", "192.168.168.35"]
    
    Add these parameters so the head plugin can access es (5.x; add them manually if they are missing)
    http.cors.enabled: true
    http.cors.allow-origin: "*"
    Start the elasticsearch service
    systemctl start elasticsearch
    ps -ef | grep java
    # If it fails to start, check /var/log/messages or the logs under /db/es-log: 1. data directory permissions; 2. JVM memory settings
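    Once the service is up, a quick sanity check that memory locking actually took effect (a minimal sketch; adjust the host/port if you changed them above):
    curl -s 'http://localhost:9200/_nodes?filter_path=**.mlockall&pretty'
    # expected output contains "mlockall" : true for every node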
    

    Access

    Open in a browser:
    http://192.168.168.33:9200
    {
      "name" : "test-1",
      "cluster_name" : "elk-cluster1",
      "cluster_uuid" : "XHgt_IOKTfaV0c0ky41kfA",
      "version" : {
        "number" : "6.1.0",
        "build_hash" : "c0c1ba0",
        "build_date" : "2017-12-12T12:32:54.550Z",
        "build_snapshot" : false,
        "lucene_version" : "7.1.0",
        "minimum_wire_compatibility_version" : "5.6.0",
        "minimum_index_compatibility_version" : "5.0.0"
      },
      "tagline" : "You Know, for Search"
    }
    

    Modify the elasticsearch config file on the client node

    Client node configuration:
    Copy the server's elasticsearch.yml over and change:
    node.name: elk-node2
    
    After fixing the directory ownership (data/log dirs owned by elasticsearch), start the elasticsearch service
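    Once both nodes are running, a quick check that they joined the same cluster (assuming the 192.168.168.33 node is reachable from where you run this):
    curl 'http://192.168.168.33:9200/_cat/nodes?v'
    # elk-node1 and elk-node2 should both be listed; the elected master is marked with *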
    

    Install the elasticsearch-head plugin

    https://github.com/mobz/elasticsearch-head

    # The kopf plugin has not been updated for a long time
    # Install elasticsearch-head from git; alternatively it can be started via docker (see the sketch after this list)
        # yum install -y npm
        # git clone git://github.com/mobz/elasticsearch-head.git
        # cd elasticsearch-head
        # npm install grunt --save    # or simply: npm install
        # ll node_modules/grunt    # confirm this directory was created
        # npm install
        # npm run start &
        Check that the port is listening
        netstat -antp | grep 9100
        Open in a browser: http://192.168.168.33:9100
        # After connecting to a node: the star marks the master node, circles mark data nodes; cluster health: green = normal, yellow = replica shards missing/unassigned, red = a node or primary shard has problems
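    As mentioned above, head can also be started from its published docker image instead of the npm build (a sketch; assumes docker is installed on the host):
    docker run -d --name es-head -p 9100:9100 mobz/elasticsearch-head:5
    # then open http://192.168.168.33:9100 as above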
    
    Error record; fixed by upgrading node
    wget https://nodejs.org/dist/v12.18.3/node-v12.18.3-linux-x64.tar.xz
    tar xvf node-v12.18.3-linux-x64.tar.xz
    Add it to PATH: vi /etc/profile
    export NODE_HOME=/usr/local/src/node-v12.18.3-linux-x64
    export PATH=$NODE_HOME/bin:$PATH
    source /etc/profile
    [root@izuf6a63lixro2bleuwvzwz elasticsearch-head]# npm install grunt-save
    npm ERR! Linux 3.10.0-514.26.2.el7.x86_64
    npm ERR! argv "/usr/bin/node" "/bin/npm" "install" "grunt-save"
    npm ERR! node v6.17.1
    npm ERR! npm v3.10.10
    npm ERR! code E404
    npm ERR! 404 Not found : grunt-save
    npm ERR! 404
    npm ERR! 404 'grunt-save' is not in the npm registry.
    npm ERR! 404 You should bug the author to publish it (or use the name yourself!)
    npm ERR! 404
    npm ERR! 404 Note that you can also install from a
    npm ERR! 404 tarball, folder, http url, or git url.
    npm ERR! Please include the following file with any support request:
    npm ERR!     /usr/local/src/elasticsearch-head/npm-debug.log

    Monitor elasticsearch status

    curl -sXGET http://192.168.168.33:9200/_cluster/health?pretty=true
    {
      "cluster_name" : "elk-cluster1",
      "status" : "yellow",
      "timed_out" : false,
      "number_of_nodes" : 1,
      "number_of_data_nodes" : 1,
      "active_primary_shards" : 5,
      "active_shards" : 5,
      "relocating_shards" : 0,
      "initializing_shards" : 0,
      "unassigned_shards" : 5,
      "delayed_unassigned_shards" : 0,
      "number_of_pending_tasks" : 0,
      "number_of_in_flight_fetch" : 0,
      "task_max_waiting_in_queue_millis" : 0,
      "active_shards_percent_as_number" : 50.0
    #监控脚本
    #状态监控,根据返回看看是100还是50做zabbix监控 #!/usr/bin/env python import subprocess #import smtplib #from email.mime.text import MIMEText #from email.utils import formataddr body="" false="false" obj = subprocess.Popen(("curl -sXGET http://192.168.168.33:9200/_cluster/health?pretty=true"),shell=True,stdout=subprocess.PIPE) data = obj.stdout.read() data1 = eval(data) #print (data1) status = data1.get("status") #print (status) if status == "green": print(50) else: print(100)
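    One way to wire the script into zabbix is a UserParameter on the agent (a sketch; the conf path and the script location /etc/zabbix/scripts/es_health.py are assumptions, adjust to your layout):
    # e.g. in /etc/zabbix/zabbix_agentd.d/es.conf
    UserParameter=es.cluster.status,/usr/bin/python /etc/zabbix/scripts/es_health.py
    # restart the agent afterwards
    systemctl restart zabbix-agent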

    Install Logstash

    yum install logstash
    logstash -f nginx_access.conf -t    # test whether the given config file is valid
    /usr/share/logstash/bin/logstash -e 'input { stdin { type => "stdin" } } output { stdout { codec => "rubydebug" } }'
    logstash -e 'input { stdin { } } output { stdout { codec => rubydebug } }'
    
    # Edit (vi) /etc/logstash/logstash.yml to set where the data and log files are placed
    # (create /db/ls-data and /db/ls-log first and chown them to logstash)
    path.data: /db/ls-data
    path.config: /etc/logstash/conf.d/*.conf
    path.logs: /db/ls-log
    
    # Define the file logstash should watch; for system log files the file's group must be changed to logstash so it can be read
    vi /etc/logstash/conf.d/messages.conf
    input {
      file{
        path => "/var/log/messages"
        start_position => "beginning"  # on the first run, read the file from the beginning
        type => "systemlog-222.55"
        stat_interval => "5"
      }
    }
    }
    
    output {
      elasticsearch {
        hosts => ["192.168.168.33:9200"]
        index => "logstash-messages-22255--%{+YYYY.MM.dd}"
      }
      # Also write the logs to a file; this output is optional
      file {
        path => "/tmp/123.txt"
      }
    }
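    After saving the file, a quick way to validate it and confirm the index shows up in elasticsearch (a sketch; the grep pattern depends on the index name configured above):
    /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/messages.conf -t
    systemctl restart logstash
    curl -s 'http://192.168.168.33:9200/_cat/indices?v' | grep messages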

    Install kibana
    
    yum install kibana -y
    # Edit the config file
    vi /etc/kibana/kibana.yml
    server.port: 5601
    server.host: "127.0.0.1"
    elasticsearch.url: "http://localhost:9200"
    After the changes, open the page: http://192.168.168.33:5601/
    # Note: with server.host bound to 127.0.0.1 kibana is only reachable locally; either set server.host to "0.0.0.0" or access it through the nginx proxy configured below

    Install nginx (optional)

    Install nginx
    yum install nginx httpd-tools -y
    Create the login password
    htpasswd -bc /usr/local/nginx/etc/htpass.txt kibana 123456
    chown nginx.nginx /usr/local/nginx/etc/htpass.txt
    
    Add the configuration:
    server {
            listen 80;
            server_name www.kibana33.com;
            auth_basic "Restricted Access";
            auth_basic_user_file /usr/local/nginx/etc/htpass.txt;
    
            location / {
                proxy_pass http://127.0.0.1:5601;
                proxy_http_version 1.1;
                proxy_set_header Upgrade $http_upgrade;
                proxy_set_header Connection 'upgrade';
                proxy_set_header Host $host;
                proxy_cache_bypass $http_upgrade;
            }
    }
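    A quick check of the proxy after reloading nginx (a sketch, using the kibana/123456 credentials created above):
    nginx -t && systemctl reload nginx
    curl -u kibana:123456 -H 'Host: www.kibana33.com' http://127.0.0.1/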

    Logstash template

    vi /etc/logstash/conf.d/nginx.conf
    input {
        file {
            path => "/var/log/messages"
            type => "system"
            start_position => "beginning"
        }   
    
        file {
            path => "/var/log/secure"
            type => "secure"
            start_position => "beginning"
        }   
    
        file {
            path => "/var/log/httpd/access_log"
            type => "http"
            start_position => "beginning"
        }   
    
        file {
            path => "/usr/local/nginx/logs/elk.access.log"
            type => "nginx"
            start_position => "beginning"
        }   
        
        file {
            path => "/var/log/mysql/mysql.slow.log"
            type => "mysql"
            start_position => "beginning"
            codec => multiline {
                pattern => "^# User@Host:"
                negate => true
                what => "previous"
            }
        }
    }
    
    filter {
     
        grok {
            match => { "message" => "SELECT SLEEP" }
            add_tag => [ "sleep_drop" ]
            tag_on_failure => []
        }
     
     
        if "sleep_drop" in [tags] {
                drop {}
        }
        
        grok {
            match => { "message" => "(?m)^# User@Host: %{USER:User}\[[^\]]+\] @ (?:(?<clienthost>\S*) )?\[(?:%{IP:Client_IP})?\]\s.*# Query_time: %{NUMBER:Query_Time:float}\s+Lock_time: %{NUMBER:Lock_Time:float}\s+Rows_sent: %{NUMBER:Rows_Sent:int}\s+Rows_examined: %{NUMBER:Rows_Examined:int}\s*(?:use %{DATA:Database};\s*)?SET timestamp=%{NUMBER:timestamp};\s*(?<Query>(?<Action>\w+)\s+.*)
    # Time:.*$" }
        }
     
         date {
                match => [ "timestamp", "UNIX" ]
                remove_field => [ "timestamp" ]
        }
     
     
    }
    
    
       
    output {
    
        if [type] == "system" { 
    
            elasticsearch {
                hosts => ["192.168.168.33:9200"]
                index => "nagios-system-%{+YYYY.MM.dd}"
            }       
        }   
    
        if [type] == "secure" {
    
            elasticsearch {
                hosts => ["192.168.168.33:9200"]
                index => "nagios-secure-%{+YYYY.MM.dd}"
            }
        }
    
        if [type] == "http" {
    
            elasticsearch {
                hosts => ["192.168.168.33:9200"]
                index => "nagios-http-%{+YYYY.MM.dd}"
            }
        }
    
        if [type] == "nginx" {
    
            elasticsearch {
                hosts => ["192.168.168.33:9200"]
                index => "nagios-nginx-%{+YYYY.MM.dd}"
            }
        }
        
        if [type] == "mysql" {
    
            elasticsearch {
                hosts => ["192.168.168.33:9200"]
                index => "nagios-mysql-slow-%{+YYYY.MM.dd}"
            }
        }
    }

    Write important logs that need longer retention to a database

    Writing logs to a database
    Install logstash-output-jdbc
    git clone https://github.com/theangryangel/logstash-output-jdbc
    cd logstash-output-jdbc
    /usr/share/logstash/bin/logstash-plugin install logstash-output-jdbc
    When the installation finishes it prints:
    Validating logstash-output-jdbc
    Installing logstash-output-jdbc
    Installation successful
    Command to verify the plugin is installed: /usr/share/logstash/bin/logstash-plugin list | grep jdbc
    
    Install mysql and create the database
    
    # Download the MySQL JDBC driver: https://dev.mysql.com/downloads/connector/j/
    # All versions: https://mvnrepository.com/artifact/mysql/mysql-connector-java
    mkdir -p /usr/share/logstash/vendor/jar/jdbc
    cp mysql-connector-java.jar /usr/share/logstash/vendor/jar/jdbc/
    chown -R logstash.logstash /usr/share/logstash/vendor/jar
    
    
    Create the matching nginx_log table in the database with the columns (host,status,client,recordtime,url,responsetime); a DDL sketch follows the jdbc snippet below
    jdbc{
        connection_string => "jdbc:mysql://192.168.168.33/elk?user=elk&password=elk@123&useUnicode=true&characterEncoding=UTF8"
        statement => ["insert into nginx_log(host,status,client,recordtime,url,responsetime) VALUES(?,?,?,?,?,?)", "host","status","client","recordtime","url","responsetime"]
    }
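    As noted above, the nginx_log table has to exist before logstash can insert into it. A sketch of the DDL run through the mysql client (the column types are assumptions, adjust them to your data; the credentials match the connection string above):
    mysql -h192.168.168.33 -uelk -p'elk@123' elk -e "
      CREATE TABLE IF NOT EXISTS nginx_log (
        host         VARCHAR(128),
        status       VARCHAR(16),
        client       VARCHAR(64),
        recordtime   VARCHAR(64),
        url          VARCHAR(2048),
        responsetime VARCHAR(32)
      ) DEFAULT CHARSET=utf8;"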
    
    input {
      file {
        path => "/var/log/elk.access.log"
        type => "nginx-access-log-65"
        start_position =>"beginning"
        stat_interval => "2"
      }
    }
    
    output {
      elasticsearch {
        hosts => ["192.168.168.33:9200"]
        index => "logstash-nginx-accesslog-65-%{+YY.MM.dd}"
      }
      jdbc{
        connection_string => "jdbc:mysql://192.168.168.33/elk?user=elk&password=elk@123&useUnicode=true&characterEncoding=UTF8"
        statement => ["insert into nginx_log(host,status,client,recordtime,url,responsetime) VALUES(?,?,?,?,?,?)", "host","status","client","recordtime","url","responsetime"]
      }
    }
    
    
    systemctl restart logstash
    # TCP log collection
    input {
      tcp {
        port => "900"
        type => "tcplog"
      }
    }
    output {
      elasticsearch {
        hosts => ["192.168.168.33:9200"]
        index => "tcplog-65-%{+YYYY.MM.dd}"
      }
    }

    Windows client configuration

    Download jdk-8u261-windows-x64.exe
    as well as logstash-7.8.1.zip
    Install the JDK and add it to the environment variables
    
    Configuration (the final result was not entirely clear):
    
    input {
      file {
        path => "D:/Lova/AgentHost/log/Warning_LovaAgentD.exe_2020-08-18.log"
        type => "lova warning"
      }
    }
    
    output {
      elasticsearch {
        hosts => "192.168.168.33:9200"
        index => "erp_warning_%{index_day}"
      }
    }
    
    Place the config file in the logstash/bin directory
    Test the config file:
    cd C:\logstash\bin
    logstash.bat -f test.conf -t
    If there are no problems, run it:
    logstash.bat -f test.conf
    
    Add a matching test.conf on the server side as well
    Then check the result

      
