基于logstash+elasticsearch+kibana的日志收集分析方案

文档

参考文档

https://www.pianshen.com/article/6940303218/

https://www.cnblogs.com/kingleft/p/7682678.html

https://www.cnblogs.com/garfieldcgf/p/8325723.html

https://www.linuxidc.com/Linux/2018-11/155518.htm

https://www.elastic.co/cn/downloads/elasticsearch

https://www.elastic.co/cn/downloads/logstash

https://www.elastic.co/cn/downloads/kibana

https://www.cnblogs.com/zhuwenjoyce/category/1433145.html

logstash

https://www.cnblogs.com/xiaobaozi-95/p/9214307.html

https://www.cnblogs.com/zhangan/p/11395056.html

http://grokdebug.herokuapp.com/

下载地址

elasticsearch: https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.8.13.tar.gz

kibana: https://artifacts.elastic.co/downloads/kibana/kibana-6.8.13-linux-x86_64.tar.gz

logstash: https://artifacts.elastic.co/downloads/logstash/logstash-6.8.13.tar.gz

安装前准备

 1### 开放防火墙端口
 2###### elasticsearch 端口
 3firewall-cmd --zone=public --add-port=9200/tcp --permanent
 4###### kibana 端口
 5firewall-cmd --zone=public --add-port=5601/tcp --permanent
 6# 配置立即生效
 7firewall-cmd --reload
 8
 9### 创建用户(elasticsearch、kibana 必须以非 root 用户运行,也可以使用系统已有的用户)
10[root@localhost elk]# groupadd wisentsoft && useradd -g wisentsoft wisentsoft
11[root@localhost elk]# passwd wisentsoft

elasticsearch 安装

 1### 解压
 2[root@localhost elk]# tar -zxf elasticsearch-6.8.13.tar.gz
 3
 4### 配置 elasticsearch(编辑 elasticsearch 安装目录下的 config/elasticsearch.yml 文件),修改内容如下:
 5[root@localhost elk]# vim elasticsearch/config/elasticsearch.yml
 6### 内网访问
 7network.host: 0.0.0.0
 8
 9### 改变目录权限
10[root@localhost elk]# chown -R wisentsoft:wisentsoft elasticsearch
11
12### 调整到非 root 用户
13[root@localhost elk]# su - wisentsoft
14
15### elasticsearch 启动
16[wisentsoft@localhost ~]$ /opt/soft/elk/elasticsearch/bin/elasticsearch -d

在浏览器中访问: http://localhost:9200/ 出现如下结果表示 elasticsearch 启动成功:

 1{
 2  "name" : "2H7XjnT",
 3  "cluster_name" : "elasticsearch",
 4  "cluster_uuid" : "turFhpzIQc6fU4Fyc4C0lQ",
 5  "version" : {
 6    "number" : "6.8.13",
 7    "build_flavor" : "default",
 8    "build_type" : "tar",
 9    "build_hash" : "be13c69",
10    "build_date" : "2020-10-16T09:09:46.555371Z",
11    "build_snapshot" : false,
12    "lucene_version" : "7.7.3",
13    "minimum_wire_compatibility_version" : "5.6.0",
14    "minimum_index_compatibility_version" : "5.0.0"
15  },
16  "tagline" : "You Know, for Search"
17}

elasticsearch 常用命令地址:

查询所有数据:curl http://localhost:9200/_search?pretty

集群健康状态:curl -X GET http://localhost:9200/_cluster/health?pretty

删除所有数据:curl -X DELETE 'http://localhost:9200/_all'

删除指定索引:curl -X DELETE 'http://localhost:9200/索引名称'

常见问题

启动出现错误

max file descriptors [4096] for elasticsearch process is too low, increase to at least [65535]

切换到root用户,编辑 /etc/security/limits.conf,在最后面追加下面内容(wisentsoft表示启动elasticsearch服务的用户名):

wisentsoft hard nofile 65536

wisentsoft soft nofile 65536

此文件修改后需要用户重新登录才会生效

启动出现错误

max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]

切换到root用户,编辑 /etc/sysctl.conf,在最后面追加下面内容:

vm.max_map_count=655360

执行 sysctl -p 使之生效

kibana 安装

 1### 解压
 2[root@localhost elk]# tar -zxf kibana-6.8.13-linux-x86_64.tar.gz
 3
 4### 配置 kibana(编辑 kibana 安装目录下的 config/kibana.yml 文件),修改内容如下:
 5[root@localhost elk]# vim kibana/config/kibana.yml
 6### 内网访问
 7server.host: "0.0.0.0"
 8### 指定 elasticsearch 的地址(如果 elasticsearch 安装在本机,并且端口没有被更改,则不需要修改)
 9### elasticsearch.hosts: ["http://localhost:9200"]
10
11### 改变目录权限
12[root@localhost elk]# chown -R wisentsoft:wisentsoft kibana
13
14### 调整到非 root 用户
15[root@localhost elk]# su - wisentsoft
16
17### kibana 启动
18[wisentsoft@localhost ~]$ nohup /opt/soft/elk/kibana/bin/kibana >/dev/null 2>&1 &

浏览器访问:http://localhost:5601 即可

logstash 安装

  1### 解压
  2[root@localhost elk]# tar -zxf logstash-6.8.13.tar.gz
  3
  4### 改变目录权限
  5[root@localhost elk]# chown -R wisentsoft:wisentsoft logstash
  6
  7### 调整到非 root 用户
  8[root@localhost elk]# su - wisentsoft
  9
 10### 测试 logstash
 11[wisentsoft@localhost ~]$ /opt/soft/elk/logstash/bin/logstash -e 'input {stdin {}} output {stdout {}}'
 12
 13### logstash.conf 文件案例:
 14[wisentsoft@localhost config]$ vim /opt/soft/elk/logstash/config/logstash.conf
 15input {
 16    file {
 17        path => ["/opt/jygz/idc/logs/logFile.log"]
 18        type => "idc"
 19        codec => multiline {
 20            pattern =>"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} \["
 21            negate => true
 22            what => "previous"
 23        }
 24    }
 25
 26    file {
 27        path => ["/opt/jygz/adc/logs/logFile.log"]
 28        type => "adc"
 29        codec => multiline {
 30            pattern =>"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} \["
 31            negate => true
 32            what => "previous"
 33        }
 34    }
 35
 36    file {
 37        path => ["/opt/jygz/pps/logs/logFile.log"]
 38        type => "pps"
 39        codec => multiline {
 40            pattern =>"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} \["
 41            negate => true
 42            what => "previous"
 43        }
 44    }
 45
 46    file {
 47        path => ["/opt/jygz/dsc/logs/logFile.log"]
 48        type => "dsc"
 49        codec => multiline {
 50            pattern =>"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} \["
 51            negate => true
 52            what => "previous"
 53        }
 54    }
 55}
 56
 57filter {
 58    grok {
 59        match => { "message" => "%{TIMESTAMP_ISO8601:logtime} \[%{DATA:logthread}\] %{LOGLEVEL:loglevel} (?<logclass>[0-9a-zA-Z.$_]+):%{INT:logline} - %{GREEDYDATA:logcontent}"}
 60    }
 61
 62    date {
 63        locale => "zh_CN"
 64        target => "@timestamp"
 65        match => [ "logtime", "yyyy-MM-dd HH:mm:ss.SSS" ]
 66    }
 67}
 68
 69output {
 70    if[type] == "idc" {
 71        elasticsearch {
 72            hosts => ["http://localhost:9200"]
 73            index => "jygz-idc-%{+YYYY.MM.dd}"
 74        }
 75    }
 76
 77    if[type] == "adc" {
 78        elasticsearch {
 79            hosts => ["http://localhost:9200"]
 80            index => "jygz-adc-%{+YYYY.MM.dd}"
 81        }
 82    }
 83
 84    if[type] == "pps" {
 85        elasticsearch {
 86            hosts => ["http://localhost:9200"]
 87            index => "jygz-pps-%{+YYYY.MM.dd}"
 88        }
 89    }
 90
 91    if[type] == "dsc" {
 92        elasticsearch {
 93            hosts => ["http://localhost:9200"]
 94            index => "jygz-dsc-%{+YYYY.MM.dd}"
 95        }
 96    }
 97}
 98
 99### logstash 启动
100[wisentsoft@localhost ~]$ nohup /opt/soft/elk/logstash/bin/logstash -f /opt/soft/elk/logstash/config/logstash.conf >/dev/null 2>&1 &

案例

配置文件(test.conf):

 1input {
 2    stdin {
 3        codec => multiline {
 4            pattern =>"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} \["
 5            negate => true
 6            what => "previous"
 7        }
 8    }
 9}
10filter {
11    grok {
12        match => { "message" => "%{TIMESTAMP_ISO8601:logtime} \[%{DATA:logthread}\] %{LOGLEVEL:loglevel} (?<logclass>[0-9a-zA-Z.$_]+):%{INT:logline} - %{GREEDYDATA:logcontent}"}
13    }
14    date {
15        locale => "zh_CN"
16        target => "@timestamp"
17        match => [ "logtime", "yyyy-MM-dd HH:mm:ss.SSS" ]
18    }
19}
20output {
21    stdout {}
22}

在终端中执行:

1[root@localhost test]# /opt/soft/elk/logstash/bin/logstash -f /opt/soft/elk/test/test.conf --path.data=/opt/soft/elk/test/tmp

输入如下日志内容:

 12021-01-05 02:50:45.143 [Thread-2] INFO c.c.i.s.CarPositionCurrentSaveService:53 - MYSQL开始批量更新车辆最新状态53条.(遍历2886条位置,耗时3毫秒)
 22021-01-05 02:50:45.203 [Thread-2] INFO c.c.i.s.CarPositionCurrentSaveService:65 - MYSQL完成批量更新车辆最新状态53条,耗时63毫秒
 32021-01-05 02:50:46.929 [RxNewThreadScheduler-5] ERROR c.i.c.write.events.WriteErrorEvent:58 - The error occurred during writing of data
 4com.influxdb.exceptions.InternalServerErrorException: unexpected error writing points to database: engine: cache-max-memory-size exceeded: (1073741891/1073741824)
 5	at com.influxdb.internal.AbstractRestClient.responseToError(AbstractRestClient.java:118)
 6	at com.influxdb.client.internal.AbstractWriteClient.toInfluxException(AbstractWriteClient.java:574)
 7	at com.influxdb.client.internal.AbstractWriteClient.lambda$new$12(AbstractWriteClient.java:181)
 8	at io.reactivex.internal.subscribers.LambdaSubscriber.onNext(LambdaSubscriber.java:65)
 92021-01-06 17:28:35.006 [scheduling-1] INFO c.c.i.s.CarPositionHistorySaveService:47 - InfluxDB批量保存车辆历史状态线程-7,缓存大小0,共入库0条
102021-01-06 17:28:35.006 [scheduling-1] INFO c.c.i.s.CarPositionHistorySaveService:47 - InfluxDB批量保存车辆历史状态线程-8,缓存大小0,共入库0条
112021-01-06 17:28:35.619 [main] INFO o.s.b.a.l.ConditionEvaluationReportLoggingListener:142 - Error starting ApplicationContext. To display the conditions report re-run your application with 'debug' enabled.
122021-01-06 17:28:35.628 [main] ERROR o.s.boot.SpringApplication:858 - Application run failed
13java.lang.IllegalStateException: Failed to execute ApplicationRunner
14	at org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:807)
15	at org.springframework.boot.SpringApplication.callRunners(SpringApplication.java:794)
16	at org.springframework.boot.SpringApplication.run(SpringApplication.java:324)
17	at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
18	at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
19	at com.copote.idc.Application.main(Application.java:41)
20Caused by: java.net.ConnectException: Connection refused
21	... 11 common frames omitted
222021-01-06 17:28:35.873 [main] INFO o.s.s.c.ThreadPoolTaskScheduler:208 - Shutting down ExecutorService 'taskScheduler'
232021-01-06 17:28:35.876 [main] INFO o.s.s.c.ThreadPoolTaskExecutor:208 - Shutting down ExecutorService 'carPositionHistorySaveMysqlExecutor'
242021-01-06 17:28:35.876 [main] INFO o.s.s.c.ThreadPoolTaskExecutor:208 - Shutting down ExecutorService 'carPositionHistorySaveInfluxdbExecutor'