Building an EFK Log Management System with Fluentd Instead of Logstash


There are several alternatives to logstash, the most common being fluentd and Filebeat. This article uses fluentd in place of logstash for log collection. For installing elasticsearch and kibana, see the earlier article on building an ELK log analysis platform cluster; the sections below cover installing and configuring fluentd.


1. Run the following command on every node whose logs need to be collected (it installs td-agent, the packaged distribution of fluentd):

curl -L https://toolbelt.treasuredata.com/sh/install-redhat-td-agent2.sh | sh
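Once the installer finishes, you can confirm the agent is in place and start it; td-agent --version prints the bundled fluentd version, and the init script is installed by the RPM package:

td-agent --version

/etc/init.d/td-agent start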


2. Install the plugins

td-agent-gem install fluent-plugin-elasticsearch

td-agent-gem install fluent-plugin-grep   # filtering plugin
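To confirm both plugins landed in td-agent's bundled Ruby, list them with td-agent-gem:

td-agent-gem list | grep fluent-plugin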


3. Configure td-agent.conf

Note: for Java logs, see the article on multi-line matching and formatting of Java (log4j2) logs with fluentd. The format regex used below cannot match irregular multi-line Java output such as stack traces; the multiline parser is required for that.

Alternatively, replace the format line in the Java log sources below with the following three lines:

 format multiline

 format_firstline /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}/

 format1 /^(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}) (?<level>\S+)\s+\[(?<thread>\S+)\] (?<message>.*)/
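Put together, a Java log source using the multiline parser might look like the sketch below; the path, pos_file and tag mirror the bms-api example from the configuration that follows and should be adjusted to your environment:

<source>
  @type tail
  path /usr/local/logs/bms-api.log
  pos_file /var/log/td-agent/bms.api.log.pos
  tag dev.bms-api.log
  format multiline
  format_firstline /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}/
  format1 /^(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}) (?<level>\S+)\s+\[(?<thread>\S+)\] (?<message>.*)/
</source>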

#vim /etc/td-agent/td-agent.conf

Add the following content:

# Java log collection

# Forward the logs to ES for storage, to be queried from kibana

<match dev.**>

 @type elasticsearch

 host gdg-dev

 port 9200

 flush_interval 10s

 index_name ${tag}-%Y.%m.%d

 type_name ${tag}-%Y.%m.%d

 logstash_format true

 logstash_prefix ${tag}

 include_tag_key true

 tag_key @log_name

 <buffer tag, time>

   timekey 1h

 </buffer>

</match>

# Collect and parse the logs; since the log4j2 output is irregular, format can be set to none to skip parsing

<source>

 @type tail

 format /(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d+)\s+(?<level>\w*)\s+(?<thread_id>\S*)\s+(?<message>.*)$/ 

 #time_format %Y-%m-%dT%H:%M:%S.%NZ

 path /usr/local/logs/bms-api.log

 pos_file /var/log/td-agent/bms.api.log.pos

 tag dev.bms-api.log

</source>

<source>

 @type tail

 #time_format %Y-%m-%dT%H:%M:%S.%NZ

 format /(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d+)\s+(?<level>\w*)\s+(?<thread_id>\S*)\s+(?<message>.*)$/ 

 path /usr/local/logs/article-api.log

 pos_file /var/log/td-agent/article.api.log.pos

 tag dev.article-api.log

</source>

<source>

 @type tail

 format /(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d+)\s+(?<level>\w*)\s+(?<thread_id>\S*)\s+(?<message>.*)$/ 

 #time_format %Y-%m-%dT%H:%M:%S.%NZ

 path /usr/local/logs/platform-api.log

 pos_file /var/log/td-agent/platform.api.log.pos

 read_from_head true

 tag dev.platform-api.log

</source>

<source>

 @type tail

 format /(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d+)\s+(?<level>\w*)\s+(?<thread_id>\S*)\s+(?<message>.*)$/ 

 #time_format %Y-%m-%dT%H:%M:%S.%NZ

 path /usr/local/logs/fileserver-api.log

 pos_file /var/log/td-agent/fileserver.api.log.pos

 read_from_head true

 tag dev.fileserver-api.log

</source>
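If no data shows up in elasticsearch, td-agent's own log is the first place to look; parse failures and connection errors are reported there. The RPM installation writes it to /var/log/td-agent/td-agent.log by default:

tail -f /var/log/td-agent/td-agent.log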


# nginx log collection

<source>

@type tail

#format nginx

format /^(?<remote>[^ ]*) (?<host>[^ ]*) (?<user>[^ ]*) \[(?<request_time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^\"]*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>[^\"]*)" "(?<agent>[^\"]*)"(?:\s+(?<http_x_forwarded_for>[^ ]+))?)?$/

time_format %d/%b/%Y:%H:%M:%S %z

path /var/log/nginx/*web.access.log

pos_file /var/log/td-agent/nginx.test.web.access.log.pos

read_from_head true

tag nginx.test.web.access

</source>

<source>

@type tail

#format nginx

format /^(?<remote>[^ ]*) (?<host>[^ ]*) (?<user>[^ ]*) \[(?<request_time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>[^\"]*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>[^\"]*)" "(?<agent>[^\"]*)"(?:\s+(?<http_x_forwarded_for>[^ ]+))?)?$/

path /var/log/nginx/*web.access.log

time_format %d/%b/%Y:%H:%M:%S %z

pos_file /var/log/td-agent/nginx.test.api.access.log.pos

read_from_head true

tag nginx.test.api.access

</source>

<source>

@type tail

format /^(?<request_time>\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}) \[(?<log_level>\w+)\] (?<pid>\d+).(?<tid>\d+): (?<message>.*)$/

path /var/log/nginx/*web.error.log

pos_file /var/log/td-agent/nginx.test.web.error.log.pos

read_from_head true

tag nginx.test.web.error

</source>

<match nginx.**>

@type elasticsearch

host gdg-test

port 9200

flush_interval 10s

index_name ${tag}-%Y.%m.%d

type_name ${tag}-%Y.%m.%d

logstash_format true

logstash_prefix ${tag}

#include_tag_key true

#tag_key @log_name

<buffer tag, time>

timekey 1h

</buffer>

</match>


After the configuration is in place, restart fluentd:

/etc/init.d/td-agent restart

After a while, check that the log indices have appeared in ES.
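You can query the indices directly from the Elasticsearch REST API instead of waiting for kibana (gdg-dev is the ES host configured above):

curl -XGET 'gdg-dev:9200/_cat/indices?v'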


Then create the index patterns in kibana to analyze and browse the logs.



4. Configuration notes

# Send the logs to elasticsearch for storage

# <match test**> matches tags beginning with test, with any number of parts after the prefix

# If you need per-file statistics, configure multiple <match> and <source> blocks, distinguished by tag, index_name and type_name

# elasticsearch plugin settings: see https://docs.fluentd.org/v1.0/articles/out_elasticsearch#index_name-(optional)

<match test.article>

 @type elasticsearch

 host elasticsearch_host

 port 9200

 flush_interval 10s

 logstash_format true  # when enabled, the index name becomes <prefix>-<date> instead of the index_name value, and an @timestamp field recording the read time is added to each document

 logstash_prefix ${tag}  # with this set, indices are named <tag>-<date>

 index_name ${tag}-%Y.%m.%d

 type_name  ${tag}-%Y.%m.%d

 include_tag_key true  # write the tag into ES as a field

 tag_key @log_name

<buffer tag, time>  # required so the %Y.%m.%d placeholders in index_name ${tag}-%Y.%m.%d take effect

   timekey 1h

 </buffer>

</match>

# Parsing and formatting the collected logs

# Sample log line: 2018-12-27 20:41:40,649 ERROR [http-nio-8076-exec-1] com.amd5.community.reward.service.impl.MallServiceImpl.getChosenConsigneeAddress:546 - Not add consignee address about user[id=123] yet

# The format parameter accepts none (no parsing), regexp, apache2, apache_error, nginx, syslog, tsv, csv, ltsv, json or multiline; see https://docs.fluentd.org/v1.0/articles/parser_nginx

# Regular expression tester: http://fluentular.herokuapp.com

<source>

 @type tail

 format /(?<access_time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d+)\s+(?<level>\w*)\s+(?<thread_id>\S*)\s+(?<message>.*)$/ 

 path /var/log/amd5.java.log  # make sure fluentd has permission to read this log; without it td-agent raises an error. One option is to add the td-agent user to the log file's group

 pos_file /var/log/td-agent/dev.api.log.pos  # the last read position is recorded here; create this file in advance and change its owner to td-agent (see the commands after this block)

 read_from_head true  # read the log file from the beginning

 time_format %Y-%m-%dT%H:%M:%S.%NZ

 tag test.article

</source>
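The pos_file mentioned in the comments above can be prepared with something like the following; the path matches this example, and td-agent:td-agent is the user and group created by the RPM package:

touch /var/log/td-agent/dev.api.log.pos

chown td-agent:td-agent /var/log/td-agent/dev.api.log.pos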

# Filter the collected logs. The example below only keeps entries whose level field contains ERROR and whose message field contains error (multiple <regexp> sections are combined with AND); see https://docs.fluentd.org/v0.12/articles/filter_grep

# To add or remove record fields, use @type record_transformer (a sketch follows the grep example below); see https://docs.fluentd.org/v1.0/articles/filter_record_transformer

<filter log**>

 @type grep

 <regexp>

   key level

   pattern ERROR

 </regexp>

 <regexp>

   key message

   pattern error

 </regexp>

</filter>
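As noted above, adding or removing fields is done with the record_transformer filter rather than grep. A minimal sketch, following the pattern from the official docs, that stamps every record with the collecting host's name (the hostname field name is an arbitrary choice):

<filter log**>
  @type record_transformer
  <record>
    hostname "#{Socket.gethostname}"
  </record>
</filter>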

5. Useful commands

# Add an index template; it applies to all indices matching test*

curl -H "Content-Type: application/json" -XPOST master:9200/_template/duoyun_test -d'

{

 "template": "test*",

 "order":    1,

 "settings": {

   "number_of_shards": 1

 },

 "mappings": {

   "test*" : {

     "properties": {

       "@timestamp":{

         "type":"date"

     }

    }

   }

  }

}'
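To verify the template was stored as expected, fetch it back by name:

curl -XGET 'master:9200/_template/duoyun_test?pretty'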

# Delete all indices

curl -XDELETE -u elastic:changeme gdg-dev:9200/_all  

# Delete a specific index

curl -XDELETE -u elastic:changeme gdg-dev:9200/test-2019.01.08 

# Delete custom templates

curl -XDELETE master:9200/_template/temp*

# View the template_1 template

curl -XGET master:9200/_template/template_1

# Get details of a specific index

curl -XGET 'master:9200/system-syslog-2018.12?pretty'

# Get details (mappings) of all indices

curl -XGET master:9200/_mapping?pretty
