Logstash: Writing Logs to Kafka and Reading Logs from Kafka
Configure Logstash to collect nginx logs and write them to Kafka
First change the nginx log format so access logs are emitted as JSON: [modifying the nginx log format](https://blog.51cto.com/9025736/2373483)

```
input {
  file {
    type => "nginx-access"
    path => "/data/wwwlogs/access_nginx.log"
    start_position => "beginning"
    codec => json
  }
  file {
    path => "/var/log/messages"
    start_position => "beginning"
    type => "system-log-252"
  }
}

output {
  if [type] == "nginx-access" {
    kafka {
      bootstrap_servers => "192.168.1.252:9092"   # Kafka broker address
      topic_id => "252nginx-accesslog"
      batch_size => 5
      codec => "json"   # write as JSON, since Logstash turns collected events into JSON
    }
  }
  if [type] == "system-log-252" {
    kafka {
      bootstrap_servers => "192.168.1.252:9092"
      topic_id => "system-log-252"
      batch_size => 5
      codec => "json"   # write as JSON, since Logstash turns collected events into JSON
    }
  }
}
```
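Before wiring up the consumer side, it is worth confirming that events actually reach Kafka. A minimal check with Kafka's bundled console tools, assuming they live under /usr/local/kafka/bin on the broker host (adjust the path for your installation):

```bash
# List topics to confirm they exist (Kafka 2.2+; older releases use --zookeeper instead)
/usr/local/kafka/bin/kafka-topics.sh --bootstrap-server 192.168.1.252:9092 --list

# Tail the nginx access-log topic; each message should be a single JSON event
/usr/local/kafka/bin/kafka-console-consumer.sh \
  --bootstrap-server 192.168.1.252:9092 \
  --topic 252nginx-accesslog \
  --from-beginning
```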
Configure Logstash to read logs from Kafka and index them into Elasticsearch
```
input {
  kafka {
    bootstrap_servers => "192.168.1.252:9092"   # Kafka broker address
    topics => ["252nginx-accesslog"]
    codec => "json"   # decode the JSON that the shipper side wrote
    group_id => "252nginx-access-log"
    consumer_threads => 1
    decorate_events => true
  }
  kafka {
    bootstrap_servers => "192.168.1.252:9092"
    topics => ["system-log-252"]
    consumer_threads => 1
    decorate_events => true
    codec => "json"
  }
}

output {
  # The type field set on the shipper side survives the JSON round trip,
  # so route on the original type values rather than the topic names.
  if [type] == "nginx-access" {
    elasticsearch {
      hosts => ["192.168.1.252:9200"]
      index => "252nginx-accesslog-%{+YYYY.MM.dd}"
    }
  }
  if [type] == "system-log-252" {
    elasticsearch {
      hosts => ["192.168.1.252:9200"]
      index => "system-log-1512-%{+YYYY.MM.dd}"
    }
  }
}
```
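A quick way to sanity-check this pipeline is to syntax-test the configuration and then confirm the daily indices appear in Elasticsearch. A sketch; the file paths below are assumptions for a package-based Logstash install:

```bash
# Validate the pipeline configuration without starting Logstash
/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/kafka-to-es.conf --config.test_and_exit

# Once Logstash has been consuming for a while, the indices should show up
curl 'http://192.168.1.252:9200/_cat/indices?v' | grep -E '252nginx-accesslog|system-log'
```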