logstash 实现数据源分流

logstash + filebeat 将来源不同的数据分发到不同的地方

logstash配置文件:first-pipeline.conf

 1 input {
 2   beats {
 3     port => 5044
 4   }
 5 }
 6 filter {
 7     if "backend" in  [tag] {   # 如果tags等于backend,就进行数据切割
 8     grok {
 9                 match => ["message", "%{TIMESTAMP_ISO8601:datetime}"]
10         }
11     mutate {
12         split => ["message","|"]
13     }
14     mutate {
15               add_field => {
16               "userID" => "%{[message][3]}"
17               "level" => "%{[message][2]}"
18               "log_path" => "%{[message][1]}"
19               "function" => "%{[message][4]}"
20               "dev_message" => "%{[message][5]}"
21                }
22         }
23     mutate {
24         join => ["message", "|"]
25     }
26     }
27 }
28 output {
29     if "nginx" in [tags]    # 如果tags是nginx,就输出到终端
30     {
31     stdout { codec => rubydebug }   # 输出到终端调试使用
32     }
33 }

filebeat收集配置文件

filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - /var/log/nginx/*.log
    # Logstash uses the [tags] value to decide where the data came from.
    tags: ["nginx"]

output.logstash:
  # Must match the beats input port in first-pipeline.conf.
  hosts: ["127.0.0.1:5044"]
原文地址:https://www.cnblogs.com/wzy23/p/13384508.html