Fixing duplicate log entries generated by Logstash

When many threads (or shipper retries) send the same event, Logstash can index the same message into Elasticsearch more than once. The pipeline below avoids this by deriving a deterministic document_id from a fingerprint of the message plus the event timestamp, so a re-sent event overwrites the existing document instead of creating a duplicate.

input {
    # Receive JSON-encoded log events over TCP on port 5000
    tcp {
        port => 5000
        codec => json {
            charset => "UTF-8"
        }
    }
}
## Add your filters / logstash plugins configuration here
filter {
  # Split the raw line into a timestamp and the rest of the message
  dissect {
    mapping => {
      "message" => "%{ts} %{msg}"
    }
  }
  # Use the parsed timestamp as the event's @timestamp
  date {
    match => [ "ts", "ISO8601" ]
  }
  # Hash the full message; stored under @metadata so it is not indexed as a field
  fingerprint {
    source => "message"
    target => "[@metadata][fingerprint]"
    method => "MD5"
    key => "test"
  }
  # Prefix the id with the event's epoch seconds in hex to keep ids time-ordered
  ruby {
    code => "event.set('[@metadata][prefix]', event.get('@timestamp').to_i.to_s(16))"
  }
}
output {
    elasticsearch {
        hosts => "elasticsearch:9200"
        user => "elastic"
        password => "1234qwert"
        # Deterministic id: a duplicate event produces the same id and overwrites
        # the existing document instead of being indexed a second time
        document_id => "%{[@metadata][prefix]}%{[@metadata][fingerprint]}"
    }
    stdout { codec => rubydebug { metadata => true }}
}
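
To make the deduplication concrete, here is a minimal sketch (not from the original post) of how the document_id above is composed, assuming the fingerprint filter produces an HMAC-MD5 hex digest of the message when a key is configured: the epoch seconds of @timestamp rendered in hex are concatenated with that digest, so the same line re-sent with the same timestamp always maps to the same Elasticsearch _id. The sample message and timestamp are hypothetical.

import hashlib
import hmac
from datetime import datetime, timezone

# Hypothetical sample values, mirroring the filter chain above
message = "2021-01-27T10:15:30.000Z something happened"
ts = datetime(2021, 1, 27, 10, 15, 30, tzinfo=timezone.utc)  # what the date filter would parse

# ruby filter: epoch seconds of @timestamp, rendered in hex
prefix = format(int(ts.timestamp()), "x")

# fingerprint filter: assumed HMAC-MD5 of the whole message with key "test"
fingerprint = hmac.new(b"test", message.encode("utf-8"), hashlib.md5).hexdigest()

# document_id as assembled by the elasticsearch output
document_id = prefix + fingerprint
print(document_id)  # the same input line yields the same _id, so duplicates collapse into one document

With this scheme, re-indexing a duplicate simply bumps the document's _version in Elasticsearch rather than adding another copy.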
Original post: https://www.cnblogs.com/fat-girl-spring/p/14335735.html