自动化运维之日志系统Logstash实践(九)

7.案例logstash写入elasticsearch

数据直接写入elasticsearch中(适合日志量不大、无需Redis做缓冲队列的场景)

 
  1. [root@linux-node3 conf.d]# cat input_file_output_es.conf
  2. input {
  3. #system
  4. syslog {
  5. type => "system_rsyslog"
  6. host => "192.168.90.203"
  7. port => "514"
  8. }
  9. #java
  10. file {
  11. path => "/var/log/elasticsearch/xuliangwei.log"
  12. type => "error_es"
  13. start_position => "beginning"
  14. codec => multiline {
  15. pattern => "^["
  16. negate => true
  17. what => "previous"
  18. }
  19. }
  20. #nginx
  21. file {
  22. path => "/var/log/nginx/access_json.log"
  23. type => "access_nginx"
  24. codec => "json"
  25. start_position => "beginning"
  26. }
  27. }
  28. output {
  29. #多行文件判断
  30. if [type] == "system_rsyslog" {
  31. elasticsearch {
  32. hosts => ["192.168.90.201:9200","192.168.90.202:9200"]
  33. index => "system_rsyslog_%{+YYYY.MM}"
  34. }
  35. }
  36. if [type] == "error_es" {
  37. elasticsearch {
  38. hosts => ["192.168.90.201:9200","192.168.90.202:9200"]
  39. index => "error_es_%{+YYYY.MM.dd}"
  40. }
  41. }
  42. if [type] == "access_nginx" {
  43. elasticsearch {
  44. hosts => ["192.168.90.201:9200","192.168.90.202:9200"]
  45. index => "access_nginx_%{+YYYY.MM.dd}"
  46. }
  47. }
  48. }
原文地址:https://www.cnblogs.com/chenshengqun/p/8011910.html