Logstash filter fields not loading in Kibana

I thought my knowledge of the ELK stack was pretty good, but this weekend I have tried and tried to figure out what is going on with my deployment. The problem is that the fields my Logstash filters produce never show up in Kibana. In older versions this worked flawlessly, but now it does not. I even forced output to stdout and saw nothing obvious there. I also went back to basics and made a single filter with %{GREEDYDATA} as the one and only pattern; a sketch of that follows.
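The catch-all test filter looked roughly like this (the field name raw_message is just an example):

filter {
  grok {
    # catch-all: copy the whole line into a single field, so it can never fail to match
    match => { "message" => "%{GREEDYDATA:raw_message}" }
  }
}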

Is there something I have missed? All I am trying to capture is an nginx log, so nothing special.

logstash.yml

node.name: erp-mon-1
path.data: /var/lib/logstash
pipeline.workers: 2
pipeline.batch.size: 125
pipeline.batch.delay: 5
pipeline.unsafe_shutdown: false
path.config: /etc/logstash/conf.d/*.conf
config.reload.automatic: true
path.logs: /var/log/logstash
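For anyone reproducing this, the pipeline files can be syntax-checked with the bundled binary; the paths below assume the standard package install:

# parse all pipeline configs and exit, without starting the pipeline
/usr/share/logstash/bin/logstash --path.settings /etc/logstash --config.test_and_exit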

conf.d/01-beats-input

input {
  beats {
    port => 2561
    ssl => true
    ssl_certificate => "/etc/ssl/certs/mon-1.crt"
    ssl_key => "/etc/ssl/private/mon-1.p8"
  }
}
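To rule out the TLS listener itself, it can be probed with openssl; erp-mon-1 as the hostname is an assumption, substitute whatever the Filebeat hosts setting points at:

# shows the certificate chain; a healthy listener completes the TLS handshake
openssl s_client -connect erp-mon-1:2561 -CAfile /etc/ssl/certs/mon-1.crt </dev/null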

conf.d/11-nginx-filter

filter {
  if [program] == "nginx_access" {
    grok {
      patterns_dir => "/etc/logstash/patterns"
      match => { "message" => "%{NGINX_ACCESS}" }
      remove_tag => ["nginx_access", "_grokparsefailure"]
      add_field => {
        "type" => "nginx_access"
      }
      remove_field => ["program"]
    }

    date {
      match => ["time_local", "dd/MMM/YYYY:HH:mm:ss Z"]
      target => "@timestamp"
      remove_field => "time_local"
    }

    useragent {
      source => "user_agent"
      target => "useragent"
      remove_field => "user_agent"
    }
  }

  if [program] == "nginx_error" {
    grok {
      patterns_dir => "/etc/logstash/patterns"
      match => { "message" => "%{NGINX_ERROR}" }
      remove_tag => ["nginx_error", "_grokparsefailure"]
      add_field => {
        "type" => "nginx_error"
      }
      remove_field => ["program"]
    }

    date {
      match => ["time_local", "YYYY/MM/dd HH:mm:ss"]
      target => "@timestamp"
      remove_field => "time_local"
    }
  }
}
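Both branches key on the program field, which I have not yet ruled out as missing on the events; a throwaway check like this would flag that (missing_program is a made-up tag name):

filter {
  # temporary sanity check: tag any event that carries no "program" field at all
  if ![program] {
    mutate { add_tag => ["missing_program"] }
  }
}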

conf.d/30-elastic-output

output {
  if [source] == "/var/log/apache2/ssl_access.log" or [source] == "/var/log/apache2/access.log" or [source] == "/var/log/apache2/ssl_error.log" or [source] == "/var/log/apache2/error.log" {
    elasticsearch {
      hosts => "localhost:9200"
      index => "var_log_apache2-%{+YYYY.MM.dd}"
    }
  } 
  else if [source] == "/var/log/nginx/access.log" or [source] == "/var/log/nginx/error.log" {
    elasticsearch {
      hosts => "localhost:9200"
      index => "var_log_nginx-%{+YYYY.MM.dd}"
    }
  } 
  else {
    elasticsearch {
      hosts => "localhost:9200"
      index => "var_log_comon-%{+YYYY.MM.dd}"
    }
  }
  stdout { codec => rubydebug }
}
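To see whether any documents reach Elasticsearch at all, the daily indices can be listed like this (single local node, as in the output block above):

# a growing docs.count on var_log_nginx-* means the output side works
curl -s 'http://localhost:9200/_cat/indices?v'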

patterns/nginx_access

METHOD (OPTIONS|GET|HEAD|POST|PUT|DELETE|TRACE|CONNECT)
NGINX_ACCESS %{IPORHOST:visitor_ip} - %{USERNAME:remote_user} \[%{HTTPDATE:time_local}\] "%{DATA:server_name}" "%{METHOD:method} %{URIPATHPARAM:path} HTTP/%{NUMBER:http_version}" %{INT:status} %{INT:body_bytes_sent} "%{URI:referer}" %{QS:user_agent}
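For context, NGINX_ACCESS targets a customised nginx log_format (the quoted server_name is not part of the default combined format); a made-up line of the shape it expects would be:

203.0.113.5 - frank [10/Oct/2023:13:55:36 +0000] "example.com" "GET /index.html HTTP/1.1" 200 612 "https://example.com/" "Mozilla/5.0"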

patterns/nginx_error

ERRORDATE %{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{TIME}
NGINX_ERROR %{ERRORDATE:time_local} \[%{LOGLEVEL:level}\] %{INT:process_id}#%{INT:thread_id}: \*(%{INT:connection_id})? %{GREEDYDATA:description} %{IP:ClientIP}
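A quick way to exercise either pattern in isolation is a throwaway stdin pipeline; run it with the bundled binary, paste a raw log line, and inspect the parsed event (test.conf is just a scratch file name):

# run with: /usr/share/logstash/bin/logstash -f test.conf
input { stdin {} }
filter {
  grok {
    patterns_dir => "/etc/logstash/patterns"
    match => { "message" => "%{NGINX_ACCESS}" }
  }
}
output { stdout { codec => rubydebug } }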
