Hi,
I have had success pushing logs from Filebeat to Elasticsearch and visualising them in Grafana, but when I introduced Logstash between Filebeat and Elasticsearch, the graphs stopped working. In Grafana I get this error:
Fielddata is disabled on text fields by default. Set fielddata=true on [nginx.access.response_code] in order to load fielddata in memory by uninverting the inverted index. Note that this can however use significant memory. Alternatively use a keyword field instead.
My pipeline:
input {
  beats {
    port => "5044"
  }
}
filter {
  if [fileset][module] == "nginx" {
    if [fileset][name] == "access" {
      grok {
match => { "message" => ["%{IPORHOST:[nginx][access][http_host]} %{IPORHOST:[nginx][access][remote_ip]} - %{DATA:[nginx][access][user_name]} [%{HTTPDATE:[nginx][access][time]}] "%{WORD:[nginx][access][method]} %{DATA:[nginx][access][url]} HTTP/%{NUMBER:[nginx][access][http_version]}" %{NUMBER:[nginx][access][response_code]} %{NUMBER:[nginx][access][body_sent][bytes]} "%{DATA:[nginx][access][referrer]}" "%{DATA:[nginx][access][agent]}" %{NUMBER:[nginx][access][request_time]} %{NUMBER:[nginx][access][upstream_time]}"] }
        remove_field => "message"
      }
      mutate {
        add_field => { "read_timestamp" => "%{@timestamp}" }
        convert => [ "[nginx][access][response_code]", "integer" ]
        convert => [ "[nginx][access][body_sent][bytes]", "integer" ]
        convert => [ "[nginx][access][request_time]", "float" ]
        convert => [ "[nginx][access][upstream_time]", "float" ]
      }
      date {
        match => [ "[nginx][access][time]", "dd/MMM/YYYY:H:m:s Z" ]
        remove_field => "[nginx][access][time]"
      }
      useragent {
        source => "[nginx][access][agent]"
        target => "[nginx][access][user_agent]"
        remove_field => "[nginx][access][agent]"
      }
      geoip {
        source => "[nginx][access][remote_ip]"
        target => "[nginx][access][geoip]"
      }
    }
    else if [fileset][name] == "error" {
      grok {
match => { "message" => ["%{DATA:[nginx][error][time]} [%{DATA:[nginx][error][level]}] %{NUMBER:[nginx][error][pid]}#%{NUMBER:[nginx][error][tid]}: (*%{NUMBER:[nginx][error][connection_id]} )?%{GREEDYDATA:[nginx][error][message]}"] }
        remove_field => "message"
      }
      mutate {
        rename => { "@timestamp" => "read_timestamp" }
      }
      date {
        match => [ "[nginx][error][time]", "YYYY/MM/dd H:m:s" ]
        remove_field => "[nginx][error][time]"
      }
    }
  }
}
output {
  elasticsearch {
    manage_template => false
    hosts => [ "ELASTICSERVER" ]
  }
}
I can view the logs in Kibana fine, with all the fields I need, so hopefully someone can point me in the right direction.
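One thing I have been wondering about, though I may be wrong: as far as I understand, the error means nginx.access.response_code ended up dynamically mapped as a text field in whatever index Logstash writes to. Since I set manage_template => false and don't specify an index, events presumably go to the default logstash-* indices, where the Filebeat index template doesn't apply. I was considering pointing the output at a Filebeat-style index so the template's mappings get used, roughly like this (just my guess, not tested):

output {
  elasticsearch {
    hosts => [ "ELASTICSERVER" ]
    manage_template => false
    # guess: write into indices matched by the Filebeat index template,
    # so fields like response_code keep the template's mappings instead
    # of being dynamically mapped as text
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
  }
}

Is that the right direction, or am I missing something else?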
Thank you in advance!