Tried in both Kibana 4.4.1 and 4.4.2. Kibana is not recognizing the field as an integer; I also tried float.
However, Logstash itself is processing the value as an integer. I have also tried mutating it into a different field, but got the same result.
input {
  file {
    # Tail every replication-lag file in the directory.
    # FIX: the original path "/u01/MyLagInfo/.txt." had no filename stem and a
    # trailing dot, so it matched nothing useful. The path-grok in the filter
    # section expects files named "<SrvName>.txt", hence the glob below.
    path => "/u01/MyLagInfo/*.txt"
    type => "MyLagInfo"
    # Uncomment to ignore the sincedb and re-read files from the start on
    # every restart (useful while testing, not in production).
    #sincedb_path => "/dev/null"
  }
}
filter {
  # Exact type comparison; the input sets type to exactly "MyLagInfo",
  # so a regex match (=~) was unnecessary.
  if [type] == "MyLagInfo" {
    # Derive the server name from the file path: ".../<SrvName>.txt".
    grok {
      match => { "path" => "%{GREEDYDATA}/%{GREEDYDATA:SrvName}.txt" }
    }
    # Parse the pipe-delimited line. The ":int" suffix makes grok emit
    # RepLag as a real integer instead of a string.
    # FIX: removed the %{COMBINEDAPACHELOG} pattern — it can never match
    # this pipe-delimited format and only caused a wasted match attempt
    # before the real pattern ran.
    grok {
      match => { "message" => "%{GREEDYDATA:SchemaName}\|%{GREEDYDATA:DestinationTS}\|%{GREEDYDATA:SourceTS}\|%{NUMBER:RepLag:int}" }
    }
    # FIX: the original first date filter had no target, so it wrote
    # @timestamp and was immediately overwritten by the next date filter
    # (dead work). Parse SourceTS into its own field instead.
    # NOTE(review): this changes SourceTS from string to timestamp —
    # confirm that is the intent.
    date {
      match => [ "SourceTS", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss:SSS" ]
      target => "SourceTS"
    }
    # DestinationTS drives the event timestamp.
    date {
      match => [ "DestinationTS", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss:SSS" ]
      target => "@timestamp"
    }
    # Belt-and-braces conversion in case the grok cast did not run.
    # NOTE(review): if Elasticsearch already mapped RepLag (or an earlier
    # field) as a string, converting here cannot change the existing
    # mapping — delete/reindex the index and refresh the Kibana field
    # list before the field will show up as a number.
    mutate {
      convert => { "RepLag" => "integer" }
    }
  }
}
output {
# Dump every event to the console in full, for debugging the pipeline.
stdout { codec => rubydebug }
elasticsearch {
hosts => ["localhost:9200"]
# NOTE(review): if the custom index is re-enabled, the name must be all
# lowercase — Elasticsearch rejects uppercase index names like "MyLag-...";
# use "mylag-%{+YYYY.MM.dd}" instead.
#index => "MyLag-%{+YYYY.MM.dd}"
#template => "/u01/logstash/template/replatency_index_template.json"
#template_overwrite => true
}
}