I am trying to aggregate data by componentname and count how many times each component appears in the log.
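For context, a log line in this format looks roughly like the following (the values below are just an illustration, matching the first grok pattern in the config):

152;2017-09-11 00:44:12.345;jdoe;com.example.api.OrderService;SUCCESS;OrderComponent;0;12;ONLINE;NONE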
input {
  beats {
    type => "beats"
    port => 5044
  }
}
filter {
  if [type] == "beats" {
    grok {
      break_on_match => true
      keep_empty_captures => false
      match => {
        "message" => [
          "%{NUMBER:threadid};%{TIMESTAMP_ISO8601:eventutctime};%{DATA:username};%{JAVACLASS:apiname};%{WORD:apireturnstatus};%{WORD:componentname};%{BASE10NUM:customisation-level};%{BASE10NUM:cputimeinmillisec};%{WORD:runmode};%{WORD:errorcodeiffailure}",
          "%{NUMBER:threadid};%{TIMESTAMP_ISO8601:eventutctime};%{DATA:username};%{JAVACLASS:apiname};%{WORD:apireturnstatus};%{WORD:componentname};%{BASE10NUM:customisation-level};%{BASE10NUM:cputimeinmillisec};%{WORD:runmode};(%{GREEDYDATA:errorcodeiffailure})\s*%{GREEDYDATA:error_description}"
        ]
      }
      patterns_dir => "C:\DoLense\Patterns\patterns"
    }
    aggregate {
      task_id => "%{username}_%{threadid}"
      # count the events seen for this user/thread task
      code => "map['componentname'] ||= 0; map['componentname'] += 1;"
      push_map_as_event_on_timeout => true
      timeout_task_id_field => "componentname"
      timeout => 300 # 5 minutes
      timeout_tags => ['_aggregatetimeout']
      timeout_code => "event.set('componentname', event.get('componentname') > 1)"
    }
    date {
      match => ["eventutctime", "yyyy-MM-dd HH:mm:ss.SSS"]
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "logstash%{+YYYY.MM.dd.HH.mm}%{type}-%{task_id}"
  }
  stdout { codec => rubydebug }
}
In Elasticsearch the index name comes through with the raw placeholder: instead of the task_id value I get the literal text %{task_id}, not substituted:
logstash2017.09.11.00.44beats-%{task_id}
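I suspect the problem is that task_id is only an option of the aggregate filter and never becomes a field on the event, so %{task_id} has nothing to resolve against. A minimal sketch of what I think I would need instead: copy the same value into a real event field first and reference that field in the index name (the taskid field name below is just my own placeholder):

filter {
  mutate {
    # hypothetical field; holds the same value used as the aggregate task_id
    add_field => { "taskid" => "%{username}_%{threadid}" }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    # taskid now exists on the event, so %{taskid} should be substituted
    # (note: Elasticsearch index names must be lowercase)
    index => "logstash%{+YYYY.MM.dd.HH.mm}%{type}-%{taskid}"
  }
}

Is this the right approach, or is there a way to reference the aggregate task_id directly in the index name?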