Aggregation filter half work

Hi, i need to aggregate 6 log file , i have 6 grok parser, each for one of my logs. This is my conf

filter {
  if [type] == "application_log" {
    grok {
       match => [ "message", "%{DATA:jcaption_id}\s+%{TIME:orario}[\s]+%{LOGLEVEL:log_level}[\s]+\[%{USERNAME:class}\][\s]+\[?%{USERNAME:correlation_id}][\s]+\[COUNT]\s+\[SQL]+%{GREEDYDATA:sqlcount}" ]
      match => [ "message", "%{DATA:jcaption_id}\s+%{TIME:orario}[\s]+%{LOGLEVEL:log_level}[\s]+\[%{USERNAME:class}\][\s]+\[?%{USERNAME:correlation_id}][\s]+\[COUNT]\s+\[PARAM][\s]+%{GREEDYDATA:sqlcount_param}" ]
      match => [ "message", "%{DATA:jcaption_id}\s+%{TIME:orario}[\s]+%{LOGLEVEL:log_level}[\s]+\[%{USERNAME:class}\][\s]+\[RICERCA][\s]+\[?%{USERNAME:correlation_id}][\s]+\[COUNT][\s]+%{DATA:}:[\s]+%{BASE10NUM:sqlcount_time:float}" ]
      match => [ "message", "%{DATA:jcaption_id}\s+%{TIME:orario}[\s]+%{LOGLEVEL:log_level}[\s]+\[%{USERNAME:class}\][\s]+\[?%{USERNAME:correlation_id}][\s]+\[FETCH]\s+\[SQL][\s]+%{GREEDYDATA:sqlfetch}" ]
      match => [ "message", "%{DATA:jcaption_id}\s+%{TIME:orario}[\s]+%{LOGLEVEL:log_level}[\s]+\[%{USERNAME:class}\][\s]+\[?%{USERNAME:correlation_id}][\s]+\[FETCH]\s+\[PARAM][\s]+%{GREEDYDATA:sqlfetch_param}" ]
      match => [ "message", "%{DATA:jcaption_id}\s+%{TIME:orario}[\s]+%{LOGLEVEL:log_level}[\s]+\[%{USERNAME:class}\][\s]+\[RICERCA][\s]+\[?%{USERNAME:correlation_id}][\s]+\[FETCH][\s]+%{DATA:}:[\s]+%{BASE10NUM:sqlfetch_time:float}" ]
    }
    date {
        match => ["orario","YYYY-MM-dd HH:mm:ss,SSS","YYYY-MM-d HH:mm:ss,SSS","MM-dd-YY HH:mm:ss" ,"MMddYY HH:mm:ss","MMddyy HH:mm:ss","MM-dd-yy HH:mm:ss"]
        target => "orario"
         }

   if [sqlcount] {
     aggregate {
       task_id => "%{correlation_id}"
       code => "map['execution_time_temp'] = 0"
       map_action => "create"
     }
   }

   if [sqlcount_param] {
     aggregate {
       task_id => "%{correlation_id}"
       code => "event.set('sqlcount_param', map['sqlcount_param'])"
       map_action => "update"
     }
   }

   if [sqlcount_time] {
     aggregate {
       task_id => "%{correlation_id}"
       code => "map['execution_time_temp'] += event.get('sqlcount_time')"
       map_action => "update"
     }
   }

   if [sqlfetch] {
     aggregate {
       task_id => "%{correlation_id}"
       code => ""
       map_action => "update"
     }
   }

   if [sqlfetch_param] {
     aggregate {
       task_id => "%{correlation_id}"
       code => ""
       map_action => "update"
     }
   }

   if [sqlfetch_time] {
     aggregate {
       task_id => "%{correlation_id}"
       code => "map['execution_time_temp'] += event.get('sqlfetch_time')"
       map_action => "update"
     }
   }

   if [sqlfetch_time] {
     aggregate {
       task_id => "%{correlation_id}"
       code => "event.set('execution_time', map['execution_time_temp'])"
       map_action => "update"
       end_of_task => true
       timeout => 120
     }
   }

   if !("" in [correlation_id]) {
     drop { }
   }
  }
}

If i check kibana i see 6 log, not aggregated , but i see in the last log, the sum of my sqlcount_time and sqlfetch_time.
This are my type of log i need to aggregate :

[] 09:38:24,510 INFO  [RicercaRichiestaManagerBean][0159351239239862] [COUNT] [SQL] SELECT COUNT(ID) FROM RichiestaLight r  WHERE  r.idRichiesta =:id
[] 09:38:24,521 INFO  [RicercaRichiestaManagerBean][0159351239239862] [COUNT] [PARAM] key: id value: 000052192988
[] 09:38:24,790 INFO  [RicercaRichiestaManagerBean][RICERCA] [0159351239239862] [COUNT]  execution time: 0.269 seconds
[] 09:38:24,825 INFO  [RicercaRichiestaManagerBean][0159351239239862] [FETCH] [SQL] SELECT r FROM RichiestaLight r  WHERE  r.idRichiesta =:id  AND  ROWNUM <= 500 ORDER BY r.dataInserimento DESC
[] 09:38:24,832 INFO  [RicercaRichiestaManagerBean][0159351239239862] [FETCH] [PARAM] key: id value: 000052192988
[] 09:38:25,166 INFO  [RicercaRichiestaManagerBean][RICERCA] [0159351239239862] [FETCH]  execution time: 0.334 seconds

After aggregation, i need to have 1 line with :
09:38:24,832 INFO [RicercaRichiestaManagerBean][0159351239239862] SELECT COUNT(ID) FROM RichiestaLight r WHERE r.idRichiesta =:id key: id value: 000052192988 execution time: 0.269 seconds SELECT r FROM RichiestaLight r WHERE r.idRichiesta =:id AND ROWNUM <= 500 ORDER BY r.dataInserimento DESC key: id value: 000052192988 execution time: 0.334 seconds

What issues did you have with the answer I posted the last time you asked this question?

I change grok parser, i can't use unique grok parser therefore i I made 6 grok patterns, one specific for each log line.

If i start like this, in my Kibana i see all my logs and in the last log i see the execution_time filed with the sum. I don't see any aggregation

Could you help me please? @Badger

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.