Import aggregate data for each record

Hi,

I am trying to achieve the following search result:
{
"hits": {
"total": 148178,
"max_score": 1.0,
"hits": [
{
"_index": "ofc",
"_type": "pers_desc",
"_id": "141234",
"_score": 1.0,
"_routing": "141234",
"_parent": "141234",
"_source": {
"user_id": "141234",
"@version": "1",
"@timestamp": "2018-01-09T18:42:11.301Z",
"pdr": [
{
"gender": "MALE",
"date_of_birth": "1965-04-20",
"eye_color": "BLUE",
"hair_color": "BLACK"
}
],
"tags": [
"aggregated"
]
}
}
]
}
}

And my aggregate filter config file is as follows:
input {
jdbc {
jdbc_driver_library => "C:\cfg\ojdbc6.jar"
jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
jdbc_connection_string => "<<>>"
jdbc_user => "<<>>"
jdbc_password => "<<>>"
statement_filepath => "query.sql"
}
}

filter {
aggregate {
task_id => "%{user_id}"
code =>
"
map['pdr'] ||= [];
map['pdr'] << {
'date_of_birth' => event.get('date_of_birth'),
'eye_color' => event.get('eye_color'),
'gender' => event.get('gender'),
'hair_color' => event.get('hair_color')
}
event.cancel()
"
push_previous_map_as_event => true
timeout => 3
add_tag => [ "aggregate" ]
}
}

output {
stdout {
codec => rubydebug
}

}

Please, can someone help me achieve this? I am new to Elasticsearch and Logstash.

Thank you.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.