Hello,
I have a pipeline that reads from Postgres and creates an index for every event. It is really hard to manage which events will occur and which indices will be created. Is there a way to write a template that the pipeline will use when creating each new index, and that assigns the index to an index lifecycle policy?
Below is my pipeline configuration:
# Polls Postgres every minute and emits one Logstash event per new row
# of T_AUDIT_LOG_EVENT (rows with id greater than the last seen value).
input {
  jdbc {
    # NOTE(review): the "Java::" prefix is a legacy workaround; recent
    # logstash-integration-jdbc versions accept plain "org.postgresql.Driver".
    # Confirm against the Logstash version in use before changing.
    jdbc_driver_class => "Java::org.postgresql.Driver"
    jdbc_connection_string => "jdbc:postgresql://db_ip:5432/db_name"
    jdbc_user => "user"
    # Resolved from the Logstash keystore / environment.
    jdbc_password => "${userpass}"
    # Incremental fetch: :sql_last_value is the last tracked id.
    statement => "select * from T_AUDIT_LOG_EVENT where id > :sql_last_value"
    use_column_value => true
    # Quoted for consistency with the other option values (barewords parse,
    # but quoted strings are the documented form).
    tracking_column => "id"
    tracking_column_type => "numeric"
    # Uncomment to re-ingest from scratch (resets the tracked value).
    #clean_run => true
    # Run the statement once per minute.
    schedule => "* * * * *"
    # Where the last seen id is persisted between runs.
    last_run_metadata_path => "/u01/data/ls_logs/dev/.logstash_jdbc_last_run"
  }
}
filter {
# Parse the JSON payload stored in the row's "value" column into the
# [event] field.
json {
source => "value"
target => "event"
}
# Copy the event type into @metadata so the output can use it in the
# index name without it being indexed as part of the document.
mutate {
add_field => {
"[@metadata][event_type]" => "%{[event][event_type]}"
}
}
# Kept as a SEPARATE mutate on purpose: within a single mutate block,
# add_field is applied after lowercase, so merging these two would try to
# lowercase a field that does not exist yet. Do not combine them.
mutate {
lowercase => ["[@metadata][event_type]"]
}
# Use the event's own timestamp (interpreted in Europe/Warsaw) as
# @timestamp instead of the ingestion time.
date {
timezone => "Europe/Warsaw"
match => ["[event][event_date]", "YYYY-MM-dd HH:mm:ss.SSS", "ISO8601"]
target => "@timestamp"
}
}
output {
  elasticsearch {
    # Array form is the documented shape for hosts (a single string also works).
    hosts => ["elastic_ip:9200"]
    # Quoted: bareword values parse but quoted strings are the documented form.
    user => "elasticuser"
    password => "${elasticuserpass}"
    # One index per event type, e.g. dev-focus-audit-log--login.
    #
    # Because the index name is dynamic (sprintf), the ilm_enabled /
    # ilm_rollover_alias options of this output cannot be used here.
    # To have every new per-type index pick up mappings, settings, and an
    # ILM policy automatically, create a composable index template in
    # Elasticsearch with:
    #   index_patterns: ["dev-focus-audit-log--*"]
    #   settings: { "index.lifecycle.name": "<your-policy-name>" }
    # Each index this output creates will then match the template and be
    # managed by the policy with no pipeline changes.
    index => "dev-focus-audit-log--%{[@metadata][event_type]}"
    # Row id as document id makes re-runs idempotent (upserts, no duplicates).
    document_id => "%{id}"
  }
  # Uncomment for local debugging of the emitted events.
  # stdout {
  #   codec => "rubydebug"
  # }
}