Good morning,
I have an issue with Logstash. I use the JDBC input to collect data from Postgres and then forward it to Elasticsearch. I've noticed that there is a difference between what is forwarded to Elasticsearch (I use Kibana to display indices) and what is in the database.
This is what my configuration looks like:
pipeline.yml
# Pipeline definition for the "dev" JDBC -> Elasticsearch pipeline.
# In pipelines.yml every key belonging to a list entry must be indented under
# its "- pipeline.id" line; flush-left keys do not parse as part of the entry.
- pipeline.id: dev
  path.config: "/u01/app/logstash/logstash-7.4.2/pipelines-config/focus_dev.conf"
  pipeline.workers: 1
  # NOTE(review): batch.size (125) is larger than queue.max_events (100), so a
  # full batch can never accumulate; every batch is flushed partial after
  # pipeline.batch.delay. Consider raising queue.max_events or lowering this.
  pipeline.batch.size: 125
  pipeline.batch.delay: 50
  queue.type: persisted
  # NOTE(review): 1mb pages / 16mb total are far below the documented defaults
  # (64mb / 1024mb); with large rows the persisted queue fills and exerts
  # back-pressure very quickly — confirm these limits are intentional.
  queue.page_capacity: 1mb
  queue.max_events: 100
  queue.max_bytes: 16mb
pipeline config file:
input {
  # Polls Postgres once a minute and forwards new T_AUDIT_LOG_EVENT rows,
  # tracking progress by the numeric "id" column.
  jdbc {
    jdbc_driver_class => "Java::org.postgresql.Driver"
    jdbc_connection_string => "jdbc:postgresql://db_host:db_port/db_name"
    jdbc_user => "focus_dev"
    jdbc_password => "${devpass}"
    #jdbc_default_timezone => "CET"
    #plugin_timezone => "utc"
    # ORDER BY is required: :sql_last_value is taken from the LAST row of the
    # result set, so without a deterministic order the tracker can advance past
    # ids that sorted earlier, and those rows are never fetched again — a classic
    # cause of DB-vs-Elasticsearch count mismatches.
    # NOTE(review): rows committed out of id order (a transaction holding a lower
    # id that commits after the tracker has advanced) are still skipped; verify
    # whether concurrent writers can commit ids out of order.
    statement => "select * from T_AUDIT_LOG_EVENT where id > :sql_last_value order by id asc"
    use_column_value => true
    # tracking_column is a string value and must be quoted.
    tracking_column => "id"
    tracking_column_type => "numeric"
    #clean_run => true
    # Run every minute.
    schedule => "* * * * *"
    last_run_metadata_path => "/u01/app/logstash/logstash_logs/dev/.logstash_jdbc_last_run"
  }
}
filter {
  # Parse the JSON payload stored in the "value" column into [event].
  json {
    source => "value"
    target => "event"
  }
  # Copy the event type into metadata so it can shape the index name without
  # being indexed itself.
  mutate {
    add_field => {
      "[@metadata][event_type]" => "%{[event][event_type]}"
    }
  }
  # If [event][event_type] was missing (e.g. the JSON parse failed), sprintf
  # leaves the literal pattern in place, which would later produce an invalid
  # index name and get the document rejected. Fall back to a safe value.
  if [@metadata][event_type] == "%{[event][event_type]}" {
    mutate {
      replace => { "[@metadata][event_type]" => "unknown" }
    }
  }
  # Kept as a separate mutate on purpose: within a single mutate block,
  # lowercase executes BEFORE add_field, so merging them would break.
  mutate {
    lowercase => ["[@metadata][event_type]"]
  }
  # Use the event's own timestamp (assumed UTC) as @timestamp.
  date {
    timezone => "UTC"
    match => ["[event][event_date]", "YYYY-MM-dd HH:mm:ss.SSS", "ISO8601"]
    target => "@timestamp"
  }
}
output {
  elasticsearch {
    # hosts accepts a list; the list form is the canonical spelling.
    hosts => ["elastic_host:port"]
    # String values must be quoted in the Logstash config language; the
    # original bareword (user => focusaudit) risks a config parse error.
    user => "focusaudit"
    password => "${focauditpass}"
    # event_type is set by the json filter. If the parse fails and the sprintf
    # stays un-substituted, this index name is invalid (%, {, uppercase) and
    # the documents are rejected by Elasticsearch — check the Logstash log for
    # indexing errors when counts diverge from the database.
    index => "dev-focus-audit-log--%{[@metadata][event_type]}"
    #document_type => "audit-log-events"
    # Keyed on the primary key: re-fetching a row updates the existing document
    # instead of creating a duplicate.
    document_id => "%{id}"
  }
  # Mirror events to stdout for debugging.
  stdout {
    codec => "rubydebug"
  }
}
I would appreciate any help.
Best Regards,
Norbert