I've created a pipeline to get data directly from Mysql to kibana using logstash and Jdbc input plugin
But when I do the actual execution on .conf file I'm getting the same values multiple time in the elastic index.
here's my .conf file
input {
jdbc {
jdbc_driver_library => "/usr/share/java/mysql-connector-java-8.0.30.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://127.0.0.1:3306/database"
jdbc_user => "root"
jdbc_password => "root"
tracking_column => "id"
use_column_value => true
tracking_column_type => "numeric"
schedule => "* * * * *"
clean_run => true
statement => "SELECT * from products"
}
}
filter {
mutate {
copy => { "id" => "[@metadata][_id]"}
remove_field => ["@version"]
}
}
output {
elasticsearch {
hosts=>["http://localhost:9200"]
user=>"elastic"
password=>"*******"
index=>"mysql" }
stdout { codec => "rubydebug"}
Is there something that I'm missing here?