logstash.conf
input {
kafka {
...
}
}
filter {
json {
source => "message"
}
}
output {
influxdb {
id => "influxdb080"
db => "AMERXX01"
user => "urxk1me1"
password => "HA6H67yjHcp6XiX4"
host => "tech-vl-a-rxx-62"
port => "8086"
#codec => "json"
measurement => "K8S_E4"
data_points => {
"message" => "%{[message]}"
}
#exclude_fields => ["@timestamp", "@Version"]
use_event_fields_for_data_points => true
data_points => {}
data_points => {'column1' => 'available', 'column2' => '@timestamp'}
data_points => {'available' => 'column1'}
data_points => {'column1' => 'available'}
}
}
I have tried all kind of combination , looking at other people experience on internet,
whatever I do, I get an HTTP 400 error on InfluxDB:
Mar 04 11:24:22 vl-a-rxx-62 influxd[20490]: [httpd] 10.215.74.239 - urxk1me1 [04/Mar/2020:11:24:22 +0100] "POST /write?db=AMERXX01&p=%5BREDACTED%5D&precision=ms&rp=autogen&u=urxk1me1 HTTP/1.1" 400 192459 "-" "Ruby" 50e0bdc0-5e02-11ea-ab4d-005056be1aa9 138978
I use versions Logstash 7.6.0 and InfluxDB 1.7.6
Any help or share experience welcome