JSON data to be ingested (one standalone JSON object per line, i.e. NDJSON): /var/log/file.json
{"event_type":"temp","time":"2023-01-07 23:30:12","temp":64.0,"fan":2291}
{"event_type":"temp","time":"2023-01-07 23:30:22","temp":63.0,"fan":2308}
{"event_type":"temp","time":"2023-01-07 23:30:32","temp":63.0,"fan":2295}
{"event_type":"temp","time":"2023-01-07 23:30:42","temp":67.0,"fan":2299}
{"event_type":"temp","time":"2023-01-07 23:30:52","temp":61.0,"fan":2291}
Logstash config:
input {
  beats {
    port => 5044
  }
}

filter {
  json {
    # skip_on_invalid_json => true
    source => "message"
    target => "json"
  }
}

output {
  elasticsearch {
    hosts => "<private_ip>:9200"
    user => "<redacted>"
    password => "<redacted>"
  }
  stdout { }
}
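With this filter, the first sample line should come out of Logstash roughly like this (a sketch of the expected event; Logstash metadata fields such as @timestamp and host are omitted):

{
  "message": "{\"event_type\":\"temp\",\"time\":\"2023-01-07 23:30:12\",\"temp\":64.0,\"fan\":2291}",
  "json": {
    "event_type": "temp",
    "time": "2023-01-07 23:30:12",
    "temp": 64.0,
    "fan": 2291
  }
}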
Filebeat config:
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /var/log/file.json
  json.keys_under_root: true
  json.overwrite_keys: true
  json.add_error_key: true
  json.expand_keys: true

output.logstash:
  hosts: ["localhost:5044"]
Errors when using Filebeat:

[WARN ][logstash.filters.json ][main][20c071f94122e35758b5f80f63972b111cecff39a62dbe8a583f702663a206fa] Error parsing json {:source=>"message", :raw=>"[2023-01-07T13:36:45,326] Sending a new message for the listener, sequence: 540", :exception=>#<LogStash::Json::ParserError: Unexpected character ('-' (code 45)): was expecting comma to separate Array entries
 at [Source: (byte[])"[2023-01-07T13:36:45,326]

Note that the text Logstash is failing to parse is a plain-text log line (with a [2023-01-07T13:36:45,326] prefix), not one of the JSON lines from /var/log/file.json. The leading [ makes the parser expect a JSON array, which is why it complains about a missing comma between array entries when it reaches the '-' in the date.
If I don't use Filebeat and instead use the following Logstash config, it works perfectly:
input {
  file {
    start_position => "beginning"
    path => "/var/log/file.json"
    sincedb_path => "/dev/null"
  }
}

filter {
  json {
    source => "message"
    target => "json"
  }
}

output {
  elasticsearch {
    hosts => "<private_ip>:9200"
    user => "<redacted>"
    password => "<redacted>"
  }
  stdout { }
}
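For debugging, one way to see exactly what Filebeat hands to Logstash is to temporarily replace the Logstash output with Filebeat's console output (a sketch; Filebeat allows only one active output, so output.logstash has to be commented out while this is enabled):

# filebeat.yml, temporary debugging output: print events to stdout instead of shipping them
output.console:
  pretty: true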
Why is Filebeat producing these JSON parsing issues?