I'm trying to get Logstash to ingest a JSON file created by a mongoexport call. The file looks like this:
{
  "_id": "3c51d008add94422abf107f0",
  "name": "Pulse Get SVN By ID",
  "type": "automation",
  "tasks": {
    "5336": {
      "name": "svnGetByIdHandler",
      "canvasName": "svnGetByIdHandler",
      "summary": "Get SVN by ID",
      "description": "svn.getByIdHandler",
      "location": "Adapter",
      "locationType": "Pulse2",
      "app": "Pulse2",
      "type": "automatic",
      "displayName": "Pulse2",
      "variables": {
        "incoming": {
          "objId": "$var.job.objId",
          "queryActivedb": "",
          "adapter_id": "Pulse"
        },
        "outgoing": {
          "result": null
        },
        "error": "",
        "decorators": []
      },
      "start_time": "2023-08-24T19:59:12.130Z",
      "end_time": 1.692907152253E+12,
      "finish_state": "error"
    }
  },
  "last_updated": {
    "$date": "2023-08-24T19:59:12.261Z"
  }
}
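For context, the multi-line layout presumably comes from mongoexport's --pretty flag. Something along these lines produces a file of that shape (the URI, database, and collection names below are placeholders, not the actual invocation):

mongoexport --uri="mongodb://localhost:27017/itential" \
  --collection=jobs \
  --pretty \
  --out=/var/log/mongodb/errored-jobs.json

From what I can tell in the docs, dropping --pretty would give one document per line, but I'd rather not regenerate these files if a codec can handle them as they are.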
I first tried codec => json in my Logstash pipeline config, but Logstash indexed each line of the file as its own document in Elasticsearch, which is not what we want.
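For reference, the input block for that attempt looked like this (everything other than the codec line matches the config at the end of this post):

input {
  file {
    path => "/var/log/mongodb/errored-jobs.json"
    start_position => "beginning"
    codec => json
    sincedb_path => "/dev/null"
  }
}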
I then tried codec => json_lines. With that codec, no JSON data shows up in Elasticsearch at all, and there is no indication of a parse error or anything like that in logstash-plain.log. My suspicion is that json_lines also works line by line, and since no single line of this pretty-printed file is a complete JSON document, nothing ever gets emitted, but I can't confirm that from the logs.
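If that suspicion is right, json_lines would only work if each export were collapsed onto a single line, something like this (shortened here for readability):

{"_id": "3c51d008add94422abf107f0", "name": "Pulse Get SVN By ID", "type": "automation", "tasks": {...}, "last_updated": {"$date": "2023-08-24T19:59:12.261Z"}}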
Is there another codec I should be using? Here is the config:
input {
  file {
    path => "/var/log/mongodb/errored-jobs.json"
    start_position => "beginning"
    codec => json_lines
    sincedb_path => "/dev/null"
  }
}

filter {
  mutate {
    remove_field => ["_id"]
  }
}

output {
  elasticsearch {
    hosts => ["http://ourhost.com:9200"]
    index => "itential-jobs-%{+yyyyMMdd}"
    #user => "elastic"
    #password => "changeme"
  }
}
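One idea I have not tried yet, based on my reading of the multiline codec docs, is to fold each pretty-printed document into a single event and then parse it with a json filter. A sketch (untested; the pattern and the auto_flush_interval/max_lines values are guesses on my part):

input {
  file {
    path => "/var/log/mongodb/errored-jobs.json"
    start_position => "beginning"
    sincedb_path => "/dev/null"
    # Any line that does not start a new top-level document ("{" in
    # column one) gets appended to the previous line, so one exported
    # document becomes one event.
    codec => multiline {
      pattern => "^\{"
      negate => true
      what => "previous"
      auto_flush_interval => 2  # flush the last document instead of waiting for the next "{"
      max_lines => 2000         # a single exported job can span many lines
    }
  }
}

filter {
  # The reassembled document arrives as plain text in [message].
  json {
    source => "message"
  }
  mutate {
    remove_field => ["_id", "message"]
  }
}

(The elasticsearch output would stay the same as above.) Is that the right direction, or is there a codec that handles pretty-printed JSON directly?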