Logstash shows parsing error for JSON files

Hi,
I am intermittently getting a JSON parsing error from Logstash while it reads these files and loads them into Elasticsearch. The details are below; any lead would be really helpful. When I isolate the file and run it on its own, it parses fine.

Error:
:ParserError: Unexpected character ('-' (code 45)): Expected space separating root-level values
at [Source: (String)"21-04-21T17:09:26.543493", "cluster_name": "GF-US-Dev-Mfg-MA-EMR-V7.0", "account": "963633305700", "env": "Dev", "cluster_id": "j-2O3L2OZ72RSSB", "cluster_status": "RUNNING", "cluster_starttime": "20201210234344", "cluster_endtime": "", "fab": "GLOBAL", "capability": "shared-analytics", "functionality": "INLINE", "job_id": "s-3A7H8B0R035F", "job_name": "Inline Data Load FAB1 Backlog", "job_status": "CANCELLED", "job_creationtime": "20210421140142", "job_starttime": "", "job_endtime": "", "job_ru"[truncated 166 chars]; line: 1, column: 4]>, :data=>"21-04-21T17:09:26.543493", "cluster_name": "GF-US-Dev-Mfg-MA-EMR-V7.0", "account": "963633305700", "env": "Dev", "cluster_id": "j-2O3L2OZ72RSSB", "cluster_status": "RUNNING", "cluster_starttime": "20201210234344", "cluster_endtime": "", "fab": "GLOBAL", "capability": "shared-analytics", "functionality": "INLINE", "job_id": "s-3A7H8B0R035F", "job_name": "Inline Data Load FAB1 Backlog", "job_status": "CANCELLED", "job_creationtime": "20210421140142", "job_starttime": "", "job_endtime": "", "job_run_duration": 0, "executor-memory": "8G", "driver-memory": "8G", "num-executors": "12", "total_memory": 123.19999999999999, "ram_minutes": 0.0, "memory_source": "job"}"}

logstash.conf:
input {
  file {
    type => "emr"
    codec => "json"
    path => "/data/clo/emcs*.json"
    start_position => "beginning"
    sincedb_path => "/data/psri/sincedb_psrivas2.txt"
  }
}

filter {}

output {
  elasticsearch {
    hosts => [""]
    ssl => true
    user => "myUser"
    password => "Password"
    index => "ptest-%{+yyyy.MM.dd}"
    ilm_enabled => false
  }
}
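For what it's worth, one variant I am considering trying (a sketch only, not yet tested): keep the file input on the default plain codec and do the JSON parsing in a json filter instead, so a line that fails to parse just gets a _jsonparsefailure tag rather than raising a codec error. The paths, credentials, and index name are the same placeholders as in my config above.

input {
  file {
    type => "emr"
    path => "/data/clo/emcs*.json"
    start_position => "beginning"
    sincedb_path => "/data/psri/sincedb_psrivas2.txt"
    # no json codec here; each line is kept as a raw "message" field
  }
}

filter {
  json {
    source => "message"   # parse here; invalid lines get tagged _jsonparsefailure instead of failing
  }
}

output {
  elasticsearch {
    hosts => [""]
    ssl => true
    user => "myUser"
    password => "Password"
    index => "ptest-%{+yyyy.MM.dd}"
    ilm_enabled => false
  }
}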
