Logstash JSON parse error... with valid JSON data

Hi All,

We have an issue where Filebeat (1.2.2) reads data from files and sends it to Logstash (2.3.1). We regularly receive errors stating that the data cannot be parsed, even though I verified via jsonlint.com that the JSON is, in fact, valid JSON.

Most of the other entries are making it through without issue. Could it be the "\t" in the "sd" key/value pair that is causing the problem, i.e.
"sd":"Yamaha\tXVZ 1300 A Royal Star (4YP3) (Europe)\t1999 Goldfren S33 Rear Brake Pads"

and if so, how would we resolve this? The error below complains about an illegal unquoted CTRL-CHAR, code 9, which is the tab character, so perhaps the file contains a raw tab byte rather than the escaped two-character sequence \t.
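
If that is the cause, would something like the following work around it? This is just an untested sketch: drop the json codec from the beats input so each line arrives as plain text in the message field, escape any raw tab bytes with mutate/gsub, and only then apply the json filter (the "\t" pattern is a regex matching a literal tab; the "\\t" replacement emits the two characters backslash + t):

input {
  beats {
    port => 9990
  }
}

filter {
  # Escape raw tab bytes (CTRL-CHAR 9) so the line becomes valid JSON
  mutate {
    gsub => ["message", "\t", "\\t"]
  }
  json {
    source => "message"
  }
}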

Thanks in advance!!!

So the JSON data looks like this:
{"ts":"2016-05-19 06:59:29.427 -0700","opp":"GRC","ti":"e2169334-ec99-46bd-bad5-0a36f2a7a67c","dv":1286,"flg":0,"cc":1,"ctyc":3,"ctgc":1,"idc":3,"attc":1,"chsl":[["871410"]],"ctl":[[["177925"]]],"csil":[["3"]],"ew":2.8900,"emv":2,"ebayid":[["301961033655"]],"al":[[[{"n":"CDN","v":"1000"}]]],"chss":5,"cszs":13,"nacchs":1,"naccsz":1,"naccwgt":1,"emd":"ESTIMATE_CURRENT_VERSION","sd":"Yamaha\tXVZ 1300 A Royal Star (4YP3) (Europe)\t1999 Goldfren S33 Rear Brake Pads","idl":[[[{"rfid":"301961033655","t":"COMMODITYID"},{"rfid":"301961033655","t":"EBAYITEMID"},{"rfid":"Goldfren","t":"BRAND"}]]],"accci":{"acoi":"301961033655","acohs":"871410","acohsq":0.2903,"acov":2},"szsm":10,"hssm":4,"spr":[[14.50]],"accdu":2,"atc":71,"mrc":"EBAY_0002","ccl":[[[{"t":"SALEPRICE","v":14.5}]]],"dssl":[[["309","314"]]],"lt":"BIN_ONLY","du":4,"cct":1,"rl":[{"sid":"STANDARD_UK","cty":"US","rstl":["IMPORT"]},{"sid":"STANDARD_UK","cty":"MY","rstl":["IMPORT"]},{"sid":"STANDARD_UK","cty":"IL","rstl":["IMPORT"]}],"frl":["US","MY","IL"]}

The error seen is:
{:timestamp=>"2016-05-19T06:59:34.387000-0700", :message=>"JSON parse failure. Falling back to plain-text", :error=>#<LogStash::Json::ParserError: Illegal unquoted character ((CTRL-CHAR, code 9)): has to be escaped using backslash to be included in string value
at [Source: [B@435bde90; line: 1, column: 80]>, :data=>"{"ts":"2016-05-19 06:59:29.457 -0700","opp":"ACC","ipl":{"rcom":{"ttl":"Yamaha\tXVZ 1300 A Royal Star (4YP3) (Europe)\t1999 Goldfren S33 Rear Brake Pads",":ctlt":[{"ctcd":"003-177925","ispr":"true"}],":ids":[{"ref":"TRANSACTIONID","val":"e2169334-ec99-46bd-bad5-0a36f2a7a67c"},{"ref":"LISTING_TYPE","val":"BIN_ONLY"},{"ref":"EBAYITEMID","val":"301961033655"},{"ref":"BRAND","val":"Goldfren"}],"prc":14.5000,"ever":"CURRENT","elvl":"SIX_DIGIT","nump":null,"merc":"EBAY_0002","pret":"[HSCODE, WEIGHT, SIZE]"}},"rpl":{"com":{":clrt":[{"hs":"871410","hsqf":null,"hsq6":0.29031159591716540000,"wgt":null,"wq":null,"sz":null,"szq":null}],"pkv":2,"clst":1}}}", :level=>:error}

The filebeat.yml config looks like this:

filebeat:
  prospectors:
    -
      paths:
        - C:\pb\apache-tomcat-7.0.22\logs\xbec_events.log
        - C:\docume~1\alluse~1\filebeat\logs\mybeat.log
        - C:\pb\apache-tomcat-7.0.22\logs\xbec_transactions.log
      input_type: log
      ignore_older: 10m
      close_older: 10m
      scan_frequency: 2s
      harvester_buffer_size: 200
  spool_size: 200
  idle_timeout: 10s
  registry_file: "C:/docume~1/alluse~1/filebeat/registry/registry.file"

output:
  logstash:
    hosts: ["10.94.126.49:9990", "10.94.166.20:9990", "10.86.174.18:9990", "10.86.170.19:9990"]
    worker: 3
    loadbalance: true
    max_retries: 0

logging:
  to_files: true
  files:
    path: "c:/DOCUME~1/ALLUSE~1/filebeat/logs"
    name: mybeat.log
    rotateeverybytes: 10485760 # = 10MB
  level: warning

And the logstash.conf looks like this:

input {
  beats {
    type => "beats"
    port => 9990
    codec => "json"
  }
  beats {
    type => "beats"
    port => 9991
    codec => "json"
  }
}

filter {
  json {
    source => "message"
  }
}

output {
  s3 {
    access_key_id => "xyz"
    secret_access_key => "xyz"
    region => "us-west-2"
    bucket => "bucketname"
    canned_acl => "authenticated_read"
    size_file => 50000000
    time_file => 1
    upload_workers_count => 20
    prefix => "1-server1/"
    codec => "json_lines"
    temporary_directory => "/data/logstash/forwarder-other"
    restore => true
  }
}

Anyone else seen an issue like this?

Thanks

The same issue with JSON sent over TCP

Has anyone managed to solve this?

I am not sure if this was solved, but the use of both a json codec on the input and a separate json filter looks odd to me, since the codec should already have parsed the event by the time the filter runs. When you encounter an issue like this, it usually helps to replace all outputs with a single stdout output using the rubydebug codec, as this makes it fast and easy to debug. Then add one codec and/or filter at a time and observe the result.
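
For example, a minimal sketch:

output {
  stdout {
    codec => rubydebug
  }
}

Once the events look right on stdout, you can switch back to the s3 output.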

If you want assistance with this, it is probably better to open a new thread and provide your config.