JSON parse error, original data now in message field

Hello, I am getting the following error with looking at edgemax router logs:

[2017-07-14T09:52:00,502][ERROR][logstash.codecs.json     ] JSON parse error, original data now in message field {:error=>#<LogStash::Json::ParserError: Unexpected character ('<' (code 60)): expected a valid value (number, String, array, object, 'true', 'false' or 'null')
 at [Source: <4>Jul 14 09:52:00 192.168.1.10 ("U7PG2,44d9e7fc1de2,v3.4.18.346 kernel: [6292100.680000] [wifi1] FWLOG: [616713] WAL_DBGID_TX_BA_SETUP ( 0x435940, 0x6, 0x0, 0x2, 0x0 ); line: 1, column: 2]>, :data=>"<4>Jul 14 09:52:00 192.168.1.10 (\"U7PG2,44d9e7fc1de2,v3.4.18.346 kernel: [6292100.680000] [wifi1] FWLOG: [616713] WAL_DBGID_TX_BA_SETUP ( 0x435940, 0x6, 0x0, 0x2, 0x0 )"}

However when I use a grok debugger:

https://grokdebug.herokuapp.com/

I get no reported errors?

My config for the logs are as follows:

input {
  udp {
    host => "xxx"
    port => 10514
    codec => "json"
    type => "rsyslog"
  }
}

filter {
  if [type] == "rsyslog" {
    grok {
      match => { "message" => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    grok {
      match => { "message" => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
    mutate {
    add_tag => [ "syslog_default_filter" ]
    }
  }
}

  if [message] =~ /(WAN_OUT|WAN_LOCAL|WAN_IN)/ {
    grok {
    match => { "message" => "\[%{DATA:firewall_rule}\]IN=%{DATA:firewall_in_interface} OUT=%{DATA:firewall_out_interface} MAC=%{DATA:firewall_src_mac} SRC=%{IPV4:firewall_src_ip} DST=%{IPV4:firewall_dst_ip} LEN=%{NUMBER:firewall_len} TOS=%{DATA:firewall_tos} PREC=%{DATA:firewall_prec} TTL=%{NUMBER:firewall_ttl} ID=%{NUMBER:firewall_id} DF PROTO=%{WORD:firewall_protocol} SPT=%{NUMBER:firewall_source_port} DPT=%{NUMBER:firewall_destination_port} WINDOW=%{NUMBER:firewall_window} RES=%{DATA:firewall_res} SYN URGP=%{NUMBER:firewall_urgp}" }
    match => { "message" => "\[%{DATA:firewall_rule}\]IN=%{DATA:firewall_in_interface} OUT=%{DATA:firewall_out_interface} MAC=%{DATA:firewall_src_mac} SRC=%{IPV4:firewall_src_ip} DST=%{IPV4:firewall_dst_ip} LEN=%{NUMBER:firewall_len} TOS=%{DATA:firewall_tos} PREC=%{DATA:firewall_prec} TTL=%{NUMBER:firewall_ttl} ID=%{NUMBER:firewall_id} DF PROTO=%{WORD:firewall_protocol} SPT=%{NUMBER:firewall_source_port} DPT=%{NUMBER:firewall_destination_port} LEN=%{NUMBER:firewall_len}" }
    match => { "message" => "\[%{DATA:firewall_rule}\]IN=%{DATA:firewall_in_interface} OUT=%{DATA:firewall_out_interface} MAC=%{DATA:firewall_src_mac} SRC=%{IPV4:firewall_src_ip} DST=%{IPV4:firewall_dst_ip} LEN=%{NUMBER:firewall_len} TOS=%{DATA:firewall_tos} TTL=%{NUMBER:firewall_ttl} ID=%{NUMBER:firewall_id} PROTO=%{WORD:firewall_protocol} SPT=%{NUMBER:firewall_source_port} DPT=%{NUMBER:firewall_destination_port} LEN=%{NUMBER:firewall_len2}" }
    }
    mutate {
      add_tag => [ "router_firewall_filter" ]
    }
  }
}

Any idea why Logstash doesn't like this? The filter in question is:

<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:syslog_message}

<4>Jul 14 09:52:00 192.168.1.10 (\"U7PG2,44d9e7fc1de2,v3.4.18.346 kernel: [6292100.680000] [wifi1] FWLOG: [616713] WAL_DBGID_TX_BA_SETUP ( 0x435940, 0x6, 0x0, 0x2, 0x0 )

I am assuming it's a JSON error, any ideas how to prvent this.

The log is definitely not in JSON format, can I just drop the codec option?

The log is definitely not in JSON format, can I just drop the codec option?

If the incoming logs aren't JSON you shouldn't use the json codec, no.

Yeh, that was exactly it. I have dropped that and the errors are gone.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.