Logstash not creating index - JSON parse failure

Hello,

Suddenly Logstash is not creating an index in ES, and I'm seeing errors like this in the Logstash debug output:

JSON parse failure. Falling back to plain-text {:error=>#<LogStash::Json::ParserError: Unexpected character ('-' (code 45)): Expected space separating root-level values
 at [Source: [B@267c9565; line: 1, column: 6]>, :data=>"2016-04-08 18:40:27 [I] processing report for logs.jokefire.com", :level=>:error, :file=>"logstash/codecs/json.rb", :line=>"67", :method=>"legacy_parse"}

I'm also seeing these warnings:

Beats input: the pipeline is blocked, temporary refusing new connection. {:reconnect_backoff_sleep=>0.5, :level=>:warn, :file=>"logstash/inputs/beats.rb", :line=>"164", :method=>"run"}

The Logstash and Beats configs had been in place and everything was working fine. But I just restarted Logstash after fixing a disk space issue, those errors started popping up, and now it won't create its index.
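
To figure out whether events are making it through the pipeline at all (the "pipeline is blocked" warnings make me suspect the elasticsearch output is stalling), I'm thinking of temporarily adding a stdout output alongside it, something like:

output {
  # Temporary debugging output: print every event that reaches the
  # output stage. If nothing prints, events are stuck upstream; if
  # events print but no index appears, the elasticsearch output is
  # the place to look.
  stdout {
    codec => rubydebug
  }
}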

Here are my configs:

cat 10-logstash-input.conf
input {

  beats {
    port => 5000
    tags => "beats"
    codec => "json"
    ssl => true
    ssl_certificate => "/etc/pki/tls/certs/logstash.crt"
    ssl_key => "/etc/pki/tls/private/logstash.key"
    congestion_threshold => 10
  }

  syslog {
    type => "syslog"
    port => "5514"
  }

  #redis {
  #  host => "216.120.248.98"
  #  type => "redis-input"
  #  data_type => "list"
  #  key => "logstash"
  #}
}


#cat 30-logstash-output.conf
output {
  elasticsearch {
    hosts => ["xx.xx.xx.xx", "xx.xx.xx.xx", "xx.xx.xx.xx"]
    index => "logstash-%{+YYYY.MM.dd}"
    user => "ls_user"
    password => "secret"
  }

  # Alerting
  if [log_message] == "POSSIBLE BREAK-IN ATTEMPT!" {
    email {
      body => "Triggered in: %{message}"
      subject => "This is a Logstash alert for POSSIBLE BREAK-IN ATTEMPTS!"
      from => "logstash.alert@jokefire.com"
      to => "admin@example.com"
      via => "sendmail"
    }
  }
}

The filter file appears in a reply below, since it's too long for this post.

Any ideas on why this isn't working?

Thanks

#cat 20-filters.conf
filter {
  if "_jsonparsefailure" in [tags] {
    mutate {
      remove_tag => ["_jsonparsefailure"]
    }
    grok {
      match => { 'message' => '%{SYSLOGTIMESTAMP:event_timestamp} %{HOSTNAME:jf_host} %{SYSLOGPROG}: %{GREEDYDATA:log_message}' }
    }
  }
    if "_grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure"]
      }
      grok {
            match => { 'message' => '%{IP:ipaddress} - - \[%{HTTPDATE:http_date}] %{QS:http_message}%{GREEDYDATA:log_message}'}
            add_tag => ["http_query"]
        }
     }
    if "_grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure"]
      }
      grok {
            match => { 'message' => '\[%{CISCO_REASON:date}%{NOTSPACE} %{YEAR:year}] %{NOTSPACE:loglevel} %{SYSLOG5424SD} %{NOTSPACE:apache_module} %{SYSLOG5424SD:client_ip_and_port} %{WORD:apache_code}: %{GREEDYDATA:message}'}
            add_tag => ["apache_log"]
        }
     }
    if "_grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure"]
      }
      grok {
            match => { 'message' => 'File %{QS:data_dog_file}, line %{BASE10NUM:line_number}, in %{WORD:option}'}
            add_tag => ["dada_dog_file", "datadog"]
        }
    }
   if "_grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure"]
      }
      grok {
            match => { 'message' => '%{NOTSPACE} %{SYSLOG5424SD} %{CISCO_REASON:log_message}'}
            add_tag => ["io_error"]
        }
    }
    if "_grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure"]
      }
      grok {
            match => { 'message' => '%{WORD} \(%{JAVAFILE}\):'}
            add_tag => "Traceback"
        }
    }
    if "_grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure"]
      }
      grok {
            match => { 'message' => '%{SYSLOGTIMESTAMP:event_timestamp} %{HOSTNAME:jf_host} %{SYSLOGPROG}: %{WORD:request}: invalid user %{USERNAME:user} %{NOTSPACE:auth_method}'}
            add_tag => ["invalid_login"]
        }
    }
    if "_grokparsefailure_sysloginput" in [tags] or " _grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure", "_grokparsefailure_sysloginput"]
      }
      grok {
            match => { 'message' => '# semanage fcontext -a -t FILE_TYPE %{QS:log_file}'}
        }
    }
     if "_grokparsefailure_sysloginput, _grokparsefailure" in [tags] {
          mutate{
          remove_tag => ["_grokparsefailure_sysloginput", "_grokparsefailure"]
      }
      grok {
            match => { 'message' => '# semodule -i %{PROG:module}'}
        }
    }
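
  # Note: this whole retry chain could probably be collapsed into a single
  # grok with an array of patterns, since grok stops at the first pattern
  # that matches (break_on_match defaults to true). Sketch, with made-up
  # placeholder patterns:
  #
  # grok {
  #   match => { "message" => [ "PATTERN_ONE", "PATTERN_TWO", "PATTERN_THREE" ] }
  # }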
  ## Add GeoIP to apache logs
  if [type] == "apache_ref_access" or [type] == "apache_dev_access" or [type] == "apache_beta_access" {
    geoip {
      source => "clientip"
      target => "geoip"
      database => "/etc/logstash/data.d/GeoLiteCity.dat"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      convert => [ "[geoip][coordinates]", "float" ]
    }
  }

  ## Tagging section begins here
  if [program] == "dd.forwarder" or [program] == "dd.dogstatsd" {
    mutate { add_tag => "datadog" }
  } else if [program] == "sshd" {
    mutate { add_tag => "ssh" }
  } else if [program] == "puppet-master" or [program] == "puppet" or [program] == "puppet-agent" {
    mutate { add_tag => "puppet" }
  } else if [_type] == "security" {
    mutate { add_tag => "security" }
  } else if [program] == "NetworkManager" {
    mutate { add_tag => "NetworkManager" }
  } else if [log_message] == "password check failed for user (root)" or [log_message] == "Failed password for root" {
    mutate { add_tag => "password failed for root" }
  } else if [program] == "systemd" {
    mutate { add_tag => "systemd" }
  } else if [program] == "run-parts(/etc/cron.daily)" or [program] == "run-parts(/etc/cron.hourly)" or [program] == "cron" or [program] == "CROND" or [program] == "crontab" or [program] == "anacron" {
    mutate { add_tag => "cron" }
  } else if [program] == "dbus" {
    mutate { add_tag => "dbus" }
  } else if [program] == "dhclient" or [program] == "avahi-daemon" {
    mutate { add_tag => "DHCP" }
  } else if [type] == "security" {
    mutate { add_tag => "security" }
  } else if [program] == "mcollectived" {
    mutate { add_tag => "mcollectived" }
  } else if [type] == "postfix" {
    mutate { add_tag => "postfix" }
  }
}
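
One more thought on the tagging section: that long if/else chain might reduce to a single lookup with the translate filter (which I believe ships with Logstash). A rough sketch; "auto_tag" is just a scratch field name I made up, and the dictionary is abbreviated:

filter {
  # Map program names to tags in one lookup instead of an if/else chain
  translate {
    field       => "program"
    destination => "auto_tag"
    dictionary  => {
      "sshd"         => "ssh"
      "systemd"      => "systemd"
      "dbus"         => "dbus"
      "mcollectived" => "mcollectived"
    }
  }
  # Only tag when the lookup matched, then drop the scratch field
  if [auto_tag] {
    mutate {
      add_tag      => [ "%{auto_tag}" ]
      remove_field => [ "auto_tag" ]
    }
  }
}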

Hi Guys,

I am getting the error below:
LogStash::Json::ParserError: Unexpected character ('.' (code 46)): Expected space separating root-level values

Below is my logstash.conf:

input {
  beats {
    port => 5044
    type => "beats"
    codec => json_lines
  }
  gelf {
    port => 5043
    type => "xxx"
  }
  tcp {
    port => 5045
    codec => json_lines
    type => "xxx"
  }
}

filter {
  if [type] == "beats" {
    json {
      source => "message"
    }
  }

  if [type] == "beats" {
    geoip {
      source => "remote_ip"
      target => "geoip"
      database => "/etc/logstash/GeoLiteCity.dat"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      convert => [ "[geoip][coordinates]", "float" ]
    }
  }
}

output {
  if [type] == "beats" {
    amazon_es {
      hosts => ["xxx"]
      region => "xx"
      index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
      document_type => "%{[@metadata][type]}"
      codec => "json"
    }
  }
  if [type] == "ecs" {
    amazon_es {
      hosts => ["xx"]
      region => "x"
      index => "%{[tag]}-%{+YYYY.MM.dd}"
      #index => "testing-%{+YYYY.MM.dd}"
      document_type => "%{[type]}"
      codec => "json"
    }
  }
  if [type] == "ecstcp" {
    amazon_es {
      hosts => ["xx"]
      region => "xx"
      index => "%{[logstash_index]}-%{+YYYY.MM.dd}"
      #index => "filetesting-%{+YYYY.MM.dd}"
      document_type => "%{[type]}"
      codec => "json"
    }
  }
}
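
Could the json_lines codec on the beats input be the culprit? The beats protocol already delivers discrete events, and the json filter above decodes [message] anyway, so the payload is effectively being decoded twice. A sketch of what I'm thinking of trying, assuming the payloads arrive in [message]:

input {
  beats {
    port => 5044
    type => "beats"
    # no codec here: beats already frames events, and the json filter
    # decodes [message]; non-JSON payloads then just get tagged with
    # _jsonparsefailure instead of erroring in the codec
  }
}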

Please suggest ASAP; I'm unable to figure it out.