"Expected [START_OBJECT] but found [VALUE_STRING]"


(Joana Rita Pereira) #1

I'm really new to the logstash concept and it's use so the problems i'm dealing with, are probably really easy.
I have a folder with json files where logstash is getting the info and sending it to elasticsearch. At this point everything is working and appearing in kibana without any problems.

So, in order to enrich information from my json files, i need to add fields that i can retrieve through
So my goal would be to have an entry in elasticsearch with all the information from my json file, that is already working, and, with all the fields from the metricbeat query where the hostname ( in metricbeat) is identical to the asset ( in json file).

My current Configuration file in logstash:

 input {
file {
path => "/opt/vulnwhisperer/openvas/*.json"
type => json
codec => json
start_position => "beginning"
tags => [ "openvas_scan", "openvas" ]
} 
elasticsearch {
hosts => "10.XXX.XXX.XXX:9200"  
index => "metricbeat-*"
query => '"query": { "match": {"host.name" : "prom01"} }, "size": 1 }' 
size => 1
}
}
filter {
if "openvas_scan" in [tags] {
mutate {
replace => [ "message", "%{message}" ]
gsub => [
"message", "\|\|\|", " ",
"message", "\t\t", " ",
"message", "    ", " ",
"message", "   ", " ",
"message", "  ", " ",
"message", "nan", " ",
"message",'\n',''
  ]
}
grok {
    match => { "path" => "openvas_scan_%{DATA:scan_id}_%{INT:last_updated}.json$" }
    tag_on_failure => []
}
mutate {
  add_field => { "risk_score" => "%{cvss}" }
}
if [risk] == "1" {
mutate { add_field => { "risk_number" => 0 }}
mutate { replace => { "risk" => "info" }}
}
if [risk] == "2" {
  mutate { add_field => { "risk_number" => 1 }}
  mutate { replace => { "risk" => "low" }}
}
if [risk] == "3" {
  mutate { add_field => { "risk_number" => 2 }}
  mutate { replace => { "risk" => "medium" }}
}
if [risk] == "4" {
  mutate { add_field => { "risk_number" => 3 }}
   mutate { replace => { "risk" => "high" }}
}
if [risk] == "5" {
  mutate { add_field => { "risk_number" => 4 }}
  mutate { replace => { "risk" => "critical" }}
}
mutate {
  remove_field => "message"
}
if [first_time_detected] {
  date {
    match => [ "first_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'"        ]
    target => "first_time_detected"
  }
}
if [first_time_tested] {
  date {
    match => [ "first_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
    target => "first_time_tested"
  }
}
 if [last_time_detected] {
  date {
    match => [ "last_time_detected", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
    target => "last_time_detected"
  }
}
if [last_time_tested] {
  date {
    match => [ "last_time_tested", "dd MMM yyyy HH:mma 'GMT'ZZ", "dd MMM yyyy HH:mma 'GMT'" ]
    target => "last_time_tested"
  }
}
 date {
  match => [ "last_updated", "UNIX" ]
  target => "@timestamp"
  remove_field => "last_updated"
}
mutate {
  convert => { "plugin_id" => "integer"}
  convert => { "id" => "integer"}
  convert => { "risk_number" => "integer"}
  convert => { "risk_score" => "float"}
  convert => { "total_times_detected" => "integer"}
  convert => { "cvss_temporal" => "float"}
  convert => { "cvss" => "float"}
    }
    if [risk_score] == 0 {
      mutate {
    add_field => { "risk_score_name" => "info" }
  }
}
if [risk_score] > 0 and [risk_score] < 3 {
  mutate {
    add_field => { "risk_score_name" => "low" }
  }
}
if [risk_score] >= 3 and [risk_score] < 6 {
  mutate {
    add_field => { "risk_score_name" => "medium" }
  }
 }
 if [risk_score] >=6 and [risk_score] < 9 {
  mutate {
    add_field => { "risk_score_name" => "high" }
  }
}
if [risk_score] >= 9 {
  mutate {
    add_field => { "risk_score_name" => "critical" }
  }
}
# Add your critical assets by subnet or by hostname. Comment this field out if you don't want to tag        any, but the asset panel will break.
if [asset] =~ "^10\.0\.100\." {
  mutate {
    add_tag => [ "critical_asset" ]
  }
}
  }
}
output {
  if "openvas" in [tags] {
stdout { codec => rubydebug }
elasticsearch {
  hosts => [ "10.XXX.XXX.XXX:9200" ]
  index => "logstash-vulnwhisperer-%{+YYYY.MM}"
}
  }
}`

And the error i'm obtaining in logs is:

[2018-12-05T11:55:35,964][ERROR][logstash.pipeline        ] A plugin had an unrecoverable error. Will    restart this plugin.
  Pipeline_id:main
 Plugin: <LogStash::Inputs::Elasticsearch index=>"metricbeat-*",    id=>"c336807420ba65d2db32de12fdba83899fc2634f351021dbade1b1703d4a3053", size=>1, hosts=>    ["10.XXX.XXX.XXX:9200"], query=>"\"query\": { \"match\": {\"host.name\" : \"prom01\"} }, \"size\": 1 }",     enable_metric=>true, codec=><LogStash::Codecs::JSON id=>"json_9f367b7a-c9dd-47da-9114-679b1a3875f7", enable_metric=>true, charset=>"UTF-8">, scroll=>"1m", docinfo=>false, docinfo_target=>"@metadata", docinfo_fields=>["_index", "_type", "_id"], ssl=>false>
 Error: [400] {"error":{"root_cause":[{"type":"parsing_exception","reason":"Expected [START_OBJECT]     but found [VALUE_STRING]","line":1,"col":1}],"type":"parsing_exception","reason":"Expected        [START_OBJECT] but found [VALUE_STRING]","line":1,"col":1},"status":400}
  Exception: Elasticsearch::Transport::Transport::Errors::BadRequest
  Stack: /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/elasticsearch-transport-5.0.5/lib    /elasticsearch/transport/transport/base.rb:202:in `__raise_transport_error'
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch    /transport/transport/base.rb:319:in `perform_request'
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch    /transport/transport/http/faraday.rb:20:in `perform_request'
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/client.rb:131:in `perform_request'
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/elasticsearch-api-5.0.5/lib/elasticsearch    /api/actions/search.rb:183:in `search'
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-input-elasticsearch-4.2.1/lib/logstash    /inputs/elasticsearch.rb:200:in `do_run'
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-input-elasticsearch-4.2.1/lib/logstash    /inputs/elasticsearch.rb:188:in `run'
/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:409:in `inputworker'
/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:403:in `block in start_input'