Please help me with below error and understanding why the messages are not being indexed

Hi Folks,

I am trying to ingest few messages but those are failing with below error. Can someone please help me rectifying it?

[WARN ] 2020-03-19 10:17:31.977 [[main]>worker2] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"problox-2020.03.19", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x52be2f5>], :response=>{"index"=>{"_index"=>"problox-2020.03.19", "_type"=>"doc", "_id"=>"OdQe8XABjMrTL_9ALhNV", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"Rejecting mapping update to [problox-2020.03.19] as the final mapping would have more than 1 type: [_doc, doc]"}}}}

Here is my stdout

{
       "request" => "AAAAQk5JX1JPVVRFAAIoAgAAAAEAAAAqAAAAFjIwMi4xNDkuMjIyLjE2NgAzMjk5AAAxMC4wLjAuMTAwAHNhcGRwMDEAAA==",
          "host" => "traplox",
          "data" => "",
     "trap_type" => "SAP-Router-Trap",
          "type" => "saptrapexternal",
      "@version" => "1",
      "src_port" => 49762,
         "event" => "Received packet",
       "service" => "saprouter",
          "tags" => [
        [0] "_jsonparsefailure"
    ],
       "session" => "9784cda8-2454-4873-bb90-099018a96f21",
    "@timestamp" => 2020-03-19T04:31:36.575Z,
        "src_ip" => "45.112.146.246",
     "dest_port" => 3299,
     "target_ip" => "0.0.0.0",
      "response" => ""
}

And here is my logstash config

input {
# SAP Trap Internal
#        file {
#   path => ["/var/log/data/saphp/honeysap-internal.log"]
#    codec => json
#    type => "saptrapinternal"
#        }

# SAP Trap External
#        file {
#    path => ["/var/log/data/saphp/honeysap-external.log"]
#    codec => json
#    type => "saptrapexternal"
#        }
#}
        stdin {
        codec => json
        type => "saptrapexternal"
        }
}
filter {
#SAP Internal
        if [type] == "saptrapinternal"{
    mutate {
    remove_field => [ "path" ]
    gsub => [ "[message]", "^[^\{]+", "" ]
#                rename => {
#                "source_ip" => "src_ip"
#                "target_port" => "dest_port"
#                "source_port" => "src_port"
#                }
  }
  json {
    skip_on_invalid_json => true
    source => "[message]"
    remove_field => [ "[message]" ]
        remove_field => [ "timestamp" ]
  }
 mutate {
#    remove_field => [ "path" ]
#    gsub => [ "[message]", "^[^\{]+", "" ]
                        rename => {
                "source_ip" => "src_ip"
                "target_port" => "dest_port"
                "source_port" => "src_port"
                }
  }

  # Set @timestamp based on the timestamp field from the JSON payload. If successful remove the datetime field. Set the timezone as needed.
  date {
    match => [ "[timestamp]", "ISO8601" ]
    remove_field => [ "[timestamp]" ]
    timezone => "UTC"
  }
}
############
        if [type] == "saptrapexternal" {
                  mutate {
    remove_field => [ "path" ]
    gsub => [ "[message]", "^[^\{]+", "" ]
#                        rename => {
#                "source_ip" => "src_ip"
#                "target_port" => "dest_port"
#                "source_port" => "src_port"
#                }
  }
  json {
    skip_on_invalid_json => "true"
    source => "[message]"
    remove_field => [ "[message]" ]
    remove_field => [ "timestamp" ]
  }
 mutate {
#    remove_field => [ "path" ]
#    gsub => [ "[message]", "^[^\{]+", "" ]
                        rename => {
                "source_ip" => "src_ip"
                "target_port" => "dest_port"
                "source_port" => "src_port"
                }
  }
# Set @timestamp based on the timestamp field from the JSON payload. If successful remove the datetime field. Set the timezone as needed.
  date {
    match => [ "[timestamp]", "ISO8601" ]
    remove_field => [ "[timestamp]" ]
    timezone => "UTC"
        }
}
#### SAP Trap
if [type] == "saptrapinternal" {
        mutate {
                add_field => {
                        trap_type => "SAP-Internal-Trap"
                        }
                }
                        }

if [type] == "saptrapexternal" {
        mutate {
               add_field => {
                        trap_type => "SAP-Router-Trap"
                        }
                }
                        }
}
output {
  elasticsearch {
    hosts => ["https://127.0.0.1:16577"]
    user => xxxx
    password => xxxxx
    ssl => true
    ssl_certificate_verification => false
    template => "/etc/logstash/elasticsearch-template-es7x.json"
    ilm_enabled => false
    index => "problox-%{+YYYY.MM.dd}"
    document_type => "doc"
  }
}

#output {stdout {}}

You seem to be using elastic stack version 7.x Note that document_type option has been deprecated and for version 7.x its value should be set as _doc or you can altogether remove this option and it will be taken by default as _doc.

Read more here.

I am sorry what went wrong in my logstash config?If you could suggest me pls?

In your logstash config, for the elasticsearch output plugin change document_type as below:

 elasticsearch {
    hosts => ["https://127.0.0.1:16577"]
    user => xxxx
    password => xxxxx
    ssl => true
    ssl_certificate_verification => false
    template => "/etc/logstash/elasticsearch-template-es7x.json"
    ilm_enabled => false
    index => "problox-%{+YYYY.MM.dd}"
    document_type => "_doc"
  }

The reason for this is that elasticsearch no more support mapping types and hence document_type is no more relevant. It should be set to default value _doc or you should remove the document_type param altogether.

I see; thanks for pointing that out let me see that.

Yep that did work!! Thanks for the help.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.