Logstash cannot index to Elasticsearch with http_poller

I am running ELK 6.5.4 and trying to use the http_poller input to poll data into Elasticsearch, but Logstash cannot index the events and logs this error:

[logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"%{[id]}", :_index=>"index-2019.02.11", :_type=>"bikestatus", :routing=>nil}, #<LogStash::Event:0x1ac6e8e1>], :response=>{"index"=>{"_index"=>"bikestatus-dc-2019.02.11", "_type"=>"bikestatus", "_id"=>"%{[id]}", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Could not convert [location.index] to boolean", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Failed to parse value [not_analyzed] as only [true] or [false] are allowed."}}}}}}

Here's my conf

input {
  ## pull data from Capital Bikeshare every 60 seconds
  http_poller {
    urls => {
      bikeshare_dc => "https://www.capitalbikeshare.com/data/stations/bikeStations.xml"
    }
    request_timeout => 30
   #interval => 30
    schedule => { every => "30s" }
    codec => "plain"
    metadata_target => "http_poller_metadata"
  }
}

filter {

  ## interpret the message payload as XML
  xml {
    source => "message"
    target => "parsed"
  }

  ## Split out each "station" record in the XML into a different event
  # split {
  #   field => "[parsed][station]"
  #   add_field => {
  #     ## generate a unique id (station id + lastCommWithServer) to prevent duplicates
  #     id                 => "%{[parsed][station][id]}-%{[parsed][station][lastCommWithServer]}"
  #     stationName        => "%{[parsed][station][name]}"
  #     lastCommWithServer => "%{[parsed][station][lastCommWithServer]}"
  #     lat                => "%{[parsed][station][lat]}"
  #     long               => "%{[parsed][station][long]}"
  #     numBikes           => "%{[parsed][station][nbBikes]}"
  #     numEmptyDocks      => "%{[parsed][station][nbEmptyDocks]}"
  #   }
  # }


  mutate {
    ## Convert the numeric fields to the appropriate data type from strings
    convert => {
      "numBikes"      => "integer"
      "numEmptyDocks" => "integer"
      "lat"           => "float"
      "long"          => "float"
    }
    ## put the geospatial value in the correct [ longitude, latitude ] format
    add_field => { "location" => [ "%{[long]}", "%{[lat]}" ]}
    ## get rid of the extra fields we don't need
    remove_field => [ "message", "parsed", "lat", "long", "host", "http_poller_metadata"]
  }

  ## use the embedded Unix timestamp
  date {
    match => ["lastCommWithServer", "UNIX_MS"]
    remove_field => ["lastCommWithServer"]
  }

}

output {
  # stdout { codec => rubydebug }
  stdout { codec => dots }
  elasticsearch {
    hosts => "localhost:9200"
    template => "/etc/logstash/template/bikestatus.json"
    template_name => "bikestatus"
    template_overwrite => true
    ## use a time-aware index name
    index => "bikestatus-dc-%{+YYYY.MM.dd}"
    #protocol => "http"
    ## not super important, but it makes sense to override the default, which is "doc"
    document_type => "bikestatus"
    ## use the generated id as the document id to prevent duplicates
    document_id => "%{[id]}"
  }
}

OK, so Elasticsearch is trying to interpret location.index as a boolean and choking on the value not_analyzed. Can you check the mapping that exists on the index? Use something like

GET /bikestatus-dc-2019.02.11/_mapping/

Then you need to figure out where that field is coming from. The configuration you included does not appear to add it.
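It may also be worth checking the index template that Logstash installs (your output sets template_name => "bikestatus"), since dynamic templates usually come from the template rather than being added by hand. Assuming the default template API on 6.x:

GET /_template/bikestatus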

Here's the mapping

{
  "mapping": {
    "_default_": {
      "_all": {
        "enabled": false
      },
      "dynamic_templates": [
        {
          "string_fields": {
            "match": "*",
            "match_mapping_type": "string",
            "mapping": {
              "analyzer": "keyword",
              "doc_values": true,
              "ignore_above": 256,
              "index": "not_analyzed",
              "omit_norms": true,
              "type": "keyword"
            }
          }
        }
      ],
      "properties": {
        "@timestamp": {
          "type": "date",
          "format": "dateOptionalTime"
        },
        "numBikes": {
          "type": "integer"
        },
        "numEmptyDocks": {
          "type": "integer"
        }
      }
    }
  }
}

The template does define "location" as type "geo_point", and in the conf I do have add_field => { "location" => [ "%{[long]}", "%{[lat]}" ] }.

Interesting that the two strings from the error message, [index] and [not_analyzed], also appear in the mapping's dynamic template. "index": "not_analyzed" is pre-5.0 mapping syntax; since Elasticsearch 5.0 the index option only accepts true or false, which is exactly what the error complains about. That suggests your bikestatus.json template was written for a 2.x cluster. A dynamic template is only validated when it is actually applied to a new field, which would explain why the error shows up at index time rather than when the template was installed.
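For comparison, a minimal sketch of what that dynamic template could look like in 6.x syntax, assuming the rest of your template stays the same (keyword fields are un-analyzed by default, doc_values default to true, and omit_norms is gone):

"dynamic_templates": [
  {
    "string_fields": {
      "match": "*",
      "match_mapping_type": "string",
      "mapping": {
        "type": "keyword",
        "ignore_above": 256
      }
    }
  }
],
"properties": {
  "location": { "type": "geo_point" }
}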

Anyway, what you have here is fundamentally an Elasticsearch question, not a Logstash question. There should be an additional error message in the Elasticsearch log when Logstash logs this error. I would add that to the question and move it to the Elasticsearch forum.
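If the template does turn out to carry those 2.x settings, one possible cleanup, sketched under the assumption that you can afford to drop the installed template and today's index so both get recreated:

DELETE /_template/bikestatus
DELETE /bikestatus-dc-2019.02.11

Then replace the "index": "not_analyzed" / "analyzer" / "omit_norms" options in /etc/logstash/template/bikestatus.json with their 6.x equivalents and restart Logstash; with template_overwrite => true it will reinstall the corrected template, and the index will be recreated on the next event.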
