Can't visualize GeoIP locations on a coordinate map

A few months ago I wanted to implement Bro-IDS with Kibana.
Since I didn't know much about both, I followed this tutorial: https://medium.com/@utham/bro-elk-stack-integration-part-3-4fe02b5ff519

Everything worked, except for the coordinate map.
Most of the ip addresses are getting resolved to their properties (names, countries etc), so that part is working.

{
  "_index": "logstash-2018.02.28",
  "_type": "doc",
  "_id": "M0v_3WEBqb-C69Pe_xOR",
  "_score": 1,
  "_source": {
    "uid": "CtADaGKKVPJpVk8Mg",
    "orig_bytes": 931,
    "proto": "tcp",
    "missed_bytes": 0,
    "@timestamp": "2018-02-28T19:59:50.812Z",
    "id_orig_port": "65033",
    "conn_state": "SF",
    "type": "bro-conn_log",
    "id_orig_host": "[LOCAL IP ADDRESS]",
    "orig_ip_bytes": 16,
    "@version": "1",
    "resp_bytes": 10669,
    "id_resp_port": "443",
    "duration": 0.426291,
    "column21": "(empty)",
    "orig_pkts": 0,
    "history": "0",
    "host": "[HOST HIDDEN]",
    "resp_pkts": 1775,
    "service": "ssl",
    "message": "[MESSAGE HIDDEN]",
    "tags": [
      "_geoip_lookup_failure"
    ],
    "resp_ip_bytes": 13,
    "resp_geoip": {
      "postal_code": "94043",
      "country_code2": "US",
      "location": {
        "lat": 37.419200000000004,
        "lon": -122.0574
      },
      "country_name": "United States",
      "ip": "74.125.97.233",
      "city_name": "Mountain View",
      "dma_code": 807,
      "region_code": "CA",
      "country_code3": "US",
      "continent_code": "NA",
      "latitude": 37.419200000000004,
      "timezone": "America/Los_Angeles",
      "region_name": "California",
      "longitude": -122.0574
    },
    "path": "/usr/local/bro/logs/current/conn.log",
    "local_orig": "T",
    "tunnel_parents": "11353",
    "conn_state_full": "Normal SYN/FIN completion",
    "id_resp_host": "74.125.97.233",
    "ts": "1519847990.812600",
    "orig_geoip": {}
  },
  "fields": {
    "@timestamp": [
      "2018-02-28T19:59:50.812Z"
    ]
  }
}

As you can see, there is no field called geoip.location in the json, but the geoip.location option is the only one I can select.

I tried to change the index pattern, but it wouldn't allow me to.

What is going wrong?

I am using ELK 6.2.2

Any suggestions on what to do?

What is the mapping?
I guess you didn't define it correctly.

This is the Logstash config file:

########################
# logstash Configuration Files - Bro IDS Logs
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro conn logs
#
# Limitations: Standard Bro log delimiter is tab.
#
# Dependencies: Utilizing the logstash 'translate' filter requires having the logstash contrib plugins added, which are community supported and not part of the official release. Visit logstash.net to find out how to install these
#
#######################

input {
  # Tail the Bro (Zeek) connection log and tag each event so the filter
  # section below can apply the conn.log-specific parsing.
  file {
    type => "bro-conn_log"
    # "end" = only read lines appended after Logstash starts (like `tail -f`);
    # change to "beginning" to ingest the existing file contents once.
    start_position => "end"
    # Persists the read offset so restarts do not re-ingest or skip lines.
    sincedb_path => "/var/tmp/.bro_conn_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/nsm/bro/logs/current/conn.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-conn_log" {
    csv {
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","service","duration","orig_bytes","resp_bytes","conn_state","local_orig","missed_bytes","history","orig_pkts","orig_ip_bytes","resp_pkts","resp_ip_bytes","tunnel_parents"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal <tab> in between the two quotes on your logstash system, use a text editor like nano that doesn't convert tabs to spaces.
      separator => "	"
    }

    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively.
    #Bro's 'ts' is epoch seconds, hence the UNIX pattern.
    date {
      match => [ "ts", "UNIX" ]
    }

    # GeoIP enrichment for the responder (remote) address.
    # IMPORTANT: the target is left at the plugin default ("geoip") instead of a
    # custom name such as "resp_geoip". The default logstash-* index template
    # maps [geoip][location] as geo_point; any other target is dynamically
    # mapped by Elasticsearch with lat/lon as plain floats, which prevents
    # Kibana coordinate maps from using the field.
    geoip {
      source => "id.resp_h"
    }

    # GeoIP enrichment for the originator (usually a local/RFC1918 address, so
    # lookups will often fail and tag the event _geoip_lookup_failure).
    # NOTE(review): because this uses a custom target, [orig_geoip][location]
    # will NOT be mapped as geo_point by the default template — to plot it on a
    # map you must add an index template that maps it as geo_point explicitly.
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }

    #The following makes use of the translate filter (logstash contrib) to convert conn_state into human text. Saves having to look up values for packet introspection
    translate {
      field => "conn_state"

      destination => "conn_state_full"

      dictionary => [
                    "S0", "Connection attempt seen, no reply",
                    "S1", "Connection established, not terminated",
                    "S2", "Connection established and close attempt by originator seen (but no reply from responder)",
                    "S3", "Connection established and close attempt by responder seen (but no reply from originator)",
                    "SF", "Normal SYN/FIN completion",
                    "REJ", "Connection attempt rejected",
                    "RSTO", "Connection established, originator aborted (sent a RST)",
                    "RSTR", "Established, responder aborted",
                    "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder",
                    "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator",
                    "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)",
                    "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator",
                    "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
                    ]
    }

    # Cast numeric columns (csv yields strings) and rename the dotted Bro field
    # names, since dots in field names are problematic in Elasticsearch.
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "orig_bytes", "integer" ]
      convert => [ "duration", "float" ]
      convert => [ "resp_bytes", "integer" ]
      convert => [ "missed_bytes", "integer" ]
      convert => [ "orig_pkts", "integer" ]
      convert => [ "orig_ip_bytes", "integer" ]
      convert => [ "resp_pkts", "integer" ]
      convert => [ "resp_ip_bytes", "integer" ]
      rename =>  [ "id.orig_h", "id_orig_host" ]
      rename =>  [ "id.orig_p", "id_orig_port" ]
      rename =>  [ "id.resp_h", "id_resp_host" ]
      rename =>  [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  # Uncomment for debugging: prints each event to the console.
  # stdout { codec => rubydebug }

  # Host value must be a quoted string (or array of strings); an unquoted
  # bareword like `hosts => localhost` is rejected by the Logstash config
  # parser. Port defaults to 9200 when omitted.
  elasticsearch {
    hosts => ["localhost"]
  }
}

When adding the map to the Kibana dashboard, it allows me to chose geoip.location as the data source for the location.

As you can see, the geo ip locations are loaded correctly, but Kibana thinks that they are not.

I can't select the resp_geoip.location as a source for the map, should I change the Logstash configuration from resp_geoip to geoip?

Yes. Or change the mapping

The mapping in Kibana was inserted automatically, I didn't do a thing there.
Should I change the logstash config file to rename resp_geoip to geoip?
Just making sure that I won't break things when I do so

Did you try going to Kibana -> Management -> Index Patterns -> Refresh Field List? If it's definitely a geoip type already then maybe Kibana just needs a refresh.

If it isn't, I would recommend explicitly setting the Elasticsearch mapping for that field to be geoip which should make sure it will always work in the future.

I meant change elasticsearch mapping.

If you run

GET index/_mapping

You'll see that your lat/lon fields have been set as float instead of the parent field being geo_point.

1 Like

That is correct, it shows the mapping for every individual logstash item.

What I do notice is that in every mapping this section is present

"orig_geoip": {
            "properties": {
              "city_name": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "continent_code": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "country_code2": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "country_code3": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "country_name": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "dma_code": {
                "type": "long"
              },
              "ip": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "latitude": {
                "type": "float"
              },
              "location": {
                "properties": {
                  "lat": {
                    "type": "float"
                  },
                  "lon": {
                    "type": "float"
                  }
                }
              },
              "longitude": {
                "type": "float"
              },
              "postal_code": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "region_code": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "region_name": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              },
              "timezone": {
                "type": "text",
                "norms": false,
                "fields": {
                  "keyword": {
                    "type": "keyword",
                    "ignore_above": 256
                  }
                }
              }
            }
          }

Now my guess is that the location properties need to be formatted as following

"geoip": {
            "dynamic": "true",
            "properties": {
              "ip": {
                "type": "ip"
              },
              "latitude": {
                "type": "half_float"
              },
              "location": {
                "type": "geo_point"
              },
              "longitude": {
                "type": "half_float"
              }
            }
          }

Especially the part in the middle, where the longitude and latitude are defined as half_float and location is defined as geo_point.

I have no idea how to change those mappings, to be really honest. The ELK stack added these mappings by itself so I haven't touched a single word of it.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.