I'm trying to use a custom MaxMind private-IP .mmdb GeoIP2 database with Logstash. My config is pretty simple, and I'm trying to decode NetFlow:
filter{
geoip {
database => "/usr/share/logstash/geolite2/test.mmdb"
source => "ipv4_src_addr"
}
}
{
"_index": "netflow-2018.11.13",
"_type": "doc",
"_id": "UZNKDWcBb3n6NizX4hSC",
"_version": 1,
"_score": null,
"_source": {
"@version": "1",
"@timestamp": "2018-11-13T13:38:22.000Z",
"netflow": {
"output_snmp": 17,
"tcp_flags": 24,
"l4_src_port": 48908,
"in_bytes": 685,
"ipv4_dst_addr": "69.26.161.7",
"flow_sampler_id": 0,
"in_pkts": 5,
"last_switched": "2018-11-13T13:38:22.999Z",
"input_snmp": 7,
"src_tos": 0,
"src_mask": 23,
"src_as": 0,
"flowset_id": 257,
"version": 9,
"protocol": 6,
"dst_mask": 0,
"flow_seq_num": 205368180,
"dst_as": 0,
"first_switched": "2018-11-13T13:37:22.999Z",
"ipv4_next_hop": "10.230.2.102",
"ipv4_src_addr": "10.50.172.174",
"l4_dst_port": 444
},
I can see the database being loaded OK in the Logstash Docker container logs:
"message": "[2018-11-13T11:27:14,041][INFO ][logstash.filters.geoip ] Using geoip database {:path=>\"/usr/share/logstash/geolite2/test.mmdb\"}"
But then everything is being tagged with _geoip_lookup_failure.
I know the DB is valid, as I can query it using the Python API:
import geoip2.database
reader = geoip2.database.Reader('/Users/me/Downloads/test.mmdb')
response = reader.city('10.50.172.174')
response.location.latitude
51.445324
response.location.longitude
-0.443962
response
geoip2.models.City({'city': {'names': {'en': ' FELTHAM'}}, 'continent': {'code': 'something'}, 'country': {'iso_code': ' UNITED KINGDOM', 'names': {'en': ' UNITED KINGDOM'}}, 'location': {'accuracy_radius': 1000, 'latitude': 51.445324, 'longitude': -0.443962}, 'traits': {'ip_address': '10.50.172.174'}}, ['en'])