My goal is to be able to geolocate on a kibana map the connections that interest me.
The problem is that the generated indices do not create the correct type of data for kibana to draw. It would be Geopoints.
The data way would be
Firewall logs –> logstash (filter)-> elasticsearch(index-patern) <-kibana (map)
Logstash (Basically - sort data into fields for later use and use geoip when it's a public ip.)
input {
# Firewall
udp {
port => 5000
type => "logs"
tags => ["mikrotik","firewall"]
id => "firewall"
}
}
filter {
if "firewall" in [message] {
grok {
patterns_dir => ["/etc/logstash/conf.d/patterns/"]
match => { "message" => "%{MIKROTIKFIREWALL}"}
}
# src_ip first check if clientip field even exists
if [src_ip] {
# then check if it's a internal IP space and if it is add the internalIP tag
if [src_ip] !~ /localhost|\-/ {
cidr {
add_tag => [ "src_internalIP" ]
address => [ "%{src_ip}" ]
network => [ "192.168.0.0/8", "127.0.0.1" , "255.255.255.255", "0.0.0.0", "239.255.255.250", "172.16.0.0/12", "10.0.0.0/8", "224.0.0.0/4", "240.0.0.0/4"]
}
}
# don't run geoip if it's internalIP or localhost (aka only external IPs)
if "src_internalIP" not in [tags] and [src_ip] !~ /localhost|\-/ {
geoip {
source => "src_ip"
target => "src_geoip"
}
}
}
# dst_ip first check if clientip field even exists
if [dst_ip] {
# then check if it's in the 192.168.x.x space and if it is add the internalIP tag
if [dst_ip] !~ /localhost|\-/ {
cidr {
add_tag => [ "dst_internalIP" ]
address => [ "%{dst_ip}" ]
network => [ "192.168.0.0/8", "127.0.0.1" , "255.255.255.255", "0.0.0.0", "239.255.255.250", "172.16.0.0/12", "10.0.0.0/8", "224.0.0.0/4", "240.0.0.0/4"]
}
}
# don't run geoip if it's internalIP or localhost (aka only external IPs)
if "dst_internalIP" not in [tags] and [dst_ip] !~ /localhost|\-/ {
geoip {
source => "dst_ip"
target => "dst_geoip"
}
}
}
}
}
output {
if "firewall" in [message] {
elasticsearch {
hosts => ["https://elastic-vm-1.viten.net:9200"]
cacert => '/etc/logstash/certs/ca/ca.crt'
user => 'logstash_internal'
password => 'B3watermyfriend.'
manage_template => true
# template => "/path/to/logstash/logstash-apache.json"
template_name => "connections"
ilm_rollover_alias => "connection-%{+YYYY.MM.dd}"
ilm_pattern => "000001"
ilm_policy => "prso_30"
#index => "connection-%{+YYYY.MM.dd}"
action => "create"
}
}
}
Elasticsearch
index templates
PUT _index_template/connections
{
"version": 2,
"template": {
"settings": {
"index": {
"number_of_replicas": "1"
}
},
"mappings": {
"properties": {
"acction": {
"type": "keyword"
},
"dst_ip": {
"type": "ip"
},
"dst_port": {
"type": "integer"
},
"dst_zone": {
"type": "keyword"
},
"fw_chain": {
"eager_global_ordinals": false,
"norms": false,
"index": true,
"store": false,
"type": "keyword",
"split_queries_on_whitespace": false,
"index_options": "freqs",
"doc_values": true
},
"host.ip": {
"type": "ip"
},
"infrastructure": {
"type": "keyword"
},
"length": {
"type": "integer"
},
"loglevel": {
"eager_global_ordinals": false,
"norms": false,
"index": true,
"store": false,
"type": "keyword",
"split_queries_on_whitespace": false,
"index_options": "freqs",
"doc_values": true
},
"message": {
"type": "text"
},
"nat_type": {
"type": "keyword"
},
"natdst_ip": {
"type": "ip"
},
"natdst_port": {
"type": "integer"
},
"natsrc_ip": {
"type": "ip"
},
"natsrc_port": {
"type": "integer"
},
"proto": {
"eager_global_ordinals": false,
"norms": false,
"index": true,
"store": false,
"type": "keyword",
"split_queries_on_whitespace": false,
"index_options": "freqs",
"doc_values": true
},
"src_ip": {
"type": "ip"
},
"src_mac": {
"eager_global_ordinals": false,
"norms": false,
"index": true,
"store": false,
"type": "keyword",
"split_queries_on_whitespace": false,
"index_options": "freqs",
"doc_values": true
},
"src_port": {
"type": "integer"
},
"src_zone": {
"type": "keyword"
},
"tcp_flags": {
"type": "text"
}
}
},
"aliases": {
"conexiones": {}
}
},
"index_patterns": [
"conn*"
],
"data_stream": {
"hidden": false,
"allow_custom_routing": false
},
"composed_of": [
"**dst_geoip**",
"**src_geoip**",
"registros-settings"
]
}
Use component templates
dst_geoip
{
"properties": {
"geo": {
"type": "object",
"properties": {
"region_name": {
"type": "keyword"
},
"city_name": {
"type": "keyword"
},
"region_iso_code": {
"type": "integer"
},
"timezone": {
"type": "text"
},
"latitude": {
"type": "float"
},
"country_name": {
"type": "keyword"
},
"continent_code": {
"type": "byte"
},
"location": {
"type": "geo_point"
},
"country_iso_code": {
"type": "integer"
},
"postal_code": {
"type": "integer"
},
"longitude": {
"type": "float"
}
}
},
"ip": {
"index": true,
"store": false,
"type": "ip",
"doc_values": true
},
"mmdb": {
"type": "object",
"properties": {
"dma_code": {
"type": "integer"
}
}
}
}
}
src_geoip
{
"properties": {
"geo": {
"type": "object",
"properties": {
"region_name": {
"type": "keyword"
},
"city_name": {
"type": "keyword"
},
"region_iso_code": {
"type": "integer"
},
"timezone": {
"type": "text"
},
"latitude": {
"type": "float"
},
"country_name": {
"type": "keyword"
},
"continent_code": {
"type": "byte"
},
"location": {
"type": "geo_point"
},
"country_iso_code": {
"type": "integer"
},
"postal_code": {
"type": "integer"
},
"longitude": {
"type": "float"
}
}
},
"ip": {
"index": true,
"store": false,
"type": "ip",
"doc_values": true
},
"mmdb": {
"type": "object",
"properties": {
"dma_code": {
"type": "integer"
}
}
}
}
}
Some time ago the mistake was that the data type had to be of a specific mathematical type. Latitude and longitude - both floats or long integers, I can't remember.
I have tried to change them from the mathematical type to the pair that make up the geo point type. Or directly let the logstash transformer treat it as a geo point... I've tried it in several ways months ago because I got desperate and left it for a while.
Does anyone have clear what would be the most normal use case and easy to use? - some example ...
thank you so much for reading this far