Hello, I'm fairly new to Logstash. So far I've managed to set up two servers: one runs the Elastic Stack, and the other ships logs to it with Beats. I'm currently filtering two different logs, both of which contain IPs, and I've got one of them working on a geo map. My question is: is it possible to set up a second geo map that only contains the IPs from my second log?
My Logstash config files, in case they help:
input/output:
input {
  beats {
    port => 5044
    ssl => false
  }
}

output {
  elasticsearch {
    hosts => "localhost:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
    template => "/etc/logstash/templates/filebeat-index-template.json"
    template_name => "filebeat"
  }
}
geoip filter:
filter {
  grok {
    match => { "message" => " %{IP:client}" }
  }
  geoip {
    source => "client"
    target => "geoip"
    database => "/etc/logstash/GeoLite2-City.mmdb"
    add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
    add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
  }
  mutate {
    convert => [ "[geoip][coordinates]", "float" ]
  }
}
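(Side note: this geoip block isn't inside any conditional, so as far as I understand it runs against every event coming in from Beats. My guess is I could scope it to just the first log by wrapping it in the same kind of conditional I use below, keeping my current field names. Untested sketch:)

filter {
  if [fields][log_type] == "cowrie" {
    # only events tagged as the first log get the grok + geoip treatment
    grok {
      match => { "message" => " %{IP:client}" }
    }
    geoip {
      source => "client"
      target => "geoip"
      database => "/etc/logstash/GeoLite2-City.mmdb"
    }
  }
}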
cowrie filter:
filter {
  if [fields][log_type] == "cowrie" {
    grok {
      match => { "message" => "%{GREEDYDATA:request}" }
    }
    json {
      source => "request"
      target => "parsedJson"
      remove_field => ["request"]
    }
    mutate {
      add_field => {
        "src_ip" => "%{[parsedJson][src_ip]}"
        "input" => "%{[parsedJson][input]}"
        "eventid" => "%{[parsedJson][eventid]}"
        "message" => "%{[parsedJson][message]}"
        "system" => "%{[parsedJson][system]}"
      }
    }
  }
}
second log filter:
filter {
  if [fields][log_type] == "messages" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:nf_timestamp}\s*%{HOSTNAME:nf_host}\s*kernel\S+\s*%{WORD:nf_action}?.*IN=%{USERNAME:nf_in_interface}?.*OUT=%{USERNAME:nf_out_interface}?.*MAC=%{COMMONMAC:nf_dst_mac}:%{COMMONMAC:nf_src_mac}?.*SRC=%{IPV4:nf_src_ip}.*DST=%{IPV4:nf_dst_ip}.*PROTO=%{WORD:nf_protocol}?.*SPT=%{INT:nf_src_port}?.*DPT=%{INT:nf_dst_port}?.*" }
      add_field => { "eventName" => "groke" }
    }
  }
}
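What I had in mind for the second map is adding a geoip block inside this "messages" conditional, using the nf_src_ip field I already extract and writing into a separate target (I've called it geoip_messages here, that name is just my guess), so I could build a second coordinate map in Kibana off that field. Untested sketch:

filter {
  if [fields][log_type] == "messages" {
    # second geoip lookup, kept apart from the first one via its own target field
    geoip {
      source => "nf_src_ip"
      target => "geoip_messages"
      database => "/etc/logstash/GeoLite2-City.mmdb"
      add_field => [ "[geoip_messages][coordinates]", "%{[geoip_messages][longitude]}" ]
      add_field => [ "[geoip_messages][coordinates]", "%{[geoip_messages][latitude]}" ]
    }
    mutate {
      convert => [ "[geoip_messages][coordinates]", "float" ]
    }
  }
}

I assume the index template would also need to map geoip_messages.location (or the coordinates field) as geo_point for Kibana to offer it in the map visualization, but I'm not sure about that part. Would something like this work, or is there a better way?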
Thanks in advance