Updated geoip

I have configured geoip in Logstash and it is up to date.
Can you help me with a sample configuration?

A sample config for what, sorry?

Below is my Logstash configuration:

input {
  tcp {
    port => "5141"
    codec => json
    type => "syslog"
  }
}

filter {
  geoip {
    source => "src"
    target => "src_geoip"
  }
}

output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["https://xxxx:9200", "https://xxxx:9200"]
    user => "elastic"
    password => "xxxxxxx"
    cacert => "/etc/logstash/certs/ca.crt"
    index => "jstest-%{+YYYY.MM.dd}"
    action => "index"
  }
}

I want the GeoIP to be updated.
How can I add it to my configuration?

You have it there, does that not work?

It works.
Here is the result in Kibana:

{
  "_index": "jstest-2021.02.03",
  "_type": "_doc",
  "_id": "IIiuZ3cB-VKs2ad2qUFR",
  "_version": 1,
  "_score": null,
  "_source": {
    "logSourceIdentifier": "192.168.0.3",
    "hasIdentity": "false",
    "src_geoip": {},
    "src": "10.226.34.211",
    "syslog5424_pri": "01",
    "type": "Event",
    "startTimeISO": "2021-02-03T12:37:22.484+01:00",
    "storageTimeEpoch": "1612352242484",
    "srcPostNATPort": "0",
    "domainName": "Decathlon_N1",
    "credibility": "5",
    "isoTimeFormat": "yyyy-MM-dd'T'HH:mm:ss.SSSZ",
    "lowLevelCategory": "Firewall Permit",
    "payload": "<189>logver=604021723 timestamp=1612352241 tz=\"UTC+1:00\" devname=\"PARTNER_NTT_PARIS-FW1\" devid=\"FGT40FTK20028517\" vd=\"root\" date=2021-02-03 time=12:37:21 logid=\"0000000013\" type=\"traffic\" subtype=\"forward\" level=\"notice\" eventtime=1612352241845788391 tz=\"+0100\" srcip=10.226.34.211 srcport=25218 srcintf=\"To-DECATH\" srcintfrole=\"undefined\" dstip=192.168.56.114 dstport=80 dstintf=\"a\" dstintfrole=\"lan\" srccountry=\"Reserved\" dstcountry=\"Reserved\" sessionid=1299068879 proto=6 action=\"close\" policyid=22 policytype=\"policy\" poluuid=\"0e54417c-1222-51eb-0e8e-350c4986579d\" policyname=\"DECATHLON-1\" service=\"HTTP\" trandisp=\"noop\" duration=2 sentbyte=1861 rcvdbyte=1076 sentpkt=8 rcvdpkt=8 vpn=\"To-DECATH\" vpntype=\"ipsec-static\" appcat=\"unscanned\" masterdstmac=\"00:09:0f:09:2a:12\" dstmac=\"00:09:0f:09:2a:12\" dstserver=0\n",
    "@version": "1",
    "domainID": "4",
    "dst": "192.168.56.114",
    "highLevelCategory": "Access",
    "@timestamp": "2021-02-03T11:37:09.974Z",
    "eventCnt": "1",
    "category": "close",
    "name": "DefaultProfile",
    "BytesReceived": "1076",
    "protocolName": "tcp",
    "tags": [
      "_jsonparsefailure",
      "_geoip_lookup_failure"
    ],
    "dstPort": "80",
    "startTimeEpoch": "1612352242484",
    "Application Category": "unscanned",
    "relevance": "5",
    "srcPort": "25218",
    "logSource": "FortiGate @ 192.168.0.3",
    "srcPreNATPort": "0",
    "logSourceType": "Fortinet FortiGate Security Gateway",
    "port": 57282,
    "storageTimeISO": "2021-02-03T12:37:22.484+01:00",
    "syslog_index": "<01>",
    "host": "198.18.112.140",
    "dstPreNATPort": "0",
    "sev": "0",
    "eventName": "Forward Traffic",
    "dstPostNATPort": "0",
    "syslog_hostname": "hostname",
    "deploymentID": "5c15c102-a647-11ea-8226-00505601062b",
    "hasOffense": "false",
    "version": "1.0",
    "eventDescription": "Forward Traffic",
    "dstNetName": "Net-10-172-192.Net_192_168_0_0",
    "BytesSent": "1861",
    "protocolID": "6",
    "srcNetName": "Net-10-172-192.Net_10_0_0_0",
    "devTimeISO": "2021-02-03T12:37:21.000+01:00",
    "devTimeEpoch": "1612352241000",
    "logSourceGroup": "Other"
  },
  "fields": {
    "devTimeISO": [
      "2021-02-03T11:37:21.000Z"
    ],
    "@timestamp": [
      "2021-02-03T11:37:09.974Z"
    ],
    "storageTimeISO": [
      "2021-02-03T11:37:22.484Z"
    ],
    "startTimeISO": [
      "2021-02-03T11:37:22.484Z"
    ]
  },
  "sort": [
    1612352229974
  ]
}

I don't know why these tags are present:
"_jsonparsefailure"
"_geoip_lookup_failure"

IP addresses starting with 10 are private IP addresses and cannot be looked up with geoip because they are not public.
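
One common workaround for the "_geoip_lookup_failure" tag is to only run the geoip filter when the address is public. A minimal sketch, assuming the address is in the src field as in your config (the pattern covers the RFC 1918 ranges and loopback):

filter {
  # Run the GeoIP lookup only for public addresses; private and loopback
  # ranges are skipped so the event never gets _geoip_lookup_failure.
  if [src] !~ /^(10\.|192\.168\.|172\.(1[6-9]|2[0-9]|3[01])\.|127\.)/ {
    geoip {
      source => "src"
      target => "src_geoip"
    }
  } else {
    # Tag private sources so they can be handled separately later.
    mutate {
      add_tag => [ "private-ip" ]
    }
  }
}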

So what is the solution?

Private IP addresses correspond to different locations for every customer, so there is no standard solution. You will need to create a mapping file and a custom config to do this type of translation.
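
If you go that route, one way to apply such a mapping is the translate filter (logstash-filter-translate), which is bundled with Logstash. Everything below is illustration only: the dictionary path, the src_site field and the site names are placeholders to replace with your own values.

filter {
  # Look the private address up in a customer-specific dictionary file.
  # The path below is hypothetical.
  translate {
    field           => "src"
    destination     => "src_site"
    dictionary_path => "/etc/logstash/private_ip_sites.yml"
    fallback        => "unknown"
  }
}

# /etc/logstash/private_ip_sites.yml (example content only)
"10.226.34.211": "branch-office-1"
"10.226.34.212": "branch-office-2"

The filter can also treat dictionary keys as regular expressions (regex => true together with exact => true), which is useful if you want to map whole subnets rather than individual addresses.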

Can you help me to create this file?

I would recommend searching this forum as I believe this has been asked before.

OK, I'll look for it. Please don't hesitate to send me the link as soon as you find it.

I saw that someone used the database option like below.
As soon as I add this setting, nothing works anymore:

input {
  tcp {
    port => "5141"
    codec => json
    type => "syslog"
  }
}

filter {
  grok {
    match => { "message" => "%{SYSLOG5424PRI:syslog_index}-\s*%{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:syslog_message}" }
  }
  json {
    source => "syslog_message"
  }
  if [src] !~ /^(10.|172.1[6-9]|172.2[0-9]|172.3[0-1]|192.168|127.)/ {
    geoip {
      database => "/data/logstash/geo/GeoLite2.mmdb"
      source => "src"
      target => "src_geoip"
      add_tag => [ "private-ip" ]
    }
    mutate {
      remove_tag => [ "_geoip_lookup_failure" ]
      remove_field => [ "message", "syslog_message" ]
    }
  }
}

output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["https://xxxx:9200", "https://xxxx:9200"]
    user => "elastic"
    password => "xxxxxx"
    cacert => "/etc/logstash/certs/ca.crt"
    index => "jstest-%{+YYYY.MM.dd}"
    action => "index"
  }
}

I need help with the parsing settings.
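
Two things stand out in that conditional: the dots are unescaped, so "10." also matches addresses such as 107.x.x.x, and the if branch only runs for public addresses, so the private-ip tag added inside the geoip filter never marks a private source. A sketch of how that part could be arranged instead, reusing the database path from the config above (assuming the .mmdb file really exists at that location):

filter {
  # grok and json filters as above ...

  # Dots escaped so "." only matches a literal dot; public addresses get the lookup.
  if [src] !~ /^(10\.|192\.168\.|172\.(1[6-9]|2[0-9]|3[01])\.|127\.)/ {
    geoip {
      database => "/data/logstash/geo/GeoLite2.mmdb"
      source => "src"
      target => "src_geoip"
    }
  } else {
    # Private addresses are only tagged; no lookup is attempted, so no failure tag.
    mutate {
      add_tag => [ "private-ip" ]
    }
  }

  mutate {
    remove_field => [ "message", "syslog_message" ]
  }
}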
