I'm linking ELK to Snort.
There's been an error with Logstash, but I can't figure out how to deal with it.
Help me out. I'm using ELK 6.2.2, CentOS 7, and Snort 2.9.16.
Config file below!
Please do not post pictures of text, just post the text itself.
Logstash is looking for a filter called data, which does not exist. Do you have a typo in the config for a date filter?
Here you go. Code!
input
{
file
{
path => "/var/log/snort/alert"
type => "snort_tcp"
start_position => "beginning"
ignore_older => 0
sincedb_path => "/dev/null"
}
file {
path => "/opt/lampp/logs/access_log"
type => "access_log"
start_position => "beginning"
ignore_older => 0
sincedb_path => "/dev/null"
}
}
filter{
if [type] == "snort_tcp" {
grok {
add_tag => ["IDS"]
match => ["message", "%{SNORTIME:snort_time}\s+[**]\s+[%{INT:ids_gid}:%{INT:ids_sid}:%{INT:ids_rev}]\s+[%{DATA:Attk_Category}]\s+[%{DATA:Attk_Level}]\s+%{DATA:Attk_Name}\s+[**]\s+[Classification:\s+%{DATA:ids_classification}]\s+[Priority:\s+%{INT:priority}]\s+{%{WORD:ids_proto}}\s+%{IP:src_ip}:%{INT:src_port}\s+->\s+%{IP:dst_ip}:%{INT:dst_port}"]
}
}
date {
match => ["snort_time", "MM/dd-HH:mm:ss.SSSSSS"]
}
geoip {
source => "src_ip"
target => "geoip_snort_src"
}
geoip {
source => "dst_ip"
target => "geoip_snort_dst"
}
if [priority] == "1" {
mutate {
add_field => {"severity" => "High & Medium & Low"}
}
}
if [priority] == "2" {
mutate {
add_field => {"severity" => "High & Medium"}
}
}
if [priority] == "3" {
mutate {
add_field => {"severity" => "High & Low"}
}
}
if [priority] == "4" {
mutate {
add_field => {"severity" => "Medium & Low"}
}
}
if [priority] == "5" {
mutate {
add_field => {"severity" => "High"}
}
}
if [priority] == "6" {
mutate {
add_field => {"severity" => "Medium"}
}
}
if [priority] == "7" {
mutate {
add_field => {"severity" => "Low"}
}
}
if [priority] == "8" {
mutate {
add_field => {"severity" => "ETC"}
}
}
}
filter {
if [type] == "access_log" {
grok {
add_tag => ["httpd"]
match => ["message", "%{IPORHOST:clientip} %{USER:ident} %{USER:auth}[%{HTTPDATE:timestamp}] "(?:%{WORD:verb} %{NOTSPACE:request}(?:HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response}(?:%{NUMBER:bytes}I-) %{QS:referrer} %{QS:agent}"]
}
data {
match => ["timestamp", "dd/MMM/YYYY:HH:mm:ss Z"]
}
mutate {
convert => {"bytes" => "integer"}
}
geoip {
source => "clientip"
}
mutate {
convert => {"response" => "integer"}
}
}
}
output
{
if [type] == "snort_tcp" {
elasticsearch {
hosts => ["localhost:9200"]
# manage_template => true
index => "logstash-snort"
}
}
if [type] == "access_log" {
elasticsearch {
hosts => ["localhost:9200"]
index => "logstash-httpd"
}
}
}
There's no filter plugin named data (as mentioned in your Logstash error). This should be date, I believe.
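In other words, that block in your access_log filter would just become (same match pattern you already have, only the plugin name changes):

date {
  match => ["timestamp", "dd/MMM/YYYY:HH:mm:ss Z"]
}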
You could simplify the priority lookups using a translate filter.
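Something along these lines, as a rough sketch (assuming the logstash-filter-translate plugin is available in your install; the dictionary just mirrors the priority-to-severity mapping already in your config):

filter {
  if [type] == "snort_tcp" {
    translate {
      # look up the numeric priority and write the matching label to "severity"
      field       => "priority"
      destination => "severity"
      dictionary  => {
        "1" => "High & Medium & Low"
        "2" => "High & Medium"
        "3" => "High & Low"
        "4" => "Medium & Low"
        "5" => "High"
        "6" => "Medium"
        "7" => "Low"
        "8" => "ETC"
      }
      fallback => "ETC"  # anything unmapped gets a default label
    }
  }
}

That replaces the eight separate if/mutate blocks with a single lookup.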