First, I am not good at English, so I am writing this question with the help of Google Translate. Please understand if the grammar is wrong or strange.
I'm trying to visualize IDS detections by connecting Suricata to ELK as a small school project.
pc1 has ELK 7.1.1 installed and pc2 has Suricata 5.0.3 installed.
pc2's /var/log/suricata/eve.json is mounted on pc1 over NFS.
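For reference, this is roughly how the share is set up (a sketch from memory; the hostname pc2 and the export options may differ slightly from my actual setup):

# on pc2, /etc/exports: export the Suricata log directory read-only to pc1
/var/log/suricata  pc1(ro,sync,no_subtree_check)
# on pc2: reload the export table
sudo exportfs -ra
# on pc1: mount the share where Logstash expects to read eve.json
sudo mount -t nfs pc2:/var/log/suricata /var/log/suricata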
In the /usr/share/logstash/bin/ directory on pc1,
running the command sudo ./logstash -f /usr/share/logstash/bin/logstash.conf gives the following error:
[ERROR] 2020-07-01 10:04:20.718 [Ruby-0-Thread-5: :1] elasticsearch - Failed to install template. {:message=>"Got response code '400' contacting Elasticsearch at URL 'http://localhost:9200/_template/logstash'",
:class=>"LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError", :backtrace=>["/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb:80:in `perform_request'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:291:in `perform_request_to_url'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:278:in `block in perform_request'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:373:in `with_connection'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:277:in `perform_request'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:285:in `block in Pool'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client.rb:352:in `template_put'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/http_client.rb:86:in `template_install'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/template_manager.rb:28:in `install'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/template_manager.rb:16:in `install_template'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/common.rb:130:in `install_template'",
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-10.1.0-java/lib/logstash/outputs/elasticsearch/common.rb:51:in `block in setup_after_successful_connection'"]}
Elasticsearch itself is working fine. Here is the output of curl -XGET "localhost:9200":
{
  "name": "elk",
  "cluster_name": "elasticsearch",
  "cluster_uuid": "xb9K53kMTHKnmf0i_eXi4Q",
  "version": {
    "number": "7.1.1",
    "build_flavor": "default",
    "build_type": "deb",
    "build_hash": "7a013de",
    "build_date": "2019-05-23T14:04:00.380842Z",
    "build_snapshot": false,
    "lucene_version": "8.0.0",
    "minimum_wire_compatibility_version": "6.8.0",
    "minimum_index_compatibility_version": "6.0.0-beta1"
  },
  "tagline": "You Know, for Search"
}
I am not sure how to fix this.
Here are the contents of logstash.conf:
input {
  file {
    path => ["/var/log/suricata/eve.json"]
    sincedb_path => ["/var/lib/logstash/since.db"]
    codec => json
    type => "SuricataIDPS"
  }
}
filter {
  if [type] == "SuricataIDPS" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    ruby {
      code => "
        if event.get('[event_type]') == 'fileinfo'
          event.set('[fileinfo][type]', event.get('[fileinfo][magic]').to_s.split(',')[0])
        end
      "
    }
    ruby {
      code => "
        if event.get('[event_type]') == 'alert'
          sp = event.get('[alert][signature]').to_s.split(' group ')
          if (sp.length == 2) and /\A\d+\z/.match(sp[1])
            event.set('[alert][signature]', sp[0])
          end
        end
      "
    }
    metrics {
      meter => [ "eve_insert" ]
      add_tag => "metric"
      flush_interval => 30
    }
  }
  if [http] {
    useragent {
      source => "[http][http_user_agent]"
      target => "[http][user_agent]"
    }
  }
  if [src_ip] {
    geoip {
      source => "src_ip"
      target => "geoip"
      #database => "/usr/share/GeoIp/GeoLite2-City.mmdb"
      #add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      #add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
  }
}
output {
  if [event_type] and [event_type] != 'stats' {
    elasticsearch {
      hosts => "127.0.0.1"
      index => "logstash-%{event_type}-%{+YYYY.MM.dd}"
      template_overwrite => true
      template => "/usr/share/logstash/bin/KTS6/es-template/elasticsearch6-template.json"
    }
  }
  else {
    elasticsearch {
      hosts => "127.0.0.1"
      index => "logstash-%{+YYYY.MM.dd}"
      template_overwrite => true
      template => "/usr/share/logstash/bin/KTS6/es-template/elasticsearch6-template.json"
    }
  }
}
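For what it's worth, the pipeline can also be syntax-checked before starting it, using Logstash's test flag (same config path as above):

# validate the pipeline configuration and exit without starting Logstash
sudo ./logstash -f /usr/share/logstash/bin/logstash.conf --config.test_and_exit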
And here are the contents of elasticsearch6-template.json:
{
  "template" : "logstash-*",
  "version" : 60001,
  "settings" : {
    "number_of_replicas": 0,
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
      "dynamic_templates" : [ {
        "message_field" : {
          "path_match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text",
            "norms" : false
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text", "norms" : false,
            "fields" : {
              "keyword" : { "type": "keyword", "ignore_above": 256 }
            }
          }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date"},
        "@version": { "type": "keyword"},
        "geoip" : {
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip" },
            "location" : { "type" : "geo_point" },
            "latitude" : { "type" : "half_float" },
            "longitude" : { "type" : "half_float" }
          }
        }
      }
    }
  }
}
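I can also check what template (if any) is already installed under that name on pc1 with:

# show the 'logstash' index template currently stored in Elasticsearch, if any
curl -XGET 'http://localhost:9200/_template/logstash?pretty'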
Please help me solve this problem.
Thank you.