Hi,
Somehow using those parsers I am unable to index my data in elastic search any idea why?
[WARN ] 2020-03-18 18:48:35.675 [[main]>worker3] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"problox-2020.03.18", :_type=>"_doc", :routing=>nil}, #<LogStash::Event:0x2da962da>], :response=>{"index"=>{"_index"=>"problox-2020.03.18", "_type"=>"_doc", "_id"=>"-tPL7XABjMrTL_9AtrF2", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [timestamp] of type [date] in document with id '-tPL7XABjMrTL_9AtrF2'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"failed to parse date field [2020-03-18 07:11:22.911641] with format [strict_date_optional_time||epoch_millis]", "caused_by"=>{"type"=>"date_time_parse_exception", "reason"=>"Failed to parse with all enclosed parsers"}}}}}}
````
here is my final file
````
input {
# SAP Trap Internal
# file {
# path => ["/var/log/data/saphp/honeysap-internal.log"]
# codec => json
# type => "saptrapinternal"
# }
# SAP Trap External
# file {
# path => ["/var/log/data/saphp/honeysap-external.log"]
# codec => json
# type => "saptrapexternal"
# }
#}
stdin {
codec => json
type => "saptrapexternal"
}
}
filter {
#SAP Internal
if [type] == "saptrapinternal"{
mutate {
remove_field => [ "path" ]
gsub => [ "[message]", "^[^\{]+", "" ]
rename => {
"source_ip" => "src_ip"
"target_port" => "dest_port"
"source_port" => "src_port"
}
}
json {
skip_on_invalid_json => true
source => "[message]"
remove_field => [ "[message]" ]
}
# Set @timestamp based on the timestamp field from the JSON payload. If successful remove the datetime field. Set the timezone as needed.
date {
match => [ "[timestamp]", "ISO8601" ]
remove_field => [ "[timestamp]" ]
timezone => "UTC"
}
}
############
if [type] == "saptrapexternal" {
mutate {
remove_field => [ "path" ]
gsub => [ "[message]", "^[^\{]+", "" ]
rename => {
"source_ip" => "src_ip"
"target_port" => "dest_port"
"source_port" => "src_port"
}
}
json {
skip_on_invalid_json => "true"
source => "[message]"
remove_field => [ "[message]" ]
}
# mutate {
# remove_field => [ "path" ]
# gsub => [ "[message]", "^[^\{]+", "" ]
# rename => {
# "source_ip" => "src_ip"
# "target_port" => "dest_port"
# "source_port" => "src_port"
# }
# }
# Set @timestamp based on the timestamp field from the JSON payload. If successful remove the datetime field. Set the timezone as needed.
date {
match => [ "[timestamp]", "ISO8601" ]
remove_field => [ "[timestamp]" ]
timezone => "UTC"
}
}
#### SAP Trap
if [type] == "saptrapinternal" {
mutate {
add_field => {
trap_type => "SAP-iNTERNAL"
}
}
}
if [type] == "saptrapexternal" {
mutate {
add_field => {
trap_type => "SAP-Router"
}
}
}
}
output {
elasticsearch {
hosts => ["https://127.0.0.1:16577"]
user => xxxx
password => XXXX
ssl => true
ssl_certificate_verification => false
template => "/etc/logstash/elasticsearch-template-es7x.json"
ilm_enabled => false
index => "problox-%{+YYYY.MM.dd}"
# document_type => "doc"
}
#if [type] == "Suricata" {
# file {
# file_mode => 0770
# path => "/data/suricata/log/suricata_ews.log"
# }
#}
# Debug output
#if [type] == "XYZ" {
# stdout {
# codec => rubydebug
# }
#}
# Debug output
#stdout {
# codec => rubydebug
#}
}
#output {stdout {}}
And my mutate filter is now working as well