I have been trying to get a CSV file parsed into Logstash, and the basic parsing works fine. However, I need to add either a new tag or a new field based on certain conditions.
I have validated in the logs that I have text matching the conditions below, but no tags or fields are added based on my conditions. Any help will be much appreciated. (Neither the commented-out section of conditions nor the uncommented, simpler one seems to make a difference.)
input
{
  # Tail every CSV file dropped into the landing directory.
  file {
    path           => "C:/ELK/Data_Landing/*.csv"
    # Read pre-existing files from the top, not just newly appended lines.
    start_position => "beginning"
    # "NUL" is the Windows null device: sincedb state is discarded, so
    # files are re-read from scratch on every pipeline restart.
    sincedb_path   => "NUL"
  }
}
filter
{
  # Parse the comma-separated alert export; the header row is skipped and
  # column names are assigned explicitly below.
  csv {
    separator                 => ","
    skip_header               => true
    skip_empty_columns        => true
    autogenerate_column_names => false
    # Expected column layout as of 31-01-2019 10:13
    columns => [ "Severity" , "AlertReceived" , "Node" , "Application" , "MessageGroup" , "Object" , "TemplateName" , "ConditionMatched" , "MessageText" , "OpsAck" ]
  }

  # ------------------------------------------------------------------
  # FIX: `add_field` takes a hash — the original form
  #   mutate { add_field => "ProductType" => "SXXX" }
  # is invalid Logstash syntax, which is why no field was ever added.
  # The correct form is  add_field => { "ProductType" => "SXXX" }.
  #
  # The commented-out conditional chain below is corrected the same way.
  # Further fixes in it: regex patterns must not quote the value being
  # matched (/^"tm"*/ matches a literal double-quote; use /^tm/), and
  # boolean operators are lowercase (`and`, not `AND`). Uncomment when
  # ready to use:
  #
  # if [Node] =~ /^(tm|tq|bp|ob|le)/ {
  #   mutate { add_field => { "ProductType" => "EXXXXX" } }
  # }
  # else if [Node] =~ /^dv/ and [Application] != "XXXX" {
  #   mutate { add_field => { "ProductType" => "SXXX" } }
  # }
  # else {
  #   mutate { add_tag => [ "Undefined" ] }
  # }
  # ------------------------------------------------------------------
  if [Node] == "X-XXXX-XXXX.XX.XXXX.XXXX.local"
  {
    mutate { add_field => { "ProductType" => "SXXX" } }
  }

  # Normalize the date separator so the date-filter pattern below matches.
  mutate {
    gsub => ["AlertReceived", "/", "-"]
  }

  # Use the alert's own receive time as the event @timestamp.
  date {
    match  => ["AlertReceived", "dd-MM-yyyy HH:mm:ss"]
    target => "@timestamp"
  }
}
output
{
# Index every parsed event into the local Elasticsearch instance.
elasticsearch
{
hosts => "localhost:9200"
index => "ito"
# NOTE(review): document_type is deprecated in Elasticsearch 6.x and
# removed in 7.x — confirm the target ES version still accepts it.
document_type => "csv"
}
# Echo each event to the console for debugging.
stdout { codec => rubydebug }
}
Sample Messages