Hi,
I am trying to have two outputs in my config file: one Elasticsearch and the other CSV. However, my CSV file is not being generated.
Below is my conf file:
input{
# Consume events from the 'logstash' Kafka topic.
# decorate_events adds Kafka metadata (topic, partition, offset, key)
# under [@metadata][kafka] for use later in the pipeline.
kafka{
bootstrap_servers => "10.100.208.97:9092"
topics => ['logstash']
decorate_events => true
}
}
filter{
  # Parse the syslog-style prefix of the message.
  # NOTE: grok must not write a raw string into @timestamp (it has to stay a
  # Logstash Timestamp object) -- capture into a temporary field instead and
  # let the date filter below convert it into the event's @timestamp.
  # The hash form of `match` is used; the array form is deprecated.
  grok{
    match => { "message" => "%{SYSLOGTIMESTAMP:log_timestamp} %{DATA:Arch} %{DATA:Server}: %{DATA:Junk} %{WORD:Info} %{GREEDYDATA:log_message}" }
  }
  # Set @timestamp from the captured syslog timestamp (handles both the
  # single- and double-space day-of-month forms of SYSLOGTIMESTAMP).
  date{
    match => ["log_timestamp", "MMM dd HH:mm:ss", "MMM  d HH:mm:ss"]
  }
  # Tag events whose message mentions Workaround / ZTR so the output stage
  # can route on the tags. tag_on_failure => [] suppresses the default
  # _grokparsefailure tag when the pattern does not match.
  # (break_on_match is omitted: it is a no-op with a single pattern.)
  grok{
    match => { "message" => "..Workaround.." }
    add_tag => ['workaround']
    tag_on_failure => []
  }
  grok{
    match => { "message" => "..ZTR.." }
    add_tag => ['ztr']
    tag_on_failure => []
  }
}
output{
  elasticsearch{
    hosts => ['localhost:9200']
    index => "logs"
    # Credential values must be quoted strings; unquoted barewords are
    # rejected by the Logstash config parser.
    user => "logstash_internal_logs"
    password => "changeme"
  }
  # BUG FIX: [tags] is an ARRAY, so `[tags] == "ztr"` never evaluates true
  # and the csv output was never reached -- which is why the CSV file was
  # not being generated. Use the `in` membership operator instead.
  if "ztr" in [tags] {
    csv {
      fields => ["message"]
      path => "/home/elk/ztr.csv"
    }
  }
  stdout{ codec => rubydebug }
}