I have two filebeat inputs which have tags and fields.
In my pipeline.conf I filter logs by their tags.
But when the index is created, Logstash uses the literal string %{[fields][log_type]}-2020-10-07 as the index name.
How can I solve this? Can I create two separate indices?
Here are my files:
filebeat.yml :
# Each per-input option must be nested under its own "- type: log" list item.
# If tags/fields/paths sit flush with the list markers (as pasted), Filebeat
# does not attach `tags`/`fields` to the events — and then Logstash's
# %{[fields][log_type]} sprintf stays unexpanded, producing the literal
# index name seen in the question. (Indentation may have been lost in the
# paste — verify against the real file.)
- type: log
  enabled: true
  paths:
    - 'D:\Git\gbase.API\Logs\*.log'
  tags: ["gbaseapi"]
  # Custom fields land under the `fields.` prefix on the event unless
  # fields_under_root: true is set; the Logstash output below expects
  # [fields][log_type], so keep them namespaced.
  fields:
    log_type: gbase

- type: log
  enabled: true
  paths:
    - 'D:\Git\finance.api\FinanceAPI\logs\*.log'
  tags: ["financeapi"]
  fields:
    log_type: finance
  # Join whitespace-indented continuation lines onto the preceding line.
  multiline.pattern: '^[[:space:]]'
  multiline.negate: false
  multiline.match: after
mypipeline.conf :
input {
# NOTE(review): filebeat events arrive with a type already set, and a
# Logstash input does not override an existing event type — so this
# `type` setting is effectively ignored for filebeat traffic. Confirm it
# is still wanted (it is harmless either way).
beats {
type=>"mytest"
port => 5044
}
}
filter {
  if "gbaseapi" in [tags] {
    # BUG FIX: the original tested [level] *before* grok had extracted it,
    # so the error/fatal branch (the pattern with requestUrl/method) could
    # never run. grok tries patterns in order, so list the longer
    # error-line pattern first and fall back to the plain one.
    grok {
      match => {
        "message" => [
          "%{DATESTAMP:time} \[%{WORD:processId}\] %{LOGLEVEL:level} %{USERNAME:logger} %{USER:user} %{IPV4:clientIp} %{URI:requestUrl} %{USER:method} %{GREEDYDATA:message}",
          "%{DATESTAMP:time} \[%{WORD:processId}\] %{LOGLEVEL:level} %{USERNAME:logger} %{USER:user} %{IPV4:clientIp} %{GREEDYDATA:message}"
        ]
      }
      # BUG FIX: without overwrite, GREEDYDATA:message is *appended* to the
      # existing message field (it becomes an array). Overwriting keeps only
      # the log tail, which is what the original gsub clean-up was trying
      # (and failing) to achieve.
      overwrite => [ "message" ]
    }
  }
  if "financeapi" in [tags] {
    grok {
      match => {
        "message" => [
          "%{DATESTAMP:time} \[%{WORD:processId}\] %{LOGLEVEL:level} %{USERNAME:logger} %{USER:user} %{IPV4:clientIp} %{URI:requestUrl} %{USER:method} %{GREEDYDATA:message}",
          "%{DATESTAMP:time} \[%{WORD:processId}\] %{LOGLEVEL:level} %{USERNAME:logger} %{USER:user} %{IPV4:clientIp} %{GREEDYDATA:message}"
        ]
      }
      overwrite => [ "message" ]
    }
  }
  # NOTE(review): the original mutate/gsub calls used literal "%{level}",
  # "%{logger}", "%{clientIp}" patterns — gsub does not sprintf the pattern,
  # so those were no-ops. They are dropped here because `overwrite` above
  # already strips the prefix from `message`.
  date {
    # CONSISTENCY FIX: the gbase branch captured the timestamp as
    # `timestamp` while this filter reads `time`, so gbase events were
    # never date-parsed. Both branches now capture `time`.
    # TODO(review): confirm the real log timestamp layout — a DATESTAMP
    # capture looks like "MM/dd/yyyy HH:mm:ss", which the original Apache
    # pattern "dd/MMM/yyyy:HH:mm:ss Z" can never match.
    match => [ "time", "MM/dd/yyyy HH:mm:ss", "dd/MM/yyyy HH:mm:ss", "MM/dd/yy HH:mm:ss" ]
    target => "@time"
  }
}
output {
# If an event does not carry [fields][log_type] (e.g. because the Filebeat
# config did not actually attach `fields` to the input), the sprintf below
# is left unexpanded and Elasticsearch creates a literal index named
# "%{[fields][log_type]}-2020.10.07" — the symptom described above.
# When the field IS present, each distinct log_type value gets its own
# daily index, so the two inputs already produce two separate index series.
elasticsearch
{
hosts => ["http://localhost:9200"]
index => "%{[fields][log_type]}-%{+YYYY.MM.dd}"
user => "something"
password => "something"
}
# Debug echo of every event; remove in production.
stdout { codec => rubydebug }
}