Hello, my logstash configurations create a separate index for each log file, I want to add them all together in a single elasticsearch index, how can I do this?
Here's how my logstash.conf looks like:
input {
tcp {
port => 5000
codec => multiline {
pattern => "^%{TIMESTAMP_ISO8601} "
negate => true
what => previous
}
}
}
filter {
## Finished merchants
## Stock status (in, out)
grok{
match => [ "message", "'in_stock_items_count': %{NUMBER:instock_items:int}" ]
}
grok{
match => [ "message", "'out_stock_items_count': %{NUMBER:outofstock_items:int}" ]
}
## Scraped items, invalid items
grok{
match => [ "message", "'item_scraped_count': %{NUMBER:scraped_items:int}" ]
}
grok{
match => [ "message", "'invalid_items_count': %{NUMBER:invalid_items:int}" ]
}
## Zero Priced
grok{
match => [ "message", "'zero_price_items_count': %{NUMBER:zero_priced_items:int}" ]
}
## Item Duration
grok{
match => [ "message", "'iteration_duration': %{NUMBER:iteration_duration:float}" ]
}
## timestamp
grok{
match => [ "message", "%{DATE_EU:timestamp}" ]
}
date{
match => [ "timestamp", "yy-MM-dd" ]
target => "@timestamp"
}
}
output {
if "_grokparsefailure" not in [tags]{
elasticsearch {
hosts => "elasticsearch:9200"
}
}
}