I just converted a single pipeline to the multi-pipeline setup as suggested here.
When I stop Elasticsearch, the file output stops working as well, which is the same problem I had with the single pipeline. Am I missing something here?
Logstash Version 7.1.1
bash-4.2$ cat config/pipelines.yml
- pipeline.id: kube-audit-intake
  queue.type: persisted
  config.string: |
    input {
      http {
        port => 8888
        enable_metric => false
        type => "kube-http-audit"
      }
    }
    filter {
      if [type] == "kube-http-audit" {
        split {
          # The webhook audit backend sends several events together in an EventList;
          # split them into individual events here.
          field => "[items]"
          # We only need the event sub-element; remove the others.
          remove_field => ["headers", "metadata", "apiVersion", "kind", "@version", "host"]
        }
        mutate {
          rename => { "items" => "event" }
          add_tag => [ "${ENVIRONMENT}" ]
        }
      }
    }
    output { pipeline { send_to => ["file", "es"] } }
- pipeline.id: es
  queue.type: persisted
  config.string: |
    input { pipeline { address => "es" } }
    output {
      elasticsearch {
        hosts => ["http://elasticsearch.kube-audit.svc.cluster.local:9200"]
        index => "kubeaudit-%{+YYYY.MM.dd}"
        #user => "elastic"
        #password => "changeme"
        id => "elastic"
      }
    }
- pipeline.id: file
  queue.type: persisted
  config.string: |
    input { pipeline { address => "file" } }
    output {
      file {
        path => "/var/log/kube-audit/audit_%{+YYYY-MM-dd-HH}.log"
        create_if_deleted => true
        id => "file"
      }
    }
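While Elasticsearch is stopped, one way to see what each pipeline is doing is the Logstash monitoring API (assuming the default API binding of localhost:9600); the per-pipeline event and queue counters in the response should show whether events are still reaching the file pipeline or are piling up behind the es pipeline:

bash-4.2$ curl -s 'http://localhost:9600/_node/stats/pipelines?pretty'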