I'm running the Elastic Stack at v6.3.2 and recently added X-Pack, but I'm now having issues authenticating Logstash/Filebeat over SSL. I've created a username and password for each of the services, but nothing is appearing in Kibana, and Filebeat shows no errors even when I run it in debug mode.
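To rule out bad credentials, here's the kind of check I can run against Elasticsearch (run on the Elasticsearch host; localhost and the user/password are just the values from my configs below):

# Ask Elasticsearch who the given credentials authenticate as
curl -u logstash_internal:test "http://localhost:9200/_xpack/security/_authenticate?pretty"
# 200 plus the user's roles means the credentials are fine; 401 means they aren't.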
elasticsearch.yml
path.data: /var/lib/elasticsearch
path.logs: /var/log/elasticsearch
network.host: 0.0.0.0
http.port: 9200
xpack.security.enabled: true
xpack.security.transport.ssl.enabled: true
xpack.security.transport.ssl.verification_mode: certificate
xpack.security.transport.ssl.keystore.path: /etc/elasticsearch/certs/elastic-certificates.p12
xpack.security.transport.ssl.truststore.path: /etc/elasticsearch/certs/elastic-certificates.p12
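For reference, this is how a .p12 bundle like the one above is typically generated (per the 6.x docs; paths assume a package install), and if the bundle is password-protected that password needs to be registered in the Elasticsearch keystore:

# Create a CA and a node certificate bundle (PKCS#12)
/usr/share/elasticsearch/bin/elasticsearch-certutil ca
/usr/share/elasticsearch/bin/elasticsearch-certutil cert --ca elastic-stack-ca.p12

# If the keystore/truststore has a password, register it as a secure setting
/usr/share/elasticsearch/bin/elasticsearch-keystore add xpack.security.transport.ssl.keystore.secure_password
/usr/share/elasticsearch/bin/elasticsearch-keystore add xpack.security.transport.ssl.truststore.secure_password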
kibana.yml
server.port: 5601
server.host: "0.0.0.0"
elasticsearch.username: "kibana"
elasticsearch.password: "test"
xpack.security.enabled: true
logstash.conf
input {
  elasticsearch {
    user => "logstash_internal"
    password => "test"
  }
  beats {
    port => 5044
    host => "vkd01.scw.local"
  }
}
filter {
  elasticsearch {
    user => "logstash_internal"
    password => "test"
  }
  if [fileset][module] == "nginx" {
    if [fileset][name] == "access" {
      grok {
        match => { "message" => ["%{IPORHOST:[nginx][access][remote_ip]} - %{DATA:[nginx][access][user_name]} \[%{HTTPDATE:[nginx][access][time]}\] \"%{WORD:[nginx][access][method]} %{DATA:[nginx][access][url]} HTTP/%{NUMBER:[nginx][access][http_version]}\" %{NUMBER:[nginx][access][response_code]} %{NUMBER:[nginx][access][body_sent][bytes]} \"%{DATA:[nginx][access][referrer]}\" \"%{DATA:[nginx][access][agent]}\""] }
        remove_field => "message"
      }
      mutate {
        add_field => { "read_timestamp" => "%{@timestamp}" }
      }
      date {
        match => [ "[nginx][access][time]", "dd/MMM/YYYY:H:m:s Z" ]
        remove_field => "[nginx][access][time]"
      }
      useragent {
        source => "[nginx][access][agent]"
        target => "[nginx][access][user_agent]"
        remove_field => "[nginx][access][agent]"
      }
      geoip {
        source => "[nginx][access][remote_ip]"
        target => "[nginx][access][geoip]"
      }
    }
    else if [fileset][name] == "error" {
      grok {
        match => { "message" => ["%{DATA:[nginx][error][time]} \[%{DATA:[nginx][error][level]}\] %{NUMBER:[nginx][error][pid]}#%{NUMBER:[nginx][error][tid]}: (\*%{NUMBER:[nginx][error][connection_id]} )?%{GREEDYDATA:[nginx][error][message]}"] }
        remove_field => "message"
      }
      mutate {
        rename => { "@timestamp" => "read_timestamp" }
      }
      date {
        match => [ "[nginx][error][time]", "YYYY/MM/dd H:m:s" ]
        remove_field => "[nginx][error][time]"
      }
    }
  }
}
output {
  elasticsearch {
    hosts => "localhost"
    user => "logstash_internal"
    password => "test"
    manage_template => false
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
  }
}
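Before restarting Logstash I can at least confirm that the pipeline above parses (paths assume a package install):

# Validate the pipeline configuration and exit without processing events
/usr/share/logstash/bin/logstash --path.settings /etc/logstash --config.test_and_exit -f /etc/logstash/conf.d/logstash.conf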
filebeat.yml
filebeat:
  prospectors:
    -
      paths:
        - /var/log/auth.log
        - /var/log/syslog
        - /opt/rails/farad/current/log/*.log
        # - /var/log/*.log
      document_type: syslog
    -
      paths:
        - /var/log/nginx/access.log
      fields:
        nginx: true
      fields_under_root: true
      document_type: nginx
      input_type: log
  registry_file: /var/lib/filebeat/registry
output:
  logstash:
    hosts: ["elkd01.scw.local:5044"]
    username: "filebeat_internal"
    password: "test"
    bulk_max_size: 1024
    tls:
      certificate_authorities: ["/etc/pki/tls/certs/logstash-forwarder.crt"]
shipper:

logging:
  files:
    rotateeverybytes: 10485760 # = 10MB
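Filebeat 6.x can also verify its own config and try the connection to Logstash, which should surface TLS or connection problems (the config path assumes a package install):

# Check the YAML for syntax errors
filebeat test config -c /etc/filebeat/filebeat.yml
# Attempt a real connection to the configured Logstash output
filebeat test output -c /etc/filebeat/filebeat.yml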
I previously had Filebeat shipping to Logstash, and Logstash forwarding the parsed events to Elasticsearch, where they were viewable in Kibana. I'm not sure what I'm missing here, but since it all worked before I enabled security, I suspect it has to do with authentication. I've been following this guide.
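For completeness, this is how I'd check whether any Filebeat indices are reaching Elasticsearch at all (the elastic superuser password below is a placeholder):

# No filebeat-* index here means events never make it past Logstash
curl -u elastic:<password> "http://localhost:9200/_cat/indices?v"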