Hello! I'm running Kibana 6.2.1 on a CentOS 7 server. I can see logs from this same server in Kibana, but nothing arrives from my other servers. Can you tell me what I'm missing? I'm sharing my config files and logs below. Thanks in advance!
/etc/nginx/conf.d/kibana.conf
server {
    listen 80 default_server; # Listen on port 80
    server_name myaddress.com;

    auth_basic "Restricted Access";
    auth_basic_user_file /etc/nginx/.kibana-user;

    location / {
        proxy_pass http://localhost:5601;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}
/etc/logstash/conf.d/syslog-filter.conf
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}
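For context, the kind of line I expect that grok pattern to match is an ordinary syslog entry, something like this (made-up example, not from my actual logs):

Feb 20 15:40:01 server1 systemd[1]: Started Session 123 of user root.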
/etc/logstash/conf.d/logstash.conf
input {
  beats {
    port => 5044
  }
}

output {
  elasticsearch {
    hosts => "localhost:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
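To rule out a syntax problem in these pipeline files, this is the config test I can run on the Kibana/Logstash server (the path assumes the standard RPM install of Logstash 6.x):

sudo /usr/share/logstash/bin/logstash --path.settings /etc/logstash -t -f /etc/logstash/conf.d/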
/etc/kibana/kibana.yml
# Kibana is served by a back end server. This setting specifies the port to use.
server.port: 5601
server.host: "localhost"
# The URL of the Elasticsearch instance to use for all your queries.
elasticsearch.url: "http://localhost:9200"
/etc/filebeat/filebeat.yml
#=========================== Filebeat prospectors =============================
filebeat.prospectors:

# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.

- type: log

  # Change to true to enable this prospector configuration.
  enabled: false

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
    #- c:\programdata\elasticsearch\logs\*

#============================= Filebeat modules ===============================

filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml

  # Set to true to enable config reloading
  reload.enabled: false

  # Period on which files under path should be checked for changes
  #reload.period: 10s

#==================== Elasticsearch template setting ==========================

setup.template.settings:
  index.number_of_shards: 3
  #index.codec: best_compression
  #_source.enabled: false

setup.kibana:

output.logstash:
  # The Logstash hosts
  hosts: ["myaddress.com:5044"]
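While troubleshooting, these are the Filebeat self-tests I can run on each server (built into Filebeat 6.x, using the default /etc/filebeat/filebeat.yml). If I understand the docs right, "test output" actually tries to connect to the configured Logstash host:

sudo filebeat test config
sudo filebeat test output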
Now I've started seeing this in /var/log/filebeat/filebeat on the Kibana server:
2018-02-20T15:43:00.563-0500 INFO [monitoring] log/log.go:124 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":340,"time":344},"total":{"ticks":1920,"time":1933,"value":1920},"user":{"ticks":1580,"time":1589}},"info":{"ephemeral_id":"30bb4383-57cc-4745-8b86-94e35cd27edd","uptime":{"ms":1530078}},"memstats":{"gc_next":8298816,"memory_alloc":4318224,"memory_total":267606536}},"filebeat":{"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":1}},"output":{"events":{"batches":4,"failed":23,"total":23},"read":{"errors":1},"write":{"bytes":2919}},"pipeline":{"clients":4,"events":{"active":30,"retry":24}}},"registrar":{"states":{"current":4}},"system":{"load":{"1":1.5,"15":0.93,"5":1.18,"norm":{"1":0.1875,"15":0.1163,"5":0.1475}}}}}}
2018-02-20T15:43:05.306-0500 ERROR logstash/async.go:235 Failed to publish events caused by: read tcp 192.168.82.98:53384->192.168.82.98:5044: i/o timeout
2018-02-20T15:43:05.306-0500 ERROR logstash/async.go:235 Failed to publish events caused by: read tcp 192.168.82.98:53384->192.168.82.98:5044: i/o timeout
2018-02-20T15:43:05.306-0500 ERROR logstash/async.go:235 Failed to publish events caused by: read tcp 192.168.82.98:53384->192.168.82.98:5044: i/o timeout
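Since the errors are timeouts on port 5044, these are the checks I can do on the Kibana/Logstash server to confirm that Logstash is listening and that firewalld isn't blocking it (commands only, I haven't pasted their output here):

sudo ss -tlnp | grep 5044
sudo firewall-cmd --list-ports
sudo firewall-cmd --list-services

And if the port turns out not to be open, I believe this is how it would be opened on CentOS 7:

sudo firewall-cmd --permanent --add-port=5044/tcp
sudo firewall-cmd --reload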
And this is /var/log/filebeat/filebeat from server 1:
2018-02-20T15:44:48.880-0500 INFO [monitoring] log/log.go:124 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":250,"time":259},"total":{"ticks":480,"time":495,"value":480},"user":{"ticks":230,"time":236}},"info":{"ephemeral_id":"7e165afa-baa3-437e-8243-e6ecb783aee3","uptime":{"ms":2340019}},"memstats":{"gc_next":4194304,"memory_alloc":1370248,"memory_total":17396488}},"filebeat":{"harvester":{"open_files":0,"running":0}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":0,"events":{"active":0}}},"registrar":{"states":{"current":0}},"system":{"load":{"1":0.25,"15":0.49,"5":0.39,"norm":{"1":0.0156,"15":0.0306,"5":0.0244}}}}}}
2018-02-20T15:45:18.880-0500 INFO [monitoring] log/log.go:124 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":260,"time":262},"total":{"ticks":490,"time":499,"value":490},"user":{"ticks":230,"time":237}},"info":{"ephemeral_id":"7e165afa-baa3-437e-8243-e6ecb783aee3","uptime":{"ms":2370018}},"memstats":{"gc_next":4194304,"memory_alloc":1573136,"memory_total":17599376}},"filebeat":{"harvester":{"open_files":0,"running":0}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":0,"events":{"active":0}}},"registrar":{"states":{"current":0}},"system":{"load":{"1":0.37,"15":0.49,"5":0.4,"norm":{"1":0.0231,"15":0.0306,"5":0.025}}}}}}
2018-02-20T15:45:48.880-0500 INFO [monitoring] log/log.go:124 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":260,"time":265},"total":{"ticks":490,"time":503,"value":490},"user":{"ticks":230,"time":238}},"info":{"ephemeral_id":"7e165afa-baa3-437e-8243-e6ecb783aee3","uptime":{"ms":2400018}},"memstats":{"gc_next":4194304,"memory_alloc":1761024,"memory_total":17787264}},"filebeat":{"harvester":{"open_files":0,"running":0}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":0,"events":{"active":0}}},"registrar":{"states":{"current":0}},"system":{"load":{"1":1.02,"15":0.54,"5":0.55,"norm":{"1":0.0638,"15":0.0338,"5":0.0344}}}}}}
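From server 1, this is the kind of raw TCP check I can use to see whether myaddress.com:5044 is reachable at all (just opening a connection with curl's telnet handler, since curl is already installed):

curl -v telnet://myaddress.com:5044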