Filebeat Failed to publish events

CentOS release 6.6 (Final)

filebeat-6.6.1-linux-x86_64.tar.gz

filebeat config:

filebeat.inputs:
- type: log

  # Change to true to enable this input configuration.
  enabled: true

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/dnsmasq



#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch: 


#----------------------------- Logstash output -------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["10.x.x.50:5044"]


logging.level: info
logging.to_files: true
logging.files:
  path: /var/log/filebeat
  name: filebeat
  keepfiles: 7
  permissions: 0644

Errors follow:

2019-07-17T20:00:26.888+0800    INFO    [monitoring]    log/log.go:144  Non-zero metrics in the last 30s    {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":260,"time":{"ms":1}},"total":{"ticks":860,"time":{"ms":11},"value":860},"user":{"ticks":600,"time":{"ms":10}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":8},"info":{"ephemeral_id":"3e34eb0d-0fd4-4a07-9b1b-cbd2aace15bd","uptime":{"ms":2433050}},"memstats":{"gc_next":4194304,"memory_alloc":1808168,"memory_total":54218560,"rss":-1110016}},"filebeat":{"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":1}},"system":{"load":{"1":0,"15":0,"5":0,"norm":{"1":0,"15":0,"5":0}}}}}}
2019-07-17T20:00:56.888+0800    INFO    [monitoring]    log/log.go:144  Non-zero metrics in the last 30s    {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":260,"time":{"ms":6}},"total":{"ticks":860,"time":{"ms":6},"value":860},"user":{"ticks":600}},"handles":{"limit":{"hard":4096,"soft":1024},"open":8},"info":{"ephemeral_id":"3e34eb0d-0fd4-4a07-9b1b-cbd2aace15bd","uptime":{"ms":2463048}},"memstats":{"gc_next":4194304,"memory_alloc":2076128,"memory_total":54486520,"rss":1142784}},"filebeat":{"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":1}},"system":{"load":{"1":0,"15":0,"5":0,"norm":{"1":0,"15":0,"5":0}}}}}}
2019-07-17T20:01:05.056+0800    ERROR   logstash/async.go:256   Failed to publish events caused by: write tcp 10.x.x.88:34584->10.x.x.50:5044: write: connection reset by peer
2019-07-17T20:01:06.566+0800    ERROR   pipeline/output.go:121  Failed to publish events: write tcp 10.x.x.88:34584->10.x.x.50:5044: write: connection reset by peer
2019-07-17T20:01:06.566+0800    INFO    pipeline/output.go:95   Connecting to backoff(async(tcp://10.x.x.50:5044))
2019-07-17T20:01:06.566+0800    INFO    pipeline/output.go:105  Connection to backoff(async(tcp://10.x.x.50:5044)) established
2019-07-17T20:01:26.887+0800    INFO    [monitoring]    log/log.go:144  Non-zero metrics in the last 30s    {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":270,"time":{"ms":6}},"total":{"ticks":890,"time":{"ms":21},"value":890},"user":{"ticks":620,"time":{"ms":15}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":8},"info":{"ephemeral_id":"3e34eb0d-0fd4-4a07-9b1b-cbd2aace15bd","uptime":{"ms":2493048}},"memstats":{"gc_next":6619248,"memory_alloc":4675344,"memory_total":57755776,"rss":12288}},"filebeat":{"events":{"added":10,"done":10},"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"acked":10,"batches":2,"failed":10,"total":20},"read":{"bytes":6},"write":{"bytes":919,"errors":1}},"pipeline":{"clients":1,"events":{"active":0,"published":10,"retry":20,"total":10},"queue":{"acked":10}}},"registrar":{"states":{"current":1,"update":10},"writes":{"success":1,"total":1}},"system":{"load":{"1":0,"15":0,"5":0,"norm":{"1":0,"15":0,"5":0}}}}}}
2019-07-17T20:01:56.888+0800    INFO    [monitoring]    log/log.go:144  Non-zero metrics in the last 30s    {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":270,"time":{"ms":2}},"total":{"ticks":890,"time":{"ms":6},"value":890},"user":{"ticks":620,"time":{"ms":4}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":8},"info":{"ephemeral_id":"3e34eb0d-0fd4-4a07-9b1b-cbd2aace15bd","uptime":{"ms":2523048}},"memstats":{"gc_next":6619248,"memory_alloc":4937616,"memory_total":58018048}},"filebeat":{"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":1}},"system":{"load":{"1":0,"15":0,"5":0,"norm":{"1":0,"15":0,"5":0}}}}}}
2019-07-17T20:02:26.889+0800    INFO    [monitoring]    log/log.go:144  Non-zero metrics in the last 30s    {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":280,"time":{"ms":3}},"total":{"ticks":900,"time":{"ms":8},"value":900},"user":{"ticks":620,"time":{"ms":5}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":8},"info":{"ephemeral_id":"3e34eb0d-0fd4-4a07-9b1b-cbd2aace15bd","uptime":{"ms":2553048}},"memstats":{"gc_next":6619248,"memory_alloc":5191488,"memory_total":58271920}},"filebeat":{"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":1}},"system":{"load":{"1":0,"15":0,"5":0,"norm":{"1":0,"15":0,"5":0}}}}}}
2019-07-17T20:02:56.888+0800    INFO    [monitoring]    log/log.go:144  Non-zero metrics in the last 30s    {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":280,"time":{"ms":3}},"total":{"ticks":910,"time":{"ms":5},"value":910},"user":{"ticks":630,"time":{"ms":2}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":8},"info":{"ephemeral_id":"3e34eb0d-0fd4-4a07-9b1b-cbd2aace15bd","uptime":{"ms":2583050}},"memstats":{"gc_next":6619248,"memory_alloc":5592368,"memory_total":58672800,"rss":4096}},"filebeat":{"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":1}},"system":{"load":{"1":0,"15":0,"5":0,"norm":{"1":0,"15":0,"5":0}}}}}}

Do you see any errors on the Logstash side?

The Filebeat process is still running, but the socket to Logstash keeps getting disconnected automatically.

No errors were found on the Logstash side.

The Logstash logs are as follows:

Sending Logstash logs to /usr/local/logstash/logs which is now configured via log4j2.properties
[2019-07-16T23:39:15,460][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2019-07-16T23:39:15,485][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"6.6.1"}
[2019-07-16T23:39:24,435][INFO ][logstash.pipeline        ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2019-07-16T23:39:25,277][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://10.x.x.50:9200/]}}
[2019-07-16T23:39:25,556][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.x.x.50:9200/"}
[2019-07-16T23:39:25,693][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2019-07-16T23:39:25,697][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2019-07-16T23:39:25,753][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://10.x.x.50:9200"]}
[2019-07-16T23:39:25,786][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2019-07-16T23:39:25,824][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2019-07-16T23:39:25,955][INFO ][logstash.outputs.elasticsearch] Installing elasticsearch template to _template/logstash
[2019-07-16T23:39:26,486][INFO ][logstash.inputs.beats    ] Beats inputs: Starting input listener {:address=>"0.0.0.0:5044"}
[2019-07-16T23:39:26,586][INFO ][logstash.pipeline        ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x7f2352a run>"}
[2019-07-16T23:39:26,760][INFO ][logstash.agent           ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2019-07-16T23:39:26,794][INFO ][org.logstash.beats.Server] Starting server on port: 5044
[2019-07-16T23:39:27,237][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}

This looks correct. Can you also check the Logstash logs for the time period shown in the Filebeat log?

Logstash might also be closing connections actively due to inactivity. Try checking client_inactivity_timeout (a minimal sketch follows the link below):
https://www.elastic.co/guide/en/logstash/current/plugins-inputs-beats.html#plugins-inputs-beats-client_inactivity_timeout
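
A minimal sketch of what raising that timeout could look like in the Logstash pipeline config, assuming the default beats input on port 5044 (the 300-second value is only an example, not a recommendation):

input {
  beats {
    port => 5044
    # Defaults to 60 seconds; idle Filebeat connections are closed after this,
    # which shows up on the Filebeat side as "connection reset by peer".
    client_inactivity_timeout => 300
  }
}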

Another possible cause is back pressure: when Elasticsearch is slow or under load, it can cause Logstash to close connections. A quick health check is sketched below.
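
If back pressure is the suspect, a quick first check (assuming Elasticsearch is reachable at 10.x.x.50:9200, as in the Logstash log above) is the cluster health API:

curl -s 'http://10.x.x.50:9200/_cluster/health?pretty'
# A "red" status, or large numbers of initializing/relocating shards,
# would suggest ES is struggling and pushing back on Logstash.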

Why is Filebeat not logging anything?

Here is the Filebeat DEBUG output:

./filebeat -c filebeat.yml -e &
[1] 6432
2019-07-20T14:24:34.214+0800    INFO    instance/beat.go:616    Home path: [/usr/local/filebeat] Config path: [/usr/local/filebeat] Data path: [/usr/local/filebeat/data] Logs path: [/usr/local/filebeat/logs]
2019-07-20T14:24:34.214+0800    DEBUG   [beat]  instance/beat.go:653    Beat metadata path: /usr/local/filebeat/data/meta.json
2019-07-20T14:24:34.214+0800    INFO    instance/beat.go:623    Beat UUID: a611ea73-3183-41a9-af8b-f8492ebce11c
2019-07-20T14:24:34.214+0800    INFO    [seccomp]   seccomp/seccomp.go:93   Syscall filter could not be installed because the kernel does not support seccomp
2019-07-20T14:24:34.214+0800    INFO    [beat]  instance/beat.go:936    Beat info   {"system_info": {"beat": {"path": {"config": "/usr/local/filebeat", "data": "/usr/local/filebeat/data", "home": "/usr/local/filebeat", "logs": "/usr/local/filebeat/logs"}, "type": "filebeat", "uuid": "a611ea73-3183-41a9-af8b-f8492ebce11c"}}}
2019-07-20T14:24:34.214+0800    INFO    [beat]  instance/beat.go:945    Build info  {"system_info": {"build": {"commit": "928f5e3f35fe28c1bd73513ff1cc89406eb212a6", "libbeat": "6.6.1", "time": "2019-02-13T16:12:26.000Z", "version": "6.6.1"}}}
2019-07-20T14:24:34.214+0800    INFO    [beat]  instance/beat.go:948    Go runtime info {"system_info": {"go": {"os":"linux","arch":"amd64","max_procs":2,"version":"go1.10.8"}}}
2019-07-20T14:24:34.216+0800    INFO    [beat]  instance/beat.go:952    Host info   {"system_info": {"host": {"architecture":"x86_64","boot_time":"2019-07-06T05:29:15+08:00","containerized":true,"name":"DNS-srv28","ip":["127.0.0.1/8","::1/128","10.26.80.28/24","fe80::20c:29ff:fec8:618a/64"],"kernel_version":"2.6.32-504.el6.x86_64","mac":["00:0c:29:c8:61:8a"],"os":{"family":"redhat","platform":"centos","name":"CentOS","version":"6.6 (Final)","major":6,"minor":6,"patch":0,"codename":"Final"},"timezone":"CST","timezone_offset_sec":28800}}}
2019-07-20T14:24:34.216+0800    INFO    [beat]  instance/beat.go:981    Process info    {"system_info": {"process": {"capabilities": {"inheritable":null,"permitted":["chown","dac_override","dac_read_search","fowner","fsetid","kill","setgid","setuid","setpcap","linux_immutable","net_bind_service","net_broadcast","net_admin","net_raw","ipc_lock","ipc_owner","sys_module","sys_rawio","sys_chroot","sys_ptrace","sys_pacct","sys_admin","sys_boot","sys_nice","sys_resource","sys_time","sys_tty_config","mknod","lease","audit_write","audit_control","setfcap","mac_override","mac_admin","syslog","wake_alarm","block_suspend","audit_read","38","39","40","41","42","43","44","45","46","47","48","49","50","51","52","53","54","55","56","57","58","59","60","61","62","63"],"effective":["chown","dac_override","dac_read_search","fowner","fsetid","kill","setgid","setuid","setpcap","linux_immutable","net_bind_service","net_broadcast","net_admin","net_raw","ipc_lock","ipc_owner","sys_module","sys_rawio","sys_chroot","sys_ptrace","sys_pacct","sys_admin","sys_boot","sys_nice","sys_resource","sys_time","sys_tty_config","mknod","lease","audit_write","audit_control","setfcap","mac_override","mac_admin","syslog","wake_alarm","block_suspend","audit_read","38","39","40","41","42","43","44","45","46","47","48","49","50","51","52","53","54","55","56","57","58","59","60","61","62","63"],"bounding":["chown","dac_override","dac_read_search","fowner","fsetid","kill","setgid","setuid","setpcap","linux_immutable","net_bind_service","net_broadcast","net_admin","net_raw","ipc_lock","ipc_owner","sys_module","sys_rawio","sys_chroot","sys_ptrace","sys_pacct","sys_admin","sys_boot","sys_nice","sys_resource","sys_time","sys_tty_config","mknod","lease","audit_write","audit_control","setfcap","mac_override","mac_admin","syslog","wake_alarm","block_suspend","audit_read","38","39","40","41","42","43","44","45","46","47","48","49","50","51","52","53","54","55","56","57","58","59","60","61","62","63"],"ambient":null}, "cwd": "/usr/local/filebeat", "exe": "/usr/local/filebeat/filebeat", "name": "filebeat", "pid": 6432, "ppid": 6353, "seccomp": {"mode":""}, "start_time": "2019-07-20T14:24:33.680+0800"}}}


2019-07-20T14:24:34.217+0800    INFO    instance/beat.go:281    Setup Beat: filebeat; Version: 6.6.1
2019-07-20T14:24:34.217+0800    DEBUG   [beat]  instance/beat.go:302    Initializing output plugins
2019-07-20T14:24:34.218+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:160    add_cloud_metadata: starting to fetch metadata, timeout=3s
2019-07-20T14:24:34.242+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:192    add_cloud_metadata: received disposition for ecs after 24.148681ms. result=[provider:ecs, error=failed requesting ecs metadata: Get http://100.100.100.200/latest/meta-data/instance-id: dial tcp 100.100.100.200:80: connect: connection refused, metadata={}]
2019-07-20T14:24:34.271+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:192    add_cloud_metadata: received disposition for qcloud after 52.838258ms. result=[provider:qcloud, error=failed requesting qcloud metadata: Get http://metadata.tencentyun.com/meta-data/placement/zone: dial tcp: lookup metadata.tencentyun.com on 114.114.114.114:53: no such host, metadata={}]
2019-07-20T14:24:37.218+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:192    add_cloud_metadata: received disposition for gce after 3.000374244s. result=[provider:gce, error=failed requesting gce metadata: Get http://169.254.169.254/computeMetadata/v1/?recursive=true&alt=json: net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers), metadata={}]
2019-07-20T14:24:37.219+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:192    add_cloud_metadata: received disposition for digitalocean after 3.000567845s. result=[provider:digitalocean, error=failed requesting digitalocean metadata: Get http://169.254.169.254/metadata/v1.json: net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers), metadata={}]
2019-07-20T14:24:37.219+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:192    add_cloud_metadata: received disposition for ec2 after 3.00072512s. result=[provider:ec2, error=failed requesting ec2 metadata: Get http://169.254.169.254/2014-02-25/dynamic/instance-identity/document: dial tcp 169.254.169.254:80: i/o timeout, metadata={}]
2019-07-20T14:24:37.219+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:192    add_cloud_metadata: received disposition for az after 3.000823146s. result=[provider:az, error=failed requesting az metadata: Get http://169.254.169.254/metadata/instance/compute?api-version=2017-04-02: net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers), metadata={}]
2019-07-20T14:24:37.219+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:199    add_cloud_metadata: timed-out waiting for all responses
2019-07-20T14:24:37.219+0800    DEBUG   [filters]   add_cloud_metadata/add_cloud_metadata.go:163    add_cloud_metadata: fetchMetadata ran for 3.000950214s
2019-07-20T14:24:37.219+0800    INFO    add_cloud_metadata/add_cloud_metadata.go:319    add_cloud_metadata: hosting provider type not detected.
2019-07-20T14:24:37.219+0800    DEBUG   [processors]    processors/processor.go:66  Processors: add_host_metadata=[netinfo.enabled=[false], cache.ttl=[5m0s]], add_cloud_metadata=null
2019-07-20T14:24:37.220+0800    DEBUG   [publish]   pipeline/consumer.go:137    start pipeline event consumer
2019-07-20T14:24:37.220+0800    INFO    [publisher] pipeline/module.go:110  Beat name: DNS-srv28
2019-07-20T14:24:37.221+0800    INFO    [monitoring]    log/log.go:117  Starting metrics logging every 30s
2019-07-20T14:24:37.221+0800    INFO    instance/beat.go:403    filebeat start running.
2019-07-20T14:24:37.221+0800    DEBUG   [registrar] registrar/registrar.go:114  Registry file set to: /usr/local/filebeat/data/registry
2019-07-20T14:24:37.221+0800    INFO    registrar/registrar.go:134  Loading registrar data from /usr/local/filebeat/data/registry
2019-07-20T14:24:37.221+0800    INFO    registrar/registrar.go:141  States Loaded from registrar: 1
2019-07-20T14:24:37.221+0800    WARN    beater/filebeat.go:367  Filebeat is unable to load the Ingest Node pipelines for the configured modules because the Elasticsearch output is not configured/enabled. If you have already loaded the Ingest Node pipelines or are using Logstash pipelines, you can ignore this warning.
2019-07-20T14:24:37.221+0800    INFO    crawler/crawler.go:72   Loading Inputs: 1
2019-07-20T14:24:37.221+0800    DEBUG   [registrar] registrar/registrar.go:267  Starting Registrar
2019-07-20T14:24:37.222+0800    DEBUG   [processors]    processors/processor.go:66  Processors: 
2019-07-20T14:24:37.222+0800    DEBUG   [input] log/config.go:201   recursive glob enabled
2019-07-20T14:24:37.222+0800    DEBUG   [input] log/input.go:147    exclude_files: []. Number of stats: 1
2019-07-20T14:24:37.222+0800    DEBUG   [input] file/states.go:68   New state added for /var/log/dnsmasq
2019-07-20T14:24:37.222+0800    DEBUG   [input] log/input.go:168    input with previous states loaded: 1
2019-07-20T14:24:37.222+0800    INFO    log/input.go:138    Configured paths: [/var/log/dnsmasq]
2019-07-20T14:24:37.222+0800    INFO    input/input.go:114  Starting input of type: log; ID: 12764668059186481777 
2019-07-20T14:24:37.222+0800    DEBUG   [cfgfile]   cfgfile/reload.go:118   Checking module configs from: /usr/local/filebeat/modules.d/*.yml
2019-07-20T14:24:37.222+0800    DEBUG   [acker] beater/acker.go:64  stateful ack    {"count": 1}
2019-07-20T14:24:37.223+0800    DEBUG   [registrar] registrar/registrar.go:345  Processing 1 events
2019-07-20T14:24:37.223+0800    DEBUG   [registrar] registrar/registrar.go:315  Registrar state updates processed. Count: 1
2019-07-20T14:24:37.223+0800    DEBUG   [cfgfile]   cfgfile/reload.go:132   Number of module configs found: 0
2019-07-20T14:24:37.223+0800    DEBUG   [registrar] registrar/registrar.go:335  Registrar states cleaned up. Before: 1, After: 1, Pending: 0
2019-07-20T14:24:37.223+0800    INFO    crawler/crawler.go:106  Loading and starting Inputs completed. Enabled inputs: 1
2019-07-20T14:24:37.223+0800    DEBUG   [registrar] registrar/registrar.go:400  Write registry file: /usr/local/filebeat/data/registry
2019-07-20T14:24:37.223+0800    DEBUG   [input] log/input.go:174    Start next scan
2019-07-20T14:24:37.223+0800    DEBUG   [input] log/input.go:404    Check file for harvesting: /var/log/dnsmasq
2019-07-20T14:24:37.223+0800    DEBUG   [input] log/input.go:494    Update existing file for harvesting: /var/log/dnsmasq, offset: 16583
2019-07-20T14:24:37.223+0800    DEBUG   [input] log/input.go:548    File didn't change: /var/log/dnsmasq
2019-07-20T14:24:37.223+0800    DEBUG   [input] log/input.go:195    input states cleaned up. Before: 1, After: 1, Pending: 0

After setting up an ES cluster, the problem was solved.

Was it due to performance issues?
