Issue with sending logs to secure Kafka

My problem looks the same as https://discuss.elastic.co/t/issue-with-sending-to-kafka-0-10-2-1-over-tls/111252, but in my case the setup does work at first.

However, logs are only shipped for some time (the duration differs between servers), and then they stop getting into Kafka.

In the Filebeat logs, everything looks as if delivery is still in progress:

{"log":"2020-07-27T10:44:53.010+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:221\u0009retryer: send unwait signal to consumer\n","stream":"stderr","time":"2020-07-27T07:44:53.010977285Z"}
{"log":"2020-07-27T10:44:53.010+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:225\u0009 done\n","stream":"stderr","time":"2020-07-27T07:44:53.011051349Z"}
{"log":"2020-07-27T10:44:55.970+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:221\u0009retryer: send unwait signal to consumer\n","stream":"stderr","time":"2020-07-27T07:44:55.97124783Z"}
{"log":"2020-07-27T10:44:55.970+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:225\u0009 done\n","stream":"stderr","time":"2020-07-27T07:44:55.971288498Z"}
{"log":"2020-07-27T10:45:14.883+0300\u0009INFO\u0009[monitoring]\u0009log/log.go:145\u0009Non-zero metrics in the last 30s\u0009{"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":448180,"time":{"ms":136}},"total":{"ticks":1085250,"time":{"ms":324},"value":1085250},"user":{"ticks":637070,"time":{"ms":188}}},"handles":{"limit":{"hard":1048576,"soft":1048576},"open":11},"info":{"ephemeral_id":"74c6f4a6-9481-4bb3-b217-e59df60c1494","uptime":{"ms":89640092}},"memstats":{"gc_next":563911584,"memory_alloc":291324664,"memory_total":27734031800},"runtime":{"goroutines":92}},"filebeat":{"harvester":{"files":{"de8f4d01-ab97-49e9-9e67-405ec97e1eee":{"size":2332}},"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"batches":2,"failed":2,"total":2}},"outputs":{"kafka":{"bytes_read":198548,"bytes_write":3822}},"pipeline":{"clients":1,"events":{"active":4117,"retry":2}}},"registrar":{"states":{"current":2}},"system":{"load":{"1":0,"15":0.05,"5":0.01,"norm":{"1":0,"15":0.025,"5":0.005}}}}}}\n","stream":"stderr","time":"2020-07-27T07:45:14.883331068Z"}
{"log":"2020-07-27T10:45:24.015+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:221\u0009retryer: send unwait signal to consumer\n","stream":"stderr","time":"2020-07-27T07:45:24.015564821Z"}
{"log":"2020-07-27T10:45:24.015+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:225\u0009 done\n","stream":"stderr","time":"2020-07-27T07:45:24.015602753Z"}
{"log":"2020-07-27T10:45:25.858+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:221\u0009retryer: send unwait signal to consumer\n","stream":"stderr","time":"2020-07-27T07:45:25.858744677Z"}
{"log":"2020-07-27T10:45:25.858+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:225\u0009 done\n","stream":"stderr","time":"2020-07-27T07:45:25.858780005Z"}
{"log":"2020-07-27T10:45:41.738+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:221\u0009retryer: send unwait signal to consumer\n","stream":"stderr","time":"2020-07-27T07:45:41.738324614Z"}
{"log":"2020-07-27T10:45:41.738+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:225\u0009 done\n","stream":"stderr","time":"2020-07-27T07:45:41.738372954Z"}
{"log":"2020-07-27T10:45:42.476+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:221\u0009retryer: send unwait signal to consumer\n","stream":"stderr","time":"2020-07-27T07:45:42.477260785Z"}
{"log":"2020-07-27T10:45:42.476+0300\u0009INFO\u0009[publisher]\u0009pipeline/retry.go:225\u0009 done\n","stream":"stderr","time":"2020-07-27T07:45:42.477311442Z"}
{"log":"2020-07-27T10:45:44.883+0300\u0009INFO\u0009[monitoring]\u0009log/log.go:145\u0009Non-zero metrics in the last 30s\u0009{"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":448330,"time":{"ms":151}},"total":{"ticks":1085600,"time":{"ms":353},"value":1085600},"user":{"ticks":637270,"time":{"ms":202}}},"handles":{"limit":{"hard":1048576,"soft":1048576},"open":9},"info":{"ephemeral_id":"74c6f4a6-9481-4bb3-b217-e59df60c1494","uptime":{"ms":89670094}},"memstats":{"gc_next":563911584,"memory_alloc":301172920,"memory_total":27743880056},"runtime":{"goroutines":86}},"filebeat":{"harvester":{"files":{"de8f4d01-ab97-49e9-9e67-405ec97e1eee":{"size":1969}},"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"batches":4,"failed":6,"total":6}},"outputs":{"kafka":{"bytes_read":210704,"bytes_write":4056}},"pipeline":{"clients":1,"events":{"active":4117,"retry":6}}},"registrar":{"states":{"current":2}},"system":{"load":{"1":0,"15":0.05,"5":0.01,"norm":{"1":0,"15":0.025,"5":0.005}}}}}}\n","stream":"stderr","time":"2020-07-27T07:45:44.883436635Z"}
{"log":"2020-07-27T10:46:14.887+0300\u0009INFO\u0009[monitoring]\u0009log/log.go:145\u0009Non-zero metrics in the last 30s\u0009{"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":448490,"time":{"ms":164}},"total":{"ticks":1085910,"time":{"ms":319},"value":1085910},"user":{"ticks":637420,"time":{"ms":155}}},"handles":{"limit":{"hard":1048576,"soft":1048576},"open":10},"info":{"ephemeral_id":"74c6f4a6-9481-4bb3-b217-e59df60c1494","uptime":{"ms":89700096}},"memstats":{"gc_next":563911584,"memory_alloc":309988400,"memory_total":27752695536},"runtime":{"goroutines":89}},"filebeat":{"harvester":{"files":{"de8f4d01-ab97-49e9-9e67-405ec97e1eee":{"size":2695}},"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":0}},"outputs":{"kafka":{"bytes_read":196522,"bytes_write":3783}},"pipeline":{"clients":1,"events":{"active":4117}}},"registrar":{"states":{"current":2}},"system":{"load":{"1":0,"15":0.05,"5":0.01,"norm":{"1":0,"15":0.025,"5":0.005}}}}}}\n","stream":"stderr","time":"2020-07-27T07:46:14.888155274Z"}

My config, filebeat.yml:

name: host1
logging.level: info
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - /var/lib/docker/containers/*/*-json.log
    fields:
      log_topic: 'containerlogs'

output.kafka:
  hosts: [ "host1:9093", "host2:9093" ]
  username: 'filebeat'
  password: 'filebeat-password'
  ssl.enabled: true
  ssl.certificate_authorities: ["root-ca.crt"]
  topic: 'containerlogs'
  compression: none
  max_retries: -1
  backoff.max: 10s

This is weird indeed. Could you check with debug logs enabled for filebeat?
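Something like this in filebeat.yml should produce the debug output (a minimal sketch; the selector names are my assumption, and ["*"] simply enables all debug output):

logging.level: debug
# Optionally narrow the noise with selectors, e.g. ["kafka"]; ["*"] keeps everything.
logging.selectors: ["*"]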

I don't see anything worrying in this dump. Could you please use Wireshark or tcpdump to verify whether events are actually being sent to Kafka? You should be able to see the data flowing. At the moment I'm not able to tell which part might be broken.

I think the problem is with Kafka.
I set up a Kafka listener without SASL and SSL (PLAINTEXT).
After that, the logs were delivered to Kafka again.

But that is a problem, because I need SASL + SSL.
I'm trying to add the keep_alive: 300s option (to keep the connection open), but it doesn't work as I expect.
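For reference, this is roughly what I am trying (a sketch of my change; keep_alive should set the keep-alive period for the broker connections, and 300s is just the value I picked):

output.kafka:
  # ...same hosts/SASL/SSL settings as above...
  # Send TCP keep-alives so idle connections to the brokers are not torn down.
  keep_alive: 300s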

