Hello,
Has anyone optimized Filebeat to send more than 200 million events per hour to Kafka, across various topics?
In my setup it is far too slow to cater to that request — it is only able to send about 1/4 of the events to Kafka.
max_procs: 20
queue.mem:
events: 600000
flush.min_events: 600000
flush.timeout: 50ms
http.enabled: true
output.kafka:
required_acks: 1
worker: 40
# compression: gzip
bulk_max_size: 1000000
max_message_bytes: 1000000
From what I have seen in the logs, published events max out at around 300,000–400,000 (3–4 lakh) per interval.
-
-
2019-03-13T07:42:03.071+0530 INFO [monitoring] log/log.go:144 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":277770,"time":{"ms":14390}},"total":{"ticks":2908220,"time":{"ms":153340},"value":2908220},"user":{"ticks":2630450,"time":{"ms":138950}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":87},"info":{"ephemeral_id":"05c930bd-4d6a-4200-adfe-7352055e7682","uptime":{"ms":660049}},"memstats":{"gc_next":667089056,"memory_alloc":571288568,"memory_total":1708086450584,"rss":122880}},"filebeat":{"events":{"active":-3541,"added":196761,"done":200302},"harvester":{"open_files":22,"running":22}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"acked":199957,"active":-1530,"batches":287,"total":198427}},"outputs":{"kafka":{"bytes_read":5063141,"bytes_write":110404317}},"pipeline":{"clients":21,"events":{"active":2891,"filtered":346,"published":196412,"total":196761},"queue":{"acked":199957}}},"registrar":{"states":{"current":24,"update":200302},"writes":{"success":279,"total":279}},"system":{"load":{"1":22.27,"15":19.05,"5":21.11,"norm":{"1":0.5568,"15":0.4763,"5":0.5277}}}}}}
-
-
-
2019-03-13T07:42:33.038+0530 INFO [monitoring] log/log.go:144 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":292460,"time":{"ms":14688}},"total":{"ticks":3065440,"time":{"ms":157195},"value":3065440},"user":{"ticks":2772980,"time":{"ms":142507}}},"handles":{"limit":{"hard":4096,"soft":1024},"open":87},"info":{"ephemeral_id":"05c930bd-4d6a-4200-adfe-7352055e7682","uptime":{"ms":690048}},"memstats":{"gc_next":246715056,"memory_alloc":137692184,"memory_total":1806615640928,"rss":19525632}},"filebeat":{"events":{"active":-870,"added":198928,"done":199798},"harvester":{"open_files":22,"running":22}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"acked":199468,"active":557,"batches":293,"total":200025}},"outputs":{"kafka":{"bytes_read":5167621,"bytes_write":111660892}},"pipeline":{"clients":21,"events":{"active":2014,"filtered":337,"published":198594,"total":198928},"queue":{"acked":199468}}},"registrar":{"states":{"current":24,"update":199798},"writes":{"success":291,"total":291}},"system":{"load":{"1":23.67,"15":19.33,"5":21.72,"norm":{"1":0.5918,"15":0.4833,"5":0.543}}}}}}
-
File rotation is per hour, so test.log gets rotated every hour.
-
type: log
enabled: true
close_inactive: 3h
scan_frequency: 2s
paths:
  - /var/log/test.log
fields:
  log_topic: mytopic