Hello everyone, I'm using Metricbeat to collect MongoDB metrics ("dbstats", "status", "collstats", "metrics") and ship them to Elasticsearch via Logstash. There are no errors in the Logstash or Metricbeat logs. The Metricbeat monitoring log shows that the MongoDB metricsets are collecting events:
2022-01-10T16:59:05.069Z	INFO	[monitoring]	log/log.go:184	Non-zero metrics in the last 30s{"monitoring": {"metrics": {"beat":{"cgroup":{"cpuacct":{"total":{"ns":159270505}},"memory":{"mem":{"usage":{"bytes":614400}}}},"cpu":{"system":{"ticks":170,"time":{"ms":56}},"total":{"ticks":520,"time":{"ms":159},"value":520},"user":{"ticks":350,"time":{"ms":103}}},"handles":{"limit":{"hard":1048576,"soft":1048576},"open":9},"info":{"ephemeral_id":"3daf330b-739c-40c4-898a-3ea59a66d95d","uptime":{"ms":60232},"version":"7.16.2"},"memstats":{"gc_next":21140736,"memory_alloc":14206560,"memory_total":66298160,"rss":161378304},"runtime":{"goroutines":51}},"libbeat":{"config":{"module":{"running":0}},"output":{"events":{"acked":126,"active":0,"batches":9,"total":126},"read":{"bytes":54},"write":{"bytes":24842}},"pipeline":{"clients":4,"events":{"active":0,"published":126,"total":126},"queue":{"acked":126}}},"metricbeat":{"mongodb":{"collstats":{"events":108,"success":108},"dbstats":{"events":12,"success":12},"metrics":{"events":3,"success":3},"status":{"events":3,"success":3}}},"system":{"load":{"1":0.7,"15":0.35,"5":0.51,"norm":{"1":0.0875,"15":0.0438,"5":0.0638}}}}}}
However, nothing shows up in the Discover tab in Kibana. Can someone let me know what could be wrong?
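For reference, one check I can run (assuming access to the Kibana Dev Tools console) is whether the mongo_data index is being created in Elasticsearch at all:

GET _cat/indices/mongo_data?v

If the index exists but Discover stays empty, the problem is presumably on the Kibana side (e.g. no matching index pattern) rather than in the pipeline itself.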
metricbeat.yml
metricbeat.modules:
- module: mongodb
  metricsets: ["dbstats", "status", "collstats", "metrics"]
  period: 10s
  enabled: true
  hosts: ["shard-1-0.shard-1-service.storage.svc.cluster.local:27017"]
  username: beats
  password: ****
output.logstash:
  hosts: ["<logstash_ip>:5044"]
logstash.conf
input {
  beats {
    port => 5044
  }
}

output {
  stdout {
    codec => rubydebug
  }
  elasticsearch {
    hosts => ["${ES_HOSTS}"]
    user => "elastic"
    password => "*******"
    cacert => '/etc/logstash/certificates/ca.crt'
    action => "index"
    index => "mongo_data"
  }
}
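Similarly, to confirm whether Logstash is actually writing documents, a count query against the index should return a non-zero count once events arrive (this uses the same credentials and CA certificate as the elasticsearch output above; the host below is a placeholder for ${ES_HOSTS}):

curl --cacert /etc/logstash/certificates/ca.crt -u elastic "https://<es_host>:9200/mongo_data/_count?pretty"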