Hi all! I have exactly the same problem, but none of the solutions I found here helped me.
My filebeat.yml:
filebeat.inputs:
- type: filestream
  id: my-filestream-id
  enabled: false
  paths:
    - /var/log/*.log

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 2

setup.kibana:
  host: "https://kibana-linux.elk.com:5601"
  protocol: "https"
  ssl.enabled: true
  ssl.certificate_authorities: ["/etc/filebeat/certs/ca.crt"]
  ssl.certificate: "/etc/filebeat/certs/ca.crt"
  ssl.key: "/etc/filebeat/certs/ca.key"
  ssl.verification_mode: certificate

output.logstash:
  hosts: ["logstash01.elk.com:5047", "logstash02.elk.com:5047", "logstash03.elk.com:5047"]
  ssl.certificate_authorities: ["/etc/filebeat/certs/ca.crt"]

processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~

logging.level: info
logging.to_files: true
logging.files:
  path: /var/log/filebeat
  name: filebeat
  keepfiles: 7
  permissions: 0644
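A quick way to confirm that Filebeat data is landing at all, before any of the queries below, is a plain _cat check on the index pattern:

GET _cat/indices/filebeat-*?v&h=index,health,docs.count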
My /etc/filebeat/modules.d/elasticsearch.yml (enabled):
- module: elasticsearch
  server:
    enabled: true
    var.paths: ["/var/log/elasticsearch/elk_server.json"]
  gc:
    enabled: true
    var.paths: ["/var/log/elasticsearch/gc.log"]
  audit:
    enabled: true
    var.paths: ["/var/log/elasticsearch/elk_audit.json"]
  slowlog:
    enabled: true
    var.paths: ["/var/log/elasticsearch/elk_index_search_slowlog.json"]
  deprecation:
    enabled: true
    var.paths: ["/var/log/elasticsearch/elk_deprecation.json"]
My elasticsearch.yml:
cluster.name: elk-elk.com
node.name: node-master01
path.data: /opt/elasticsearch/data
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: true
network.host: 10.200.9.166
http.port: 9200
discovery.seed_hosts: ["10.200.9.166", "10.200.9.172", "10.200.9.175"]
node.roles: [master, ingest]
xpack.security.enabled: true
xpack.security.enrollment.enabled: true
xpack.security.http.ssl.enabled: true
xpack.security.http.ssl.keystore.path: "certs/elasticsearch/http.p12"
xpack.monitoring.collection.enabled: true
xpack.security.transport.ssl:
  enabled: true
  verification_mode: certificate
  keystore.path: certs/cert-master01.p12
  certificate_authorities: ["certs/ca/ca.crt"]
http.host: 0.0.0.0
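Stack Monitoring matches log documents against the monitored cluster's UUID, so for comparison the UUID can be read straight from the cluster itself (run against the node above):

GET /?filter_path=cluster_name,cluster_uuid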
My kibana.yml:
xpack.security.encryptionKey: "19dwdqwdwq19dwdqwdwq19dwdqwdwq19dwdqwdwq19dwdqwdwq"
xpack.reporting.encryptionKey: "19dwdqwdwq_d3219dwdqwdwq19dwdqwdwqd32"
xpack.encryptedSavedObjects.encryptionKey: "19dwdqwdwq19dwdqwdwq19dwdqwdwqw_oellek233332ddd"
server.port: 5601
server.host: "0.0.0.0"
monitoring.ui.ccs.enabled: false
server.publicBaseUrl: "https://mster01.elk.elk.com.ru:5601"
server.name: "kibana-linux.elk.elk.com.ru"
server.ssl.enabled: true
server.ssl.keystore.path: "/etc/kibana/kibana/kibana-certificate.p12"
server.ssl.truststore.path: "/etc/kibana/elastic-stack-ca.p12"
elasticsearch.hosts: ["https://kibana-linux.elk.elk.com.ru:9200", "https://datahot01.elk.elk.com.ru:9200", "https://datahot02.elk.elk.com.ru:9200", "https://datahot03.elk.elk.com.ru:9200", "https://datawarm01.elk.elk.com.ru:9200", "https://datawarm02.elk.elk.com.ru:9200", "https://datawarm03.elk.elk.com.ru:9200", "https://datawarm04.elk.elk.com.ru:9200", "https://datawarm05.elk.elk.com.ru:9200"]
elasticsearch.username: "kibana_system"
elasticsearch.password: "PASSWORD"
elasticsearch.ssl.certificateAuthorities: ["/etc/kibana/elasticsearch-ca.pem"]
elasticsearch.sniffInterval: 600000
elasticsearch.sniffOnConnectionFault: true
xpack.security.session.idleTimeout: "30m"
xpack.security.session.lifespan: "1d"
xpack.security.session.cleanupInterval: "8h"
logging:
  appenders:
    file:
      type: file
      fileName: /var/log/kibana/kibana.log
      layout:
        type: pattern
        highlight: true
        pattern: "[%date][%level][%logger] %message %meta"
  root:
    appenders:
      - default
      - file
logging.loggers:
  - name: elasticsearch.query
    level: error
path.data: /opt/kibana/data
pid.file: /run/kibana/kibana.pid
I'm also running this query against the cluster that stores the Filebeat data:
POST filebeat-*/_search?filter_path=hits.hits._source.event.dataset,hits.hits._source.@timestamp,hits.hits._source.elasticsearch
{
  "size": 10,
  "sort": [
    {
      "@timestamp": {
        "order": "desc"
      }
    }
  ],
  "collapse": {
    "field": "event.dataset"
  }
}
Result:
{
  "hits": {
    "hits": [
      {
        "_source": {
          "@timestamp": "2022-09-02T00:13:17.935Z",
          "event": {
            "dataset": "elasticsearch.gc"
          }
        }
      },
      {
        "_source": {
          "@timestamp": "2022-09-02T00:12:19.500Z",
          "event": {
            "dataset": "logstash.log"
          }
        }
      },
      {
        "_source": {
          "@timestamp": "2022-09-02T00:09:35.861Z",
          "event": {
            "dataset": "kibana.log"
          }
        }
      },
      {
        "_source": {
          "event": {
            "dataset": "elasticsearch.server"
          },
          "@timestamp": "2022-09-02T00:09:17.075Z"
        }
      },
      {
        "_source": {
          "event": {
            "dataset": "elasticsearch.deprecation"
          },
          "@timestamp": "2022-09-02T00:00:08.951Z"
        }
      }
    ]
  }
}
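So the elasticsearch.* datasets are definitely indexed. To check whether those documents also carry the field the monitoring query filters on, a probe like this (same pattern, with an exists filter added) should narrow it down:

POST filebeat-*/_search?filter_path=hits.total,hits.hits._source.elasticsearch.cluster.uuid
{
  "size": 3,
  "query": {
    "bool": {
      "filter": [
        {
          "term": {
            "service.type": "elasticsearch"
          }
        },
        {
          "exists": {
            "field": "elasticsearch.cluster.uuid"
          }
        }
      ]
    }
  }
}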
And this query:
POST filebeat-*/_search
{
  "size": 0,
  "sort": {
    "@timestamp": {
      "order": "desc"
    }
  },
  "query": {
    "bool": {
      "filter": [
        {
          "term": {
            "service.type": "elasticsearch"
          }
        },
        {
          "range": {
            "@timestamp": {
              "gte": "now-1h",
              "lte": "now"
            }
          }
        },
        {
          "term": {
            "elasticsearch.cluster.uuid": "{cluster_uuid}"
          }
        }
      ]
    }
  },
  "aggs": {
    "types": {
      "terms": {
        "field": "event.dataset"
      },
      "aggs": {
        "levels": {
          "terms": {
            "field": "log.level"
          }
        }
      }
    }
  }
}
Result:
{
  "took": 8,
  "timed_out": false,
  "_shards": {
    "total": 2,
    "successful": 2,
    "skipped": 0,
    "failed": 0
  },
  "hits": {
    "total": {
      "value": 0,
      "relation": "eq"
    },
    "max_score": null,
    "hits": []
  },
  "aggregations": {
    "types": {
      "doc_count_error_upper_bound": 0,
      "sum_other_doc_count": 0,
      "buckets": []
    }
  }
}
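Zero hits, even though the collapse query above clearly finds elasticsearch.* documents. To isolate which filter empties the result, the same query can be rerun with the uuid term dropped (sketch below, otherwise unchanged):

POST filebeat-*/_search
{
  "size": 0,
  "query": {
    "bool": {
      "filter": [
        {
          "term": {
            "service.type": "elasticsearch"
          }
        },
        {
          "range": {
            "@timestamp": {
              "gte": "now-1h",
              "lte": "now"
            }
          }
        }
      ]
    }
  },
  "aggs": {
    "types": {
      "terms": {
        "field": "event.dataset"
      }
    }
  }
}

If this version returns buckets, the uuid term is the culprit; if it still returns nothing, the problem is service.type or the time range.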
My Filebeat mapping is attached (filebeat_mapping.txt); it is the output of:
GET filebeat-*/_mapping
And this:
GET _ingest/pipeline/filebeat-8.3.3-elasticsearch-server-pipeline
Result:
{
  "filebeat-8.3.3-elasticsearch-server-pipeline": {
    "description": "Pipeline for parsing elasticsearch server logs",
    "processors": [
      {
        "set": {
          "field": "event.ingested",
          "value": "{{_ingest.timestamp}}"
        }
      },
      {
        "set": {
          "copy_from": "@timestamp",
          "field": "event.created"
        }
      },
      {
        "grok": {
          "field": "message",
          "patterns": [
            "^%{CHAR:first_char}"
          ],
          "pattern_definitions": {
            "CHAR": "."
          }
        }
      },
      {
        "pipeline": {
          "if": "ctx.first_char != '{'",
          "name": "filebeat-8.3.3-elasticsearch-server-pipeline-plaintext"
        }
      },
      {
        "pipeline": {
          "if": "ctx.first_char == '{'",
          "name": "filebeat-8.3.3-elasticsearch-server-pipeline-json"
        }
      },
      {
        "script": {
          "params": {
            "ms_in_one_m": 60000,
            "minutes_unit": "m",
            "seconds_unit": "s",
            "milliseconds_unit": "ms",
            "ms_in_one_s": 1000
          },
          "lang": "painless",
          "source": """if (ctx.elasticsearch.server.gc != null && ctx.elasticsearch.server.gc.observation_duration != null) {
            if (ctx.elasticsearch.server.gc.observation_duration.unit == params.seconds_unit) {
              ctx.elasticsearch.server.gc.observation_duration.ms = ctx.elasticsearch.server.gc.observation_duration.time * params.ms_in_one_s;
            }
            if (ctx.elasticsearch.server.gc.observation_duration.unit == params.milliseconds_unit) {
              ctx.elasticsearch.server.gc.observation_duration.ms = ctx.elasticsearch.server.gc.observation_duration.time;
            }
            if (ctx.elasticsearch.server.gc.observation_duration.unit == params.minutes_unit) {
              ctx.elasticsearch.server.gc.observation_duration.ms = ctx.elasticsearch.server.gc.observation_duration.time * params.ms_in_one_m;
            }
          }
          if (ctx.elasticsearch.server.gc != null && ctx.elasticsearch.server.gc.collection_duration != null) {
            if (ctx.elasticsearch.server.gc.collection_duration.unit == params.seconds_unit) {
              ctx.elasticsearch.server.gc.collection_duration.ms = ctx.elasticsearch.server.gc.collection_duration.time * params.ms_in_one_s;
            }
            if (ctx.elasticsearch.server.gc.collection_duration.unit == params.milliseconds_unit) {
              ctx.elasticsearch.server.gc.collection_duration.ms = ctx.elasticsearch.server.gc.collection_duration.time;
            }
            if (ctx.elasticsearch.server.gc.collection_duration.unit == params.minutes_unit) {
              ctx.elasticsearch.server.gc.collection_duration.ms = ctx.elasticsearch.server.gc.collection_duration.time * params.ms_in_one_m;
            }
          }"""
        }
      },
      {
        "set": {
          "value": "event",
          "field": "event.kind"
        }
      },
      {
        "set": {
          "field": "event.category",
          "value": "database"
        }
      },
      {
        "script": {
          "lang": "painless",
          "source": """def errorLevels = ['FATAL', 'ERROR'];
          if (ctx?.log?.level != null) {
            if (errorLevels.contains(ctx.log.level)) {
              ctx.event.type = 'error';
            } else {
              ctx.event.type = 'info';
            }
          }"""
        }
      },
      {
        "set": {
          "field": "host.name",
          "value": "{{elasticsearch.node.name}}",
          "ignore_empty_value": true
        }
      },
      {
        "set": {
          "field": "host.id",
          "value": "{{elasticsearch.node.id}}",
          "ignore_empty_value": true
        }
      },
      {
        "remove": {
          "field": [
            "elasticsearch.server.gc.collection_duration.time",
            "elasticsearch.server.gc.collection_duration.unit",
            "elasticsearch.server.gc.observation_duration.time",
            "elasticsearch.server.gc.observation_duration.unit"
          ],
          "ignore_missing": true
        }
      },
      {
        "remove": {
          "field": [
            "elasticsearch.server.timestamp",
            "elasticsearch.server.@timestamp"
          ],
          "ignore_missing": true
        }
      },
      {
        "remove": {
          "field": [
            "first_char"
          ]
        }
      }
    ],
    "on_failure": [
      {
        "set": {
          "value": "{{ _ingest.on_failure_message }}",
          "field": "error.message"
        }
      }
    ]
  }
}
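To see what this pipeline actually emits, it can be fed a sample document through _simulate (the nested pipelines it dispatches to are executed as well, assuming they all exist like the two shown here). The message below is a made-up, trimmed stand-in for an ES 8 JSON log line, just enough to take the JSON branch; the interesting part of the response is whether elasticsearch.cluster.uuid survives into the final document:

POST _ingest/pipeline/filebeat-8.3.3-elasticsearch-server-pipeline/_simulate
{
  "docs": [
    {
      "_source": {
        "message": """{"@timestamp":"2022-09-02T00:09:17.075Z","log.level":"INFO","message":"test line","ecs.version":"1.2.0","elasticsearch.cluster.name":"elk-elk.com","elasticsearch.cluster.uuid":"some-uuid","elasticsearch.node.name":"node-master01"}"""
      }
    }
  ]
}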
And finally this:
GET _ingest/pipeline/filebeat-8.3.3-elasticsearch-server-pipeline-json
Result:
{
  "filebeat-8.3.3-elasticsearch-server-pipeline-json": {
    "description": "Pipeline for parsing the Elasticsearch server log file in JSON format.",
    "on_failure": [
      {
        "set": {
          "field": "error.message",
          "value": "{{ _ingest.on_failure_message }}"
        }
      }
    ],
    "processors": [
      {
        "json": {
          "field": "message",
          "target_field": "elasticsearch.server"
        }
      },
      {
        "pipeline": {
          "if": "ctx.elasticsearch.server.containsKey('type')",
          "name": "filebeat-8.3.3-elasticsearch-server-pipeline-json-7"
        }
      },
      {
        "pipeline": {
          "if": "ctx.elasticsearch.server.containsKey('ecs.version')",
          "name": "filebeat-8.3.3-elasticsearch-server-pipeline-json-8"
        }
      }
    ]
  }
}
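Both pipelines dump any parse failure into error.message, so one last check is whether any such documents exist:

POST filebeat-*/_search?filter_path=hits.total,hits.hits._source.error.message
{
  "size": 3,
  "query": {
    "exists": {
      "field": "error.message"
    }
  }
}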