Dear all,
What is wrong with filebeat version 7.4.2 config?
######################## Filebeat Configuration ############################
#========================== Modules configuration =============================
filebeat.modules:
#----------------------------- Elasticsearch Module -----------------------------
- module: elasticsearch
# Server log
server:
enabled: true
gc:
enabled: true
audit:
enabled: true
slowlog:
enabled: true
deprecation:
enabled: true
#--------------------------------- Kafka Module ---------------------------------
- module: kafka
log:
enabled: false
#-------------------------------- Kibana Module --------------------------------
- module: kibana
log:
enabled: true
#------------------------------- Logstash Module -------------------------------
- module: logstash
log:
enabled: true
#=========================== Filebeat inputs =============================
filebeat.inputs:
#------------------------------ Log input --------------------------------
- type: log
enabled: true
paths:
- /home/gunner/Gogunner/logs/log4j_someapp.log
fields:
type: someapp
application: someapp
fields_under_root: true
encoding: utf-8
- type: log
enabled: true
paths:
- /home/gunner/Gogunner/logs/log4j_someapp_synch.log
fields:
type: someapp_synch_log
application: someapp
fields_under_root: true
multiline.pattern: ^
multiline.negate: true
multiline.match: after
exclude_files: ['\.gz$','\.bz2$','\.swp$']
fields_under_root: true
scan_frequency: 5
### Multiline options
multiline.max_lines: 500
multiline.timeout: 10s
close_inactive: 5m
close_renamed: true
close_removed: true
clean_removed: true
close_timeout: 30
# Defines if inputs is enabled
enabled: true
#------------------------------ Docker input --------------------------------
enabled: false
#================================ General ======================================
name: 'gunner3_shipper'
tags: ["gunner3_shipper", "gunner3_logs"]
fields_under_root: false
#-------------------------- Elasticsearch output -------------------------------
output.elasticsearch:
# Boolean flag to enable or disable the output module.
enabled: true
hosts: ["zz.zz.zzz.zz:9200"]
worker: 3
index: "logs-%{+YYYY.MM.dd}"
timeout: 90
#----------------------------- Logstash output ---------------------------------
output.logstash:
# Boolean flag to enable or disable the output module.
enabled: false
# The Logstash hosts
hosts: ["yy.yy.yyy.yyy:54"]
# Number of workers per Logstash host.
worker: 3
# Optional index name. The default index name is set to filebeat
# in all lowercase.
index: "logs-%{+YYYY.MM.dd}"
#================================Data=========================================
path.home: /usr/share/filebeat/bin
path.config: /etc/filebeat/
path.data: /etc/filebeat/data
path.logs: /var/log/filebeat
#============================== Template =====================================
# Set to false to disable template loading.
setup.template.enabled: true
# Template name. By default the template name is "filebeat-%{[beat.version]}"
# The template name and pattern has to be set in case the elasticsearch index pattern is modified.
setup.template.name: "filebeat-%{[beat.version]}"
setup.template.pattern: "filebeat-%{[beat.version]}-*"
# Path to fields.yml file to generate the template
setup.template.fields: "${path.config}/fields.yml"
# Elasticsearch template settings
setup.template.settings:
index:
number_of_shards: 1
codec: best_compression
number_of_routing_shards: 30
#============================== Kibana =====================================
# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
setup.kibana:
kibana host: 'http://xx.xxx.xx:5601'
#================================ Logging ======================================
# Configure the path where the logs are written. The default is the logs directory
# under the home path (the binary location).
# The name of the files where the logs are written to.
# Configure log file size limit. If limit is reached, log file will be
# automatically rotated
# rotateeverybytes: 10485760 # = 10MB
# Number of rotated log files to keep. Oldest files will be deleted first.
logging.level: debug
logging.to_files: true
logging.files:
path: /var/log/filebeat
name: filebeat
keepfiles: 7
permissions: 0644
rotateeverybytes: 10485760 # = 10MB
I receive these kind of exceptions:
020-01-10T09:43:04.250+0400
WARN elasticsearch/client.go:535
Cannot index event publisher.Event{Content:beat.Event{Timestamp:time.Time{wall:0xbf7e2199cdc9cf0c,
ext:22086279746, loc:(*time.Location)(0x4de6580)}, Meta:common.MapStr(nil),
Fields:common.MapStr{"agent":common.MapStr{"ephemeral_id":"81cdc692-d844-46f8-9470-88baa292d3b9",
"hostname":"zzz.com", "id":"4f8513df-d84e-44f1-a4b5-92739d27db39", "name":"gunner3_shipper",
"type":"filebeat", "version":"7.4.2"}, "application":"someapp", "ecs":common.MapStr{"version":"1.1.0"},
"host":common.MapStr{"name":"runner3_shipper"}, "input":common.MapStr{"type":"log"},
"log":common.MapStr{"file":common.MapStr{"path":"/home/runner/JavaRunner/logs/log4j_app.log"},
"offset":6939560}, "message":" 10 Jan 2020 09:43:02,395 INFO EasySignPrepaidSubscription : Starting to assign lifecycle offer",
"tags":[]string{"runner3_shipper", "gunner3_logs"}, "type":"asan_imza_log"}, Private:file.State{Id:"", Finished:false, Fileinfo:(*os.fileStat)(0xc00066c1a0),
Source:"/home/runner/JavaRunner/logs/log4j_someapp.log", Offset:6939657, Timestamp:time.Time{wall:0xbf7e21944ca8f2b3, ext:67348994,
loc:(*time.Location)(0x4de6580)}, TTL:-1, Type:"log", Meta:map[string]string(nil), FileStateOS:file.StateOS{Inode:0x62251, Device:0xfd02}},
TimeSeries:false}, Flags:0x1} (status=400): {"type":"mapper_parsing_exception","reason":"Failed to parse mapping [_default_]:
Root mapping definition has unsupported parameters:
[_all : {norms=false}]","caused_by":{"type":"mapper_parsing_exception","reason":
"Root mapping definition has unsupported parameters: [_all : {norms=false}]"}}