Hello All,
I have a requirement where different log paths are defined on a server; Filebeat should read these paths and write the data to their respective Elasticsearch indices.
The ILM policy and the required rollover alias are defined in the index template settings.
After several attempts, I am unable to get any data in, or segregate the data into, the different indices.
To summarize: a single filebeat.yml should process different log paths and write the data to different indices, while also following the ILM policy and the rollover index pattern (rollover alias) defined in the templates.
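For the routing itself, I understand from the Filebeat docs that output.elasticsearch supports conditional routing via the indices setting; a minimal sketch of what I would expect to work, reusing my fields.index values and writing to the rollover alias so that ILM manages the dated backing indices (I am not sure whether the alias or a dated index name is correct here):

output.elasticsearch:
  hosts: ["http://abc:9200"]
  indices:
    - index: "mis-monitoring-usecases"
      when.equals:
        fields.index: "mis-monitoring-usecases"
    - index: "mis-log"
      when.equals:
        fields.index: "mis-log"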
Template settings (note that the rollover aliases are different for the two templates):
PUT _index_template/mis-monitoring-usecases
{
  "index_patterns": ["mis-monitoring-usecases-*"],
  "template": {
    "settings": {
      "index": {
        "lifecycle": {
          "name": "mis-monitoring-common-policy",
          "rollover_alias": "mis-monitoring-usecases"
        },
        "default_pipeline": "mis-usecases-ingest-pipeline",
        "number_of_shards": "1",
        "number_of_replicas": "0"
      }
    },
    "mappings": {
      "properties": {}
    }
  }
}
------------------------------------------------------------------------------------
PUT _index_template/mis-log
{
  "index_patterns": ["mis-log-*"],
  "template": {
    "settings": {
      "index": {
        "lifecycle": {
          "name": "mis-monitoring-common-policy",
          "rollover_alias": "mis-log"
        },
        "default_pipeline": "mis-log-ingest-pipeline",
        "number_of_shards": "1",
        "number_of_replicas": "0"
      }
    },
    "mappings": {
      "properties": {}
    }
  }
}
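For reference, the shared lifecycle policy referenced by both templates is a plain rollover policy; roughly like this (the thresholds below are placeholders, not my real values):

PUT _ilm/policy/mis-monitoring-common-policy
{
  "policy": {
    "phases": {
      "hot": {
        "actions": {
          "rollover": {
            "max_age": "7d",
            "max_primary_shard_size": "50gb"
          }
        }
      }
    }
  }
}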
filebeat.yml:
filebeat.inputs:
- type: log
enabled: true
paths:
- /k/app/LOG_ROOT/MIS/**/*.log
fields:
index: mis-monitoring-usecases
ignore_older: 1h
include_lines: ['ranv.*\|']
multiline.type: pattern
multiline.pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z
multiline.negate: true
multiline.match: after
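# Made-up sample line that include_lines and the multiline pattern are meant to match:
# 2023-05-05T10:15:30.123Z ranv.worker|INFO|job started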
scan_frequency: 30s
harvester_limit: 100
close_inactive: 30m
close_removed: true
clean_removed: true
processors:
- add_host_metadata: ~
- drop_fields:
when:
equals:
agent.type: filebeat
fields:
- agent.hostname
- agent.id
- agent.type
- agent.ephemeral_id
- agent.version
- log.offset
- log.flags
- input.type
- ecs.version
- host
- type: log
enabled: true
paths:
- /l/app/LOG_ROOT/Demo/*.log
fields:
index: mis-log
ignore_older: 1h
include_lines:
- ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z ?(.*)
multiline.type: pattern
multiline.pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z
multiline.negate: true
multiline.match: after
scan_frequency: 30s
harvester_limit: 100
close_inactive: 30m
close_removed: true
clean_removed: true
processors:
- add_host_metadata: ~
- drop_fields:
when:
equals:
agent.type: filebeat
fields:
- agent.hostname
- agent.id
- agent.type
- agent.ephemeral_id
- agent.version
- log.offset
- log.flags
- input.type
- ecs.version
- host.os
- host.id
- host.mac
- host.architecture
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.kibana:
host: http://abc:5601
output.elasticsearch:
hosts:
- http://abc:9200
index: "%{[fields.index]}-%{+yyyy-MM-dd}-000001"
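# With fields.index set per input, this expands to e.g.
# mis-monitoring-usecases-2023-05-05-000001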
monitoring.enabled: true
monitoring.elasticsearch: ~
setup.ilm.enabled: false
setup.template.enabled: false
Required output indices:
mis-monitoring-usecases-2023-05-05-000001
mis-log-2023-05-05-000001
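As I understand ILM bootstrapping, these initial indices also have to be created manually, with the rollover alias marked as the write index, before Filebeat starts writing, e.g.:

PUT mis-monitoring-usecases-2023-05-05-000001
{
  "aliases": {
    "mis-monitoring-usecases": {
      "is_write_index": true
    }
  }
}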
Kindly let me know how this can be done without Logstash.