@exekias Here is the full filebeat.yml
filebeat.prospectors:
# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.
#- c:\programdata\elasticsearch\logs\*
# Exclude lines. A list of regular expressions to match. It drops the lines that
# match any regular expression from the list.
#exclude_lines: ["^DBG"]

# Include lines. A list of regular expressions to match. It exports the lines that
# match any regular expression from the list.
#include_lines: ["^ERR", "^WARN"]

# Exclude files. A list of regular expressions to match. Filebeat drops the files that
# match any regular expression from the list. By default, no files are dropped.
#exclude_files: [".gz$"]

# Optional additional fields. These fields can be freely picked
# to add additional information to the crawled log files for filtering.
#fields:
#  level: debug
#  review: 1

### Multiline options

# Multiline can be used for log messages spanning multiple lines. This is common
# for Java stack traces or C line continuation.

# The regexp pattern that has to be matched. The example pattern matches all lines starting with [
#multiline.pattern: ^\[

# Defines whether the pattern set under pattern should be negated or not. Default is false.
#multiline.negate: false

# Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
# that was (not) matched before or after, or as long as a pattern is not matched, based on negate.
# Note: "after" is the equivalent of "previous" and "before" is the equivalent of "next" in Logstash.
#multiline.match: after
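For reference, a complete prospector stanza with multiline joining looks roughly like the sketch below. The path and pattern are placeholders, not values taken from this config, so they would need to be adjusted to the logs actually being shipped.

```yaml
filebeat.prospectors:
- input_type: log
  # Hypothetical path; point this at the real log directory.
  paths:
    - c:\logs\myapp\*.log
  # Treat any line that does NOT start with "[" as a continuation of the
  # previous line, e.g. to keep a Java stack trace as a single event.
  multiline.pattern: '^\['
  multiline.negate: true
  multiline.match: after
```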
#================================ General =====================================
# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:
# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]
# Optional fields that you can specify to add additional information to the
# output.
#fields:
# env: staging
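If per-host grouping or filtering is wanted later, the commented options above can simply be enabled; the values here are examples only:

```yaml
name: "web-01"
tags: ["service-X", "web-tier"]
fields:
  env: staging
```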
#================================ Outputs =====================================
# Configure what outputs to use when sending the data collected by the beat.
# Multiple outputs may be used.
#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"
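Shipping straight to Elasticsearch instead of Logstash would only need the commented block above enabled; the host and credentials below are placeholders:

```yaml
output.elasticsearch:
  hosts: ["localhost:9200"]
  protocol: "https"
  username: "elastic"
  password: "changeme"
```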
#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["172.31.1.109:5044"]

  # Optional SSL. Off by default.
  # List of root certificates for HTTPS server verification
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client certificate key
  #ssl.key: "/etc/pki/client/cert.key"
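If the Logstash beats input is TLS-enabled, the three ssl.* options above get uncommented and pointed at this host's certificates; the paths below are the illustrative ones from the file, not verified paths on this machine:

```yaml
output.logstash:
  hosts: ["172.31.1.109:5044"]
  ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
  ssl.certificate: "/etc/pki/client/cert.pem"
  ssl.key: "/etc/pki/client/cert.key"
```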
#================================ Logging =====================================
# Sets log level. The default log level is info.
# Available log levels are: critical, error, warning, info, debug
#logging.level: debug
# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]
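To see what Filebeat is (or is not) publishing while debugging this, the logging block can be turned up for just the publisher, using the "publish" selector mentioned above:

```yaml
logging.level: debug
logging.selectors: ["publish"]
```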