Hello,
I'm trying to use the Fortinet module to parse logs and make them presentable before they ship to Logstash. Here is my Filebeat configuration file:
# ============================== Filebeat inputs ===============================

filebeat.inputs:

- type: log

  # Change to true to enable this input configuration.
  enabled: false

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
    #- c:\programdata\elasticsearch\logs\*

# ============================== Filebeat modules ==============================

filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml

  # Set to true to enable config reloading
  reload.enabled: false

  # Period on which files under path should be checked for changes
  #reload.period: 10s

filebeat.modules:
- module: fortinet
  firewall:
    enabled: true
    var.input: udp
    var.syslog_host: 0.0.0.0
    var.syslog_port: 514

# ======================= Elasticsearch template setting =======================

setup.template.settings:
  index.number_of_shards: 1
  #index.codec: best_compression
  #_source.enabled: false

# ================================== General ===================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
#  env: staging

# ================================= Dashboards =================================
# These settings control loading the sample dashboards to the Kibana index. Loading
# the dashboards is disabled by default and can be enabled either by setting the
# options here or by using the `setup` command.
#setup.dashboards.enabled: false

# The URL from where to download the dashboards archive. By default this URL
# has a value which is computed based on the Beat name and version. For released
# versions, this URL points to the dashboard archive on the artifacts.elastic.co
# website.
#setup.dashboards.url:
# =================================== Kibana ===================================

# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
#setup.kibana:

  # Kibana Host
  # Scheme and port can be left out and will be set to the default (http and 5601)
  # In case you specify an additional path, the scheme is required: http://localhost:5601/path
  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
  #host: "localhost:5601"

  # Kibana Space ID
  # ID of the Kibana Space into which the dashboards should be loaded. By default,
  # the Default Space will be used.
  #space.id:
# =============================== Elastic Cloud ================================

# These settings simplify using Filebeat with the Elastic Cloud (https://cloud.elastic.co/).

# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:

# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:

# ================================== Outputs ===================================

# Configure what output to use when sending the data collected by the beat.

# ---------------------------- Elasticsearch Output ----------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Protocol - either `http` (default) or `https`.
  #protocol: "https"

  # Authentication credentials - either API key or username/password.
  #api_key: "id:api_key"
  #username: "elastic"
  #password: "changeme"

# ------------------------------ Logstash Output -------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["localhost:5522"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

# ================================= Processors =================================

# Configure processors to enhance or manipulate events generated by the beat.

processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~

# ================================== Logging ===================================

# Sets log level. The default log level is info.
# Available log levels are: error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]

# ============================= X-Pack Monitoring ==============================
# Filebeat can export internal metrics to a central Elasticsearch monitoring
# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The
# reporting is disabled by default.

# Set to true to enable the monitoring reporter.
#monitoring.enabled: false

# Sets the UUID of the Elasticsearch cluster under which monitoring data for this
# Filebeat instance will appear in the Stack Monitoring UI. If output.elasticsearch
# is enabled, the UUID is derived from the Elasticsearch cluster referenced by output.elasticsearch.
#monitoring.cluster_uuid:

# Uncomment to send the metrics to Elasticsearch. Most settings from the
# Elasticsearch output are accepted here as well.
# Note that the settings should point to your Elasticsearch *monitoring* cluster.
# Any setting that is not set is automatically inherited from the Elasticsearch
# output configuration, so if you have the Elasticsearch output configured such
# that it is pointing to your Elasticsearch monitoring cluster, you can simply
# uncomment the following line.
#monitoring.elasticsearch:
# ================================= Migration ==================================

# This enables the 6.7 migration aliases
#migration.6_to_7.enabled: true
And here is my Logstash pipeline file for it:
input {
  beats {
    port => 5522
  }
}

output {
  elasticsearch {
    hosts => ["http://10.0.200.120:9200"]
    index => "fortios-6.0.9-%{+YYYY.MM.dd}"
  }
  stdout {
    codec => rubydebug
  }
}
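For reference, the raw message is plain key=value text, so in principle I could split it myself with a kv filter in Logstash. This is only a sketch of what I mean, not something in my current pipeline, since I expected the Fortinet module to do the parsing for me:

filter {
  kv {
    source      => "message"  # the unparsed Fortinet line
    field_split => " "        # pairs are separated by spaces
    value_split => "="
    trim_value  => '"'        # strip the quotes around values like devname="..."
    # Note: this ignores the leading "<189>" syslog priority, which would have
    # to be stripped first (e.g. with mutate/gsub).
  }
}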
The output I want is for every key=value pair in the log entry to become its own field with the correct name, which doesn't happen. On top of that, the event carries operating-system metadata about the server that hosts Filebeat itself (presumably from the add_host_metadata processor), which I don't need at all.
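For example, from the message in the output below I would expect fields along these lines (a sketch of the structure I'm after; the exact names are my assumption about the module's ECS mapping, which I haven't been able to verify):

    "source" => {
        "ip" => "10.0.0.6",
        "port" => 137
    },
    "destination" => {
        "ip" => "10.0.0.255",
        "port" => 137
    },
    "event" => {
        "action" => "deny"
    },
    "fortinet" => {
        "firewall" => {
            "type" => "traffic",
            "subtype" => "local"
        }
    }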
Here is the output I get on the command line:
{
    "@timestamp" => 2020-07-20T11:31:44.135Z,
    "@version" => "1",
    "message" => "<189>date=2020-07-20 time=13:28:24 devname=\"FG200D4Q16809345\" devid=\"FG200D4Q16809345\" logid=\"0001000014\" type=\"traffic\" subtype=\"local\" level=\"notice\" vd=\"root\" eventtime=1595244504 srcip=10.0.0.6 srcname=\"KS2\" srcport=137 srcintf=\"VL01\" srcintfrole=\"lan\" dstip=10.0.0.255 dstport=137 dstintf=unknown-0 dstintfrole=\"undefined\" sessionid=179595452 proto=17 action=\"deny\" policyid=0 policytype=\"local-in-policy\" service=\"udp/137\" dstcountry=\"Reserved\" srccountry=\"Reserved\" trandisp=\"noop\" app=\"netbios forward\" duration=0 sentbyte=0 rcvdbyte=0 sentpkt=0 appcat=\"unscanned\" devtype=\"Windows PC\" devcategory=\"Windows Device\" osname=\"Windows 10 / 2016\" mastersrcmac=\"00:50:56:81:2f:46\" srcmac=\"00:50:56:81:2f:46\" srcserver=1",
    "ecs" => {
        "version" => "1.5.0"
    },
    "event" => {
        "timezone" => "+02:00",
        "dataset" => "fortinet.firewall",
        "module" => "fortinet"
    },
    "tags" => [
        [0] "fortinet-firewall",
        [1] "beats_input_codec_plain_applied"
    ],
    "service" => {
        "type" => "fortinet"
    },
    "host" => {
        "os" => {
            "codename" => "Core",
            "family" => "redhat",
            "platform" => "centos",
            "name" => "CentOS Linux",
            "version" => "7 (Core)",
            "kernel" => "3.10.0-1127.10.1.el7.x86_64"
        },
        "hostname" => "logstash.sumerge.local",
        "mac" => [
            [0] "00:50:56:a4:ec:eb",
            [1] "52:54:00:41:89:ef",
            [2] "52:54:00:41:89:ef"
        ],
        "containerized" => false,
        "ip" => [
            [0] "10.0.200.125",
            [1] "fe80::508e:fb6c:279c:6b0a",
            [2] "fe80::3a2f:caf7:33bc:9495",
            [3] "fe80::6f1:676d:7399:649d",
            [4] "192.168.122.1"
        ],
        "architecture" => "x86_64",
        "id" => "910e60d7c1e64e309ca7f310da1ea10e",
        "name" => "logstash.sumerge.local"
    },
    "fileset" => {
        "name" => "firewall"
    },
    "input" => {
        "type" => "udp"
    },
    "agent" => {
        "type" => "filebeat",
        "hostname" => "logstash.sumerge.local",
        "ephemeral_id" => "72b719eb-4b3c-4e79-8d64-5fbeed478fa9",
        "name" => "logstash.sumerge.local",
        "version" => "7.8.0",
        "id" => "a9b4394d-aad5-463e-9341-17b4775efa09"
    },
    "log" => {
        "source" => {
            "address" => "10.0.200.251:1102"
        }
    }
}
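If the host metadata can't be avoided at the source (I assume removing add_host_metadata from the processors list in filebeat.yml would stop most of it), could I drop it in Logstash instead? Something like this is what I had in mind:

filter {
  mutate {
    # Drop the Filebeat host's own details, which I don't need in the index.
    remove_field => ["[host][os]", "[host][mac]", "[host][ip]", "[host][architecture]"]
  }
}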
Here is how it looks in Kibana:
Also, tags appear whose meaning I don't understand, like "fortinet-firewall" and "beats_input_codec_plain_applied" in the output above.
Could anyone help me fix this issue, please?
Thank you