Installation & configuration of live logs with Logstash (Docker)

Hi,

I'm trying to install ELK on one Docker host and Filebeat on another.

So for the first Docker host I followed these guides:
Elasticsearch
Kibana
& Logstash
with the ELK images. (I don't want security, so I disabled it.)

I'm running ELK with docker-compose, and here are all of my files:

docker-compose.yml

version: '3.6'
services:
  Elasticsearch:
    image: elasticsearch:8.3.3
    container_name: elasticsearch
    restart: always
    volumes:
    - elastic_data:/usr/share/elasticsearch/data/
    environment:
      ES_JAVA_OPTS: "-Xms512m -Xmx512m"
      discovery.type: single-node
      cluster.name: docker-cluster
      xpack.security.enabled: "false"

    ports:
    - '9200:9200'
    - '9300:9300'
    networks:
      - elk

  Logstash:
    image: logstash:8.3.3
    container_name: logstash
    restart: always
    volumes:
    - /home/epnp/elastic_src/docker_elk:/logstash
    command: logstash -f /logstash/logstash-beat-electric.conf
    depends_on:
      - Elasticsearch
    ports:
    - '5044:5044'
    environment:
      LS_JAVA_OPTS: "-Xms512m -Xmx512m"
    networks:
      - elk

  Kibana:
    image: kibana:8.3.3
    container_name: kibana
    restart: always
    ports:
    - '5601:5601'
    environment:
      # Kibana 8.x reads ELASTICSEARCH_HOSTS (ELASTICSEARCH_URL is ignored by 8.x images)
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    depends_on:
      - Elasticsearch
    networks:
      - elk

  #filebeat:
    #image: filebeat:8.3.3
    #container_name: filebeat
    #restart: always
    #volumes:
    #- /home/epnp/elastic_src/docker_elk:/filebeat
    #networks:
    #- elk

volumes:
  elastic_data: {}

networks:
  elk:
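
With security disabled like this, a minimal smoke test from the host (ports as mapped above; assuming curl and netcat are installed) confirms all three services are up:

curl http://localhost:9200            # Elasticsearch info JSON
curl -I http://localhost:5601         # Kibana answers over HTTP
nc -zv localhost 5044                 # Logstash Beats port is listening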

logstash-beat-electric.conf:

# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.

input {
  beats {
    port => 5044
    id => "from_filebeat"
  }
}

output {
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
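    # with the Beats input, this resolves to e.g. "filebeat-8.3.3-2022.08.10"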
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
    manage_template => false
    action => "create"
  }
  stdout { codec => rubydebug }
}
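
# Section order in this file doesn't matter: Logstash always runs filters before outputs.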

filter {
  dissect {
    mapping => {
      "message" => "%{log_time} %{log_absolutetime} %{log_model} %{log_zone} %{log_data_direction} %{log_data_name} %{log_data_value}"
    }
  }

  date {
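    # log_time is ISO8601 (e.g. 2022-08-10T11:41:19.963); the ISO8601 pattern matches it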
    match => [ "log_time", "HH:mm:ss.SSS", "ISO8601" ]
  }

  ruby {
    init => "@current_car_mode = ''
             @transfer_car_mode = ''
             @start_driving_time = 0
             @current_weather = 'rain'
             @current_speed_limit = 300.0
             @current_speed = 0.0
             @start_over_speed_limit_time = 0
             @current_distraction = '0'
             @start_distraction_time = 0
             @start_reaction_time = 0
             @start_transfer_time = 0
             @current_driving_level = 'senior'
             #@test_reaction_time = 0
             #@test_transfer_time = 0
             @first_time = 0"
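    # State machine over the incoming log stream: the first event seeds
    # baseline metrics, then transitions in car mode, weather, distraction
    # and speed emit derived duration events via new_event_block.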
    code => "if @first_time == 0
               @first_time = 1
               new_event = event.clone()
               new_event.set('log_data_name', 'distraction_time');
               new_event.set('log_data_value', '0')
               new_event.set('log_data_info1', @current_weather)
               new_event.set('log_data_info2', @current_car_mode)
               new_event.set('log_data_numerical', 0)
               new_event_block.call(new_event)

               new_event = event.clone()
               new_event.set('log_data_name', 'over_speed_limit_time');
               new_event.set('log_data_value', '0')
               new_event.set('log_data_info1', @current_weather)
               new_event.set('log_data_info2', @current_car_mode)
               new_event.set('log_data_numerical', 0)
               new_event_block.call(new_event)

               @start_driving_time = event.get('log_absolutetime').to_f 
               @start_over_speed_limit_time = event.get('log_absolutetime').to_f
               @start_distraction_time = event.get('log_absolutetime').to_f
               @start_reaction_time = event.get('log_absolutetime').to_f
               @start_transfer_time = event.get('log_absolutetime').to_f
             end

             event.set('log_data_numerical', 0.0)
             if event.get('log_data_name') == 'car_mode' and event.get('log_model') == 'public_plugin_vehicle_data'
               if @current_car_mode == ''
                 @start_driving_time = event.get('log_absolutetime').to_f
               else
                 if event.get('log_data_value') != @current_car_mode
                   new_event = event.clone
                   new_event.set('log_data_name', 'driving_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_driving_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_driving_time)
                   new_event_block.call(new_event)
                   @start_driving_time = event.get('log_absolutetime').to_f
                 end
               end
               
               if @transfer_car_mode != ''
                 new_event = event.clone
                 new_event.set('log_data_name', 'transfer_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_transfer_time)
                 new_event.set('log_data_info1', @current_driving_level)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_transfer_time)
                 new_event_block.call(new_event)
                 @start_reaction_time = 0
                 @start_transfer_time = 0
                 @transfer_car_mode = ''
               end

               if event.get('log_data_value') == 'mrm'
                 new_event = event.clone
                 new_event.set('log_data_name', 'mrm_detection');
                 new_event.set('log_data_value', 'true')
                 new_event.set('log_data_info1', @current_driving_level)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event_block.call(new_event)
               end

               @current_car_mode = event.get('log_data_value')

             elsif event.get('log_data_name') == 'authority_transfer_request' and event.get('log_model') == 'public_plugin_smart_cabin'
               if event.get('log_data_value') == 'request_start'
                 @start_reaction_time = event.get('log_absolutetime').to_f
                 @start_transfer_time = event.get('log_absolutetime').to_f
                 @transfer_car_mode = ''
               elsif event.get('log_data_value') == 'request_taken_into_account'
                 if @start_reaction_time != 0 and @transfer_car_mode == ''
                   new_event = event.clone
                   new_event.set('log_data_name', 'reaction_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_reaction_time)
                   new_event.set('log_data_info1', @current_driving_level)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_reaction_time)
                   new_event_block.call(new_event)
                   @start_transfer_time = event.get('log_absolutetime').to_f
                   @transfer_car_mode = @current_car_mode
                 end
               elsif event.get('log_data_value') == 'request_cancel'
                   @start_reaction_time = 0
                   @start_transfer_time = 0
                   @transfer_car_mode = ''
               elsif event.get('log_data_value') == 'request_end'
                   #new_event = event.clone
                   #new_event.set('log_data_name', 'transfer_time');
                   #new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_transfer_time)
                   #new_event.set('log_data_info1', @current_driving_level)
                   #new_event.set('log_data_info2', @transfer_car_mode)
                   #new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_transfer_time)
                   #new_event_block.call(new_event)
                   #@start_transfer_time = 0
                   #@start_reaction_time = 0  
                   #@transfer_car_mode = ''
               end

             elsif event.get('log_data_name') == 'user.user_driving_level' and event.get('log_zone') == 'user_driver'
               @current_driving_level = event.get('log_data_value')
             
             elsif event.get('log_data_name') == 'weather' and event.get('log_model') == 'public_plugin_ros_driving_environment'
               if @current_car_mode != ''
                 new_event = event.clone
                 new_event.set('log_data_name', 'driving_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_driving_time)
                 new_event.set('log_data_info1', @current_weather)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_driving_time)
                 new_event_block.call(new_event)
                 @start_driving_time = event.get('log_absolutetime').to_f
               end
               if @current_distraction == '1'
                 new_event = event.clone
                 new_event.set('log_data_name', 'distraction_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_distraction_time)
                 new_event.set('log_data_info1', @current_weather)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_distraction_time)
                 new_event_block.call(new_event)
                 @start_distraction_time = event.get('log_absolutetime').to_f
               end
               if (@current_speed > @current_speed_limit)
                 new_event = event.clone
                 new_event.set('log_data_name', 'over_speed_limit_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                 new_event.set('log_data_info1', @current_weather)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                 new_event_block.call(new_event)
                 @start_over_speed_limit_time = event.get('log_absolutetime').to_f
               end
               @current_weather = event.get('log_data_value')

             elsif event.get('log_data_name') == 'distraction.distracted'
               if event.get('log_data_value') == '1'
                 if  @current_distraction == '0'
                   @start_distraction_time = event.get('log_absolutetime').to_f
                 end
                 event.set('log_data_numerical', '1');
                 @current_distraction = '1'

                 #new_event = event.clone
                 #new_event.set('log_data_name', 'reaction_time');
                 #new_event.set('log_data_value', @test_reaction_time)
                 #new_event.set('log_data_info1', @current_weather)
                 #new_event.set('log_data_info2', 'autonomous_driving')
                 #new_event.set('log_data_numerical', @test_reaction_time)
                 #@test_reaction_time = @test_reaction_time + 2
                 #new_event_block.call(new_event)

               else
                 if  @current_distraction == '1'
                   new_event = event.clone
                   new_event.set('log_data_name', 'distraction_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_distraction_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_distraction_time)
                   new_event_block.call(new_event)
                 end
                 event.set('log_data_numerical', '0');
                 @current_distraction = '0'

                 #new_event = event.clone
                 #new_event.set('log_data_name', 'transfer_time');
                 #new_event.set('log_data_value', @test_transfer_time)
                 #new_event.set('log_data_info1', @current_weather)
                 #new_event.set('log_data_info2', 'autonomous_driving')
                 #new_event.set('log_data_numerical', @test_transfer_time)
                 #@test_transfer_time = @test_transfer_time + 2
                 #new_event_block.call(new_event)

               end
             
             elsif event.get('log_data_name') == 'speed_limit' and event.get('log_model') == 'public_plugin_vehicle_data'
               if (@current_speed > @current_speed_limit)
                 @current_speed_limit = event.get('log_data_value').to_f
                 if @current_speed_limit >= @current_speed
                   new_event = event.clone
                   new_event.set('log_data_name', 'over_speed_limit_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event_block.call(new_event)
                 end
               else
                 @current_speed_limit = event.get('log_data_value').to_f
                 if @current_speed_limit < @current_speed
                   @start_over_speed_limit_time = event.get('log_absolutetime').to_f
                 end               
               end
             
             elsif event.get('log_data_name') == 'speed' and event.get('log_model') == 'public_plugin_vehicle_data'
               if @current_speed > @current_speed_limit
                 @current_speed = event.get('log_data_value').to_f
                 if @current_speed_limit >= @current_speed
                   new_event = event.clone
                   new_event.set('log_data_name', 'over_speed_limit_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event_block.call(new_event)
                 end
               else
                 @current_speed = event.get('log_data_value').to_f
                 if @current_speed_limit < @current_speed
                   @start_over_speed_limit_time = event.get('log_absolutetime').to_f
                 end               
               end

             elsif event.get('log_data_name') == 'lane_crossing_detection' and event.get('log_data_value') != 'lane_crossing' and event.get('log_model') == 'public_plugin_vehicle_data'
               new_event = event.clone
               new_event.set('log_data_name', 'lane_crossing');
               new_event.set('log_data_value', 'true')
               new_event.set('log_data_info1', @current_weather)
               new_event.set('log_data_info2', @current_car_mode)
               new_event_block.call(new_event)

             end"
  }

  mutate {
    convert => ["log_data_numerical","float"]
  }
}
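
Since a syntax error in this pipeline quietly keeps Logstash from starting, the file can be validated inside the running container (container name and mount path as in the docker-compose.yml above; --path.data points at a scratch directory so it doesn't collide with the running instance):

docker exec -it logstash bin/logstash -f /logstash/logstash-beat-electric.conf --config.test_and_exit --path.data /tmp/logstash-test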

conf/elasticsearch.yml

# ----------------------------------- Paths ------------------------------------
#
# Path to directory where to store the data (separate multiple locations by comma):
#
path.data: /var/lib/elasticsearch
#
# Path to log files:
#
path.logs: /var/log/elasticsearch
# ---------------------------------- Network -----------------------------------
#
# By default Elasticsearch is only accessible on localhost. Set a different
# address here to expose this node on the network:
network.host: 192.168.66.214
#
# By default Elasticsearch listens for HTTP traffic on the first free port it
# finds starting at 9200. Set a specific HTTP port here:
#
http.port: 9200
#
# For more information, consult the network module documentation.
#
# --------------------------------- Discovery ----------------------------------
#
# Pass an initial list of hosts to perform discovery when this node is started:
# The default list of hosts is ["127.0.0.1", "[::1]"]
#
discovery.seed_hosts: []
#
# Bootstrap the cluster using an initial set of master-eligible nodes:
#
#cluster.initial_master_nodes: ["node-1", "node-2"]
#
# For more information, consult the discovery and cluster formation module documentation.
#
# --------------------------------- Readiness ----------------------------------
#
# Enable an unauthenticated TCP readiness endpoint on localhost
#
#readiness.port: 9399
#
# ---------------------------------- Various -----------------------------------
#
# Allow wildcard deletion of indices:
#
#action.destructive_requires_name: false

#----------------------- BEGIN SECURITY AUTO CONFIGURATION -----------------------
#
# The following settings, TLS certificates, and keys have been automatically      
# generated to configure Elasticsearch security features on 17-06-2022 12:40:24
#
# --------------------------------------------------------------------------------

# Enable security features
xpack.security.enabled: false

xpack.security.enrollment.enabled: true

# Enable encryption for HTTP API client connections, such as Kibana, Logstash, and Agents
# Encryption for HTTP API client connections, such as Kibana, Logstash, and Agents
# (turned off here to stay consistent with xpack.security.enabled: false above)
xpack.security.http.ssl:
  enabled: false
  keystore.path: certs/http.p12

# Encryption and mutual authentication between cluster nodes
# (likewise turned off while security is disabled)
xpack.security.transport.ssl:
  enabled: false
  verification_mode: certificate
  keystore.path: certs/transport.p12
  truststore.path: certs/transport.p12
# Create a new cluster with the current node only
# Additional nodes can still join the cluster later
cluster.initial_master_nodes: ["EB-epicnpoc"]

# Allow HTTP API connections from anywhere
# Connections are encrypted and require user authentication
http.host: 0.0.0.0

# Allow other nodes to join the cluster from anywhere
# Connections are encrypted and mutually authenticated
#transport.host: 0.0.0.0

#----------------------- END SECURITY AUTO CONFIGURATION -------------------------
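
With security fully off, the node should answer over plain HTTP (IP as set in network.host above):

curl http://192.168.66.214:9200/_cluster/health?pretty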

And Filebeat on the other Docker host (I installed it manually):

/etc/filebeat/filebeat.yml

###################### Filebeat Configuration Example #########################

# This file is an example configuration file highlighting only the most common
# options. The filebeat.reference.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/filebeat/index.html

# For more available modules and options, please see the filebeat.reference.yml sample
# configuration file.

# ============================== Filebeat inputs ===============================

filebeat.inputs:

# Each - is an input. Most options can be set at the input level, so
# you can use different inputs for various configurations.
# Below are the input specific configurations.


- type: log
  enabled: true
  paths: 
    - /tmp/reference-data/*.log
  tags: ["iocore_data"]

# filestream is an input for collecting log messages from files.
- type: filestream

  # Unique ID among all inputs, an ID is required.
  id: my-filestream-id

  # Change to true to enable this input configuration.
  enabled: false

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
    #- c:\programdata\elasticsearch\logs\*

  # Exclude lines. A list of regular expressions to match. It drops the lines that are
  # matching any regular expression from the list.
  #exclude_lines: ['^DBG']

  # Include lines. A list of regular expressions to match. It exports the lines that are
  # matching any regular expression from the list.
  #include_lines: ['^ERR', '^WARN']

  # Exclude files. A list of regular expressions to match. Filebeat drops the files that
  # are matching any regular expression from the list. By default, no files are dropped.
  #prospector.scanner.exclude_files: ['.gz$']

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering
  #fields:
  #  level: debug
  #  review: 1

# ============================== Filebeat modules ==============================

filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml

  # Set to true to enable config reloading
  reload.enabled: false

  # Period on which files under path should be checked for changes
  #reload.period: 10s

# ======================= Elasticsearch template setting =======================

setup.template.settings:
  index.number_of_shards: 1
  #index.codec: best_compression
  #_source.enabled: false


# ================================== General ===================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
#  env: staging

# ================================= Dashboards =================================
# These settings control loading the sample dashboards to the Kibana index. Loading
# the dashboards is disabled by default and can be enabled either by setting the
# options here or by using the `setup` command.
#setup.dashboards.enabled: false

# The URL from where to download the dashboards archive. By default this URL
# has a value which is computed based on the Beat name and version. For released
# versions, this URL points to the dashboard archive on the artifacts.elastic.co
# website.
#setup.dashboards.url:

# =================================== Kibana ===================================

# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
setup.kibana:

  # Kibana Host
  # Scheme and port can be left out and will be set to the default (http and 5601)
  # In case you specify an additional path, the scheme is required: http://localhost:5601/path
  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
  host: "192.168.66.214:5601"

  # Kibana Space ID
  # ID of the Kibana Space into which the dashboards should be loaded. By default,
  # the Default Space will be used.
  #space.id:

# =============================== Elastic Cloud ================================

# These settings simplify using Filebeat with the Elastic Cloud (https://cloud.elastic.co/).

# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:

# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:

# ================================== Outputs ===================================

# Configure what output to use when sending the data collected by the beat.

# ---------------------------- Elasticsearch Output ----------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["192.168.66.214:9200"]

  # Protocol - either `http` (default) or `https`.
  #protocol: "https"

  # Authentication credentials - either API key or username/password.
  #api_key: "id:api_key"
  #username: "elastic"
  #password: "changeme"

# ------------------------------ Logstash Output -------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["192.168.66.214:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

# ================================= Processors =================================
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~

# ================================== Logging ===================================

# Sets log level. The default log level is info.
# Available log levels are: error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publisher", "service".
#logging.selectors: ["*"]

# ============================= X-Pack Monitoring ==============================
# Filebeat can export internal metrics to a central Elasticsearch monitoring
# cluster.  This requires xpack monitoring to be enabled in Elasticsearch.  The
# reporting is disabled by default.

# Set to true to enable the monitoring reporter.
#monitoring.enabled: false

# Sets the UUID of the Elasticsearch cluster under which monitoring data for this
# Filebeat instance will appear in the Stack Monitoring UI. If output.elasticsearch
# is enabled, the UUID is derived from the Elasticsearch cluster referenced by output.elasticsearch.
#monitoring.cluster_uuid:

# Uncomment to send the metrics to Elasticsearch. Most settings from the
# Elasticsearch output are accepted here as well.
# Note that the settings should point to your Elasticsearch *monitoring* cluster.
# Any setting that is not set is automatically inherited from the Elasticsearch
# output configuration, so if you have the Elasticsearch output configured such
# that it is pointing to your Elasticsearch monitoring cluster, you can simply
# uncomment the following line.
#monitoring.elasticsearch:

# ============================== Instrumentation ===============================

# Instrumentation support for the filebeat.
#instrumentation:
    # Set to true to enable instrumentation of filebeat.
    #enabled: false

    # Environment in which filebeat is running on (eg: staging, production, etc.)
    #environment: ""

    # APM Server hosts to report instrumentation results to.
    #hosts:
    #  - http://localhost:8200

    # API Key for the APM Server(s).
    # If api_key is set then secret_token will be ignored.
    #api_key:

    # Secret token for the APM Server(s).
    #secret_token:


# ================================= Migration ==================================

# This allows to enable 6.7 migration aliases
#migration.6_to_7.enabled: true

Here is how I installed & configured Filebeat:

  1. curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.17.1-amd64.deb
  2. sudo dpkg -i filebeat-7.17.1-amd64.deb
  3. I configured the Filebeat outputs (Kibana + Logstash hosts)
  4. sudo filebeat setup --index-management -E output.logstash.enabled=false -E 'output.elasticsearch.hosts=["192.168.66.214:9200"]'
  5. sudo filebeat -e (after the connectivity checks below)
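
Before step 5, Filebeat's built-in self-tests are a quick sanity check that the configuration parses and that Logstash on 5044 is reachable:

sudo filebeat test config      # validates /etc/filebeat/filebeat.yml
sudo filebeat test output      # opens a test connection to 192.168.66.214:5044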

I've got this on my Logstash stdout:

logstash         |                "message" => "2022-08-10T11:41:19.963 1660124479.963 public_plugin_vehicle_data Broadcast OUT speed 0.000000",
logstash         |                    "log" => {
logstash         |           "file" => {
logstash         |             "path" => "/tmp/reference-data/vehicle_data_public_plugin_vehicle_data.log"
logstash         |         },
logstash         |         "offset" => 529034
logstash         |     },
logstash         |               "@version" => "1",
logstash         |                  "input" => {
logstash         |         "type" => "log"
logstash         |     },
logstash         |               "log_zone" => "Broadcast",
logstash         |                  "event" => {
logstash         |         "original" => "2022-08-10T11:41:19.963 1660124479.963 public_plugin_vehicle_data Broadcast OUT speed 0.000000"
logstash         |     },
logstash         |              "log_model" => "public_plugin_vehicle_data",
logstash         |     "log_data_numerical" => 0.0,
logstash         |         "log_data_value" => "0.000000",
logstash         |     "log_data_direction" => "OUT",
logstash         |               "log_time" => "2022-08-10T11:41:19.963",
logstash         |          "log_data_name" => "speed",
logstash         |                   "host" => {
logstash         |                    "os" => {
logstash         |             "codename" => "bionic",
logstash         |               "family" => "debian",
logstash         |               "kernel" => "5.13.0-51-generic",
logstash         |             "platform" => "ubuntu",
logstash         |                 "type" => "linux",
logstash         |                 "name" => "Ubuntu",
logstash         |              "version" => "18.04.5 LTS (Bionic Beaver)"
logstash         |         },
logstash         |                    "id" => "270a461e0432457f9eac8bab1a33b1c0",
logstash         |          "architecture" => "x86_64",
logstash         |                  "name" => "bowlmain",
logstash         |         "containerized" => true,
logstash         |                    "ip" => [
logstash         |             [0] "192.168.66.10",
logstash         |             [1] "fe80::ac81:259f:f6c2:4589",
logstash         |             [2] "172.17.0.1"
logstash         |         ],
logstash         |                   "mac" => [
logstash         |             [0] "00:01:2e:a2:0e:b8",
logstash         |             [1] "00:01:2e:a2:0e:b9",
logstash         |             [2] "02:42:83:4d:13:58",
logstash         |             [3] "70:9c:d1:82:4d:f8"
logstash         |         ],
logstash         |              "hostname" => "bowlmain"
logstash         |     },
logstash         |             "@timestamp" => 2022-08-10T11:41:19.963Z,
logstash         |                   "tags" => [
logstash         |         [0] "iocore_data",
logstash         |         [1] "beats_input_codec_plain_applied"
logstash         |     ],
logstash         |                  "agent" => {
logstash         |                   "id" => "0df421b0-519e-489a-b593-ba2608005867",
logstash         |                 "name" => "bowlmain",
logstash         |         "ephemeral_id" => "c4a7c3cb-af0e-4131-89b3-c7707d964beb",
logstash         |                 "type" => "filebeat",
logstash         |              "version" => "8.3.3"
logstash         |     },
logstash         |       "log_absolutetime" => "1660124479.963",
logstash         |                    "ecs" => {
logstash         |         "version" => "8.0.0"
logstash         |     }
logstash         | }

My Filebeat log:

{"log.level":"info","@timestamp":"2022-08-10T11:41:25.926+0200","log.logger":"monitoring","log.origin":{"file.name":"log/log.go","file.line":193},"message":"Total metrics","service.name":"filebeat","monitoring":{"metrics":{"beat":{"cgroup":{"cpu":{"cfs":{"period":{"us":0},"quota":{"us":0}},"id":"ee9d6ec2de8a5e2229f6c5ac0b338cad3151ae3da4a13f5a3c5ade29208bd1fa","stats":{"periods":0,"throttled":{"ns":0,"periods":0}}},"cpuacct":{"id":"ee9d6ec2de8a5e2229f6c5ac0b338cad3151ae3da4a13f5a3c5ade29208bd1fa","total":{"ns":0}},"memory":{"id":"ee9d6ec2de8a5e2229f6c5ac0b338cad3151ae3da4a13f5a3c5ade29208bd1fa","mem":{"limit":{"bytes":0},"usage":{"bytes":0}}}},"cpu":{"system":{"ticks":590,"time":{"ms":590}},"total":{"ticks":3150,"time":{"ms":3150},"value":0},"user":{"ticks":2560,"time":{"ms":2560}}},"handles":{"limit":{"hard":1048576,"soft":2048},"open":9},"info":{"ephemeral_id":"c4a7c3cb-af0e-4131-89b3-c7707d964beb","name":"filebeat","uptime":{"ms":860504},"version":"8.3.3"},"memstats":{"gc_next":19775032,"memory_alloc":17058016,"memory_sys":44909576,"memory_total":787908096,"rss":106606592},"runtime":{"goroutines":29}},"filebeat":{"events":{"active":0,"added":1389,"done":1389},"harvester":{"closed":1,"open_files":0,"running":0,"skipped":0,"started":1},"input":{"log":{"files":{"renamed":0,"truncated":0}},"netflow":{"flows":0,"packets":{"dropped":0,"received":0}}}},"libbeat":{"config":{"module":{"running":2,"starts":2,"stops":0},"reloads":1,"scans":1},"output":{"events":{"acked":1313,"active":0,"batches":558,"dropped":0,"duplicates":0,"failed":0,"toomany":0,"total":1313},"read":{"bytes":3348,"errors":0},"type":"logstash","write":{"bytes":400573,"errors":0}},"pipeline":{"clients":0,"events":{"active":0,"dropped":0,"failed":0,"filtered":76,"published":1313,"retry":169,"total":1389},"queue":{"acked":1313,"max_events":4096}}},"registrar":{"states":{"cleanup":0,"current":75,"update":1389},"writes":{"fail":0,"success":409,"total":409}},"system":{"cpu":{"cores":16},"load":{"1":1.69,"15":1.24,"5":1.31,"norm":{"1":0.1056,"15":0.0775,"5":0.0819}}}},"ecs.version":"1.6.0"}}

The problem: in Kibana I receive nothing when I'm using Logstash, whereas when I use the Elasticsearch ip:port in my Filebeat output I do receive data, but it isn't parsed.

So I want to know why my Logstash is sending nothing to Elasticsearch, and how I can receive data?
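
For reference, whether Logstash has written anything at all can be checked against the index pattern from the pipeline above:

curl 'http://192.168.66.214:9200/_cat/indices/filebeat-*?v'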

Okay, now I get live logs, but I changed nothing; I just let it run...

Filebeat logs:

{"log.level":"info","@timestamp":"2022-08-10T13:57:52.282+0200","log.logger":"monitoring","log.origin":{"file.name":"log/log.go","file.line":185},"message":"Non-zero metrics in the last 30s","service.name":"filebeat","monitoring":{"metrics":{"beat":{"cpu":{"system":{"ticks":3640,"time":{"ms":20}},"total":{"ticks":17250,"time":{"ms":60},"value":0},"user":{"ticks":13610,"time":{"ms":40}}},"handles":{"limit":{"hard":1048576,"soft":2048},"open":11},"info":{"ephemeral_id":"2ef18e18-869a-4728-b3c8-2ac18adb6a65","uptime":{"ms":5643034},"version":"8.3.3"},"memstats":{"gc_next":20621464,"memory_alloc":15100936,"memory_total":3966613912,"rss":107741184},"runtime":{"goroutines":68}},"filebeat":{"events":{"active":2,"added":27,"done":25},"harvester":{"open_files":1,"running":1}},"libbeat":{"config":{"module":{"running":2}},"output":{"events":{"acked":25,"active":0,"batches":10,"total":25},"read":{"bytes":60},"write":{"bytes":7384}},"pipeline":{"clients":5,"events":{"active":2,"published":27,"total":27},"queue":{"acked":25}}},"registrar":{"states":{"current":75,"update":25},"writes":{"success":7,"total":7}},"system":{"load":{"1":1.6,"15":1.24,"5":1.35,"norm":{"1":0.1,"15":0.0775,"5":0.0844}}}},"ecs.version":"1.6.0"}}

Logstash logs:

logstash         |                "message" => "2022-08-10T13:58:43.859 1660132723.859 public_plugin_vehicle_data Broadcast OUT lane_crossing_detection right_lane_crossing",
logstash         |         "log_data_value" => "true",
logstash         |                   "host" => {
logstash         |                    "id" => "270a461e0432457f9eac8bab1a33b1c0",
logstash         |                    "os" => {
logstash         |                 "type" => "linux",
logstash         |              "version" => "18.04.5 LTS (Bionic Beaver)",
logstash         |               "family" => "debian",
logstash         |             "platform" => "ubuntu",
logstash         |                 "name" => "Ubuntu",
logstash         |               "kernel" => "5.13.0-51-generic",
logstash         |             "codename" => "bionic"
logstash         |         },
logstash         |         "containerized" => true,
logstash         |                   "mac" => [
logstash         |             [0] "00:01:2e:a2:0e:b8",
logstash         |             [1] "00:01:2e:a2:0e:b9",
logstash         |             [2] "02:42:83:4d:13:58",
logstash         |             [3] "70:9c:d1:82:4d:f8"
logstash         |         ],
logstash         |                    "ip" => [
logstash         |             [0] "192.168.66.10",
logstash         |             [1] "fe80::ac81:259f:f6c2:4589",
logstash         |             [2] "172.17.0.1"
logstash         |         ],
logstash         |              "hostname" => "bowlmain",
logstash         |                  "name" => "bowlmain",
logstash         |          "architecture" => "x86_64"
logstash         |     },
logstash         |               "@version" => "1",
logstash         |       "log_absolutetime" => "1660132723.859",
logstash         |         "log_data_info1" => "rain",
logstash         |         "log_data_info2" => "",
logstash         |                   "tags" => [
logstash         |         [0] "iocore_data",
logstash         |         [1] "beats_input_codec_plain_applied"
logstash         |     ]
logstash         | }

I can see in my data view that I got some data at "16h00 & 16h30", but it's actually 14h30.
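
That two-hour offset looks like a timezone mismatch; comparing the host clock with the containers' (container names as in the compose file) shows it directly:

date                          # host time
docker exec logstash date     # container time, UTC by default
docker exec kibana date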

Okay, I think I resolved my problem:

I set the time zone to Europe/Paris in my docker-compose.yml

version: '2.2'
services:
  Elasticsearch:
    image: elasticsearch:8.3.3
    container_name: elasticsearch
    restart: always
    volumes:
    - elastic_data:/usr/share/elasticsearch/data/
    environment:
      ES_JAVA_OPTS: "-Xms512m -Xmx512m"
      discovery.type: single-node
      cluster.name: docker-cluster
      xpack.security.enabled: "false"
      TZ: "Europe/Paris"
    ports:
    - '9200:9200'
    - '9300:9300'
    networks:
      - elk

  Logstash:
    image: logstash:8.3.3
    container_name: logstash
    restart: always
    volumes:
    - /home/epnp/elastic_src/docker_elk:/logstash
    command: logstash -f /logstash/logstash-beat-electric.conf
    depends_on:
      - Elasticsearch
    ports:
    - '5044:5044'
    environment:
      LS_JAVA_OPTS: "-Xms512m -Xmx512m"
      TZ: "Europe/Paris"
    networks:
      - elk

  Kibana:
    image: kibana:8.3.3
    container_name: kibana
    restart: always
    ports:
    - '5601:5601'
    environment:
      TZ: "Europe/Paris"
      # Kibana 8.x reads ELASTICSEARCH_HOSTS (ELASTICSEARCH_URL is ignored)
      ELASTICSEARCH_HOSTS: "http://elasticsearch:9200"
    depends_on:
      - Elasticsearch
    networks:
      - elk

volumes:
  elastic_data: {}

networks:
  elk:

And it works for now!
