Field was not found

Hi,

I'm trying to install and configure ELK 8.3.3 without security in one Docker container (the "host") and Filebeat in another Docker container.

Everything works: I have live logs and I imported my dashboards, but I see this:
image

The problem is that all of my log values end up in the `field.keyword` sub-field instead of the plain `field`.

there is my .conf of my logstash:

# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.

input {
  # Receive events from Filebeat over the Beats protocol.
  beats {
    port => 5044                 # must match output.logstash.hosts in filebeat.yml
    id => "from_filebeat"        # shows up in pipeline logs / monitoring
    ssl  => false                # plaintext — acceptable for an isolated lab, not production
  }
}

output {
  elasticsearch {
    # Security is disabled on this cluster, so no credentials are needed here.
    hosts => ["http://elasticsearch:9200"]
    # Daily index per Beat, e.g. filebeat-8.3.3-2022.08.10.
    # NOTE(review): because a custom index name is used, no Filebeat index
    # template matches it, so Elasticsearch falls back to dynamic mapping:
    # every string becomes a non-aggregatable `text` field plus an
    # aggregatable `field.keyword` sub-field — which is exactly the
    # ".keyword" behaviour discussed in this thread.
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
  }
}

filter {
  # Split the raw space-separated log line into named fields, e.g.
  # "2022-08-10T10:26:59.965 1660120019.965 public_plugin_vehicle_data Broadcast OUT speed 36.000000"
  dissect {
    mapping => {
      "message" => "%{log_time} %{log_absolutetime} %{log_model} %{log_zone} %{log_data_direction} %{log_data_name} %{log_data_value}"
    }
  }

  # Use the parsed log_time as the event @timestamp (the sample above is
  # ISO8601; the time-only pattern covers lines without a date part).
  date {
    match => [ "log_time", "HH:mm:ss.SSS", "ISO8601" ]
  }

  # Stateful enrichment: tracks car mode / weather / speed / distraction across
  # events and emits synthetic metric events (driving_time, reaction_time,
  # transfer_time, distraction_time, over_speed_limit_time, ...) via
  # new_event_block. NOTE(review): the @-variables are per worker thread —
  # this logic is only correct with pipeline.workers set to 1 and ordered
  # input; verify the pipeline settings.
  ruby {
    init => "@current_car_mode = ''
             @transfer_car_mode = ''
             @start_driving_time = 0
             @current_weather = 'rain'
             @current_speed_limit = 300.0
             @current_speed = 0.0
             @start_over_speed_limit_time = 0
             @current_distraction = '0'
             @start_distraction_time = 0
             @start_reaction_time = 0
             @start_transfer_time = 0
             @current_driving_level = 'senior'
             #@test_reaction_time = 0
             #@test_transfer_time = 0
             @first_time = 0"
    code => "# Very first event only: emit zeroed baselines for the two
             # duration metrics and start every timer from this event's time.
             if @first_time == 0
               @first_time = 1
               new_event = event.clone()
               new_event.set('log_data_name', 'distraction_time');
               new_event.set('log_data_value', '0')
               new_event.set('log_data_info1', @current_weather)
               new_event.set('log_data_info2', @current_car_mode)
               new_event.set('log_data_numerical', 0)
               new_event_block.call(new_event)

               new_event = event.clone()
               new_event.set('log_data_name', 'over_speed_limit_time');
               new_event.set('log_data_value', '0')
               new_event.set('log_data_info1', @current_weather)
               new_event.set('log_data_info2', @current_car_mode)
               new_event.set('log_data_numerical', 0)
               new_event_block.call(new_event)

               @start_driving_time = event.get('log_absolutetime').to_f 
               @start_over_speed_limit_time = event.get('log_absolutetime').to_f
               @start_distraction_time = event.get('log_absolutetime').to_f
               @start_reaction_time = event.get('log_absolutetime').to_f
               @start_transfer_time = event.get('log_absolutetime').to_f
             end

             # Default numeric value; specific branches below overwrite it.
             event.set('log_data_numerical', 0.0)
             # Car-mode change: close the running driving_time window and any
             # pending transfer, and flag MRM (minimum-risk manoeuvre) entries.
             if event.get('log_data_name') == 'car_mode' and event.get('log_model') == 'public_plugin_vehicle_data'
               if @current_car_mode == ''
                 @start_driving_time = event.get('log_absolutetime').to_f
               else
                 if event.get('log_data_value') != @current_car_mode
                   new_event = event.clone
                   new_event.set('log_data_name', 'driving_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_driving_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_driving_time)
                   new_event_block.call(new_event)
                   @start_driving_time = event.get('log_absolutetime').to_f
                 end
               end
               
               if @transfer_car_mode != ''
                 new_event = event.clone
                 new_event.set('log_data_name', 'transfer_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_transfer_time)
                 new_event.set('log_data_info1', @current_driving_level)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_transfer_time)
                 new_event_block.call(new_event)
                 @start_reaction_time = 0
                 @start_transfer_time = 0
                 @transfer_car_mode = ''
               end

               if event.get('log_data_value') == 'mrm'
                 new_event = event.clone
                 new_event.set('log_data_name', 'mrm_detection');
                 new_event.set('log_data_value', 'true')
                 new_event.set('log_data_info1', @current_driving_level)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event_block.call(new_event)
               end

               @current_car_mode = event.get('log_data_value')

             # Authority-transfer protocol: request_start starts both timers,
             # request_taken_into_account emits reaction_time; transfer_time
             # itself is emitted later, on the next car_mode change above.
             elsif event.get('log_data_name') == 'authority_transfer_request' and event.get('log_model') == 'public_plugin_smart_cabin'
               if event.get('log_data_value') == 'request_start'
                 @start_reaction_time = event.get('log_absolutetime').to_f
                 @start_transfer_time = event.get('log_absolutetime').to_f
                 @transfer_car_mode = ''
               elsif event.get('log_data_value') == 'request_taken_into_account'
                 if @start_reaction_time != 0 and @transfer_car_mode == ''
                   new_event = event.clone
                   new_event.set('log_data_name', 'reaction_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_reaction_time)
                   new_event.set('log_data_info1', @current_driving_level)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_reaction_time)
                   new_event_block.call(new_event)
                   @start_transfer_time = event.get('log_absolutetime').to_f
                   @transfer_car_mode = @current_car_mode
                 end
               elsif event.get('log_data_value') == 'request_cancel'
                   @start_reaction_time = 0
                   @start_transfer_time = 0
                   @transfer_car_mode = ''
               elsif event.get('log_data_value') == 'request_end'
                   #new_event = event.clone
                   #new_event.set('log_data_name', 'transfer_time');
                   #new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_transfer_time)
                   #new_event.set('log_data_info1', @current_driving_level)
                   #new_event.set('log_data_info2', @transfer_car_mode)
                   #new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_transfer_time)
                   #new_event_block.call(new_event)
                   #@start_transfer_time = 0
                   #@start_reaction_time = 0  
                   #@transfer_car_mode = ''
               end

             # Remember the driver's declared skill level for later metrics.
             elsif event.get('log_data_name') == 'user.user_driving_level' and event.get('log_zone') == 'user_driver'
               @current_driving_level = event.get('log_data_value')
             
             # Weather change: close every open window (driving, distraction,
             # over-speed) so each emitted duration is attributed to a single
             # weather condition.
             elsif event.get('log_data_name') == 'weather' and event.get('log_model') == 'public_plugin_ros_driving_environment'
               if @current_car_mode != ''
                 new_event = event.clone
                 new_event.set('log_data_name', 'driving_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_driving_time)
                 new_event.set('log_data_info1', @current_weather)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_driving_time)
                 new_event_block.call(new_event)
                 @start_driving_time = event.get('log_absolutetime').to_f
               end
               if @current_distraction == '1'
                 new_event = event.clone
                 new_event.set('log_data_name', 'distraction_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_distraction_time)
                 new_event.set('log_data_info1', @current_weather)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_distraction_time)
                 new_event_block.call(new_event)
                 @start_distraction_time = event.get('log_absolutetime').to_f
               end
               if (@current_speed > @current_speed_limit)
                 new_event = event.clone
                 new_event.set('log_data_name', 'over_speed_limit_time');
                 new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                 new_event.set('log_data_info1', @current_weather)
                 new_event.set('log_data_info2', @current_car_mode)
                 new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                 new_event_block.call(new_event)
                 @start_over_speed_limit_time = event.get('log_absolutetime').to_f
               end
               @current_weather = event.get('log_data_value')

             # Distraction on/off edges: a 1->0 transition emits the elapsed
             # distraction_time.
             elsif event.get('log_data_name') == 'distraction.distracted'
               if event.get('log_data_value') == '1'
                 if  @current_distraction == '0'
                   @start_distraction_time = event.get('log_absolutetime').to_f
                 end
                 event.set('log_data_numerical', '1');
                 @current_distraction = '1'

                 #new_event = event.clone
                 #new_event.set('log_data_name', 'reaction_time');
                 #new_event.set('log_data_value', @test_reaction_time)
                 #new_event.set('log_data_info1', @current_weather)
                 #new_event.set('log_data_info2', 'autonomous_driving')
                 #new_event.set('log_data_numerical', @test_reaction_time)
                 #@test_reaction_time = @test_reaction_time + 2
                 #new_event_block.call(new_event)

               else
                 if  @current_distraction == '1'
                   new_event = event.clone
                   new_event.set('log_data_name', 'distraction_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_distraction_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_distraction_time)
                   new_event_block.call(new_event)
                 end
                 event.set('log_data_numerical', '0');
                 @current_distraction = '0'

                 #new_event = event.clone
                 #new_event.set('log_data_name', 'transfer_time');
                 #new_event.set('log_data_value', @test_transfer_time)
                 #new_event.set('log_data_info1', @current_weather)
                 #new_event.set('log_data_info2', 'autonomous_driving')
                 #new_event.set('log_data_numerical', @test_transfer_time)
                 #@test_transfer_time = @test_transfer_time + 2
                 #new_event_block.call(new_event)

               end
             
             # Speed-limit change: if we were over the old limit and the new
             # limit makes us legal again, close the over-speed window; if we
             # just became over the new limit, open one.
             elsif event.get('log_data_name') == 'speed_limit' and event.get('log_model') == 'public_plugin_vehicle_data'
               if (@current_speed > @current_speed_limit)
                 @current_speed_limit = event.get('log_data_value').to_f
                 if @current_speed_limit >= @current_speed
                   new_event = event.clone
                   new_event.set('log_data_name', 'over_speed_limit_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event_block.call(new_event)
                 end
               else
                 @current_speed_limit = event.get('log_data_value').to_f
                 if @current_speed_limit < @current_speed
                   @start_over_speed_limit_time = event.get('log_absolutetime').to_f
                 end               
               end
             
             # Speed change: mirror of the speed_limit branch above.
             elsif event.get('log_data_name') == 'speed' and event.get('log_model') == 'public_plugin_vehicle_data'
               if @current_speed > @current_speed_limit
                 @current_speed = event.get('log_data_value').to_f
                 if @current_speed_limit >= @current_speed
                   new_event = event.clone
                   new_event.set('log_data_name', 'over_speed_limit_time');
                   new_event.set('log_data_value', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event.set('log_data_info1', @current_weather)
                   new_event.set('log_data_info2', @current_car_mode)
                   new_event.set('log_data_numerical', event.get('log_absolutetime').to_f - @start_over_speed_limit_time)
                   new_event_block.call(new_event)
                 end
               else
                 @current_speed = event.get('log_data_value').to_f
                 if @current_speed_limit < @current_speed
                   @start_over_speed_limit_time = event.get('log_absolutetime').to_f
                 end               
               end

             # Lane-crossing detections become synthetic lane_crossing events.
             elsif event.get('log_data_name') == 'lane_crossing_detection' and event.get('log_data_value') != 'lane_crossing' and event.get('log_model') == 'public_plugin_vehicle_data'
               new_event = event.clone
               new_event.set('log_data_name', 'lane_crossing');
               new_event.set('log_data_value', 'true')
               new_event.set('log_data_info1', @current_weather)
               new_event.set('log_data_info2', @current_car_mode)
               new_event_block.call(new_event)

             end"
  }

  # Make the synthetic metric usable in numeric aggregations.
  mutate {
    convert => ["log_data_numerical","float"]
  }
}

I do all step for install & configure filebeat & elk.

i do the filebeat setup -e with elasticsearch port

and after i set the logstash port and i launched filebeat.

If someone has the answer, that would help me a lot!

OK, I reinstalled Filebeat in the container, and now I don't have live logs:
image

My Filebeat doesn't seem to send data, but the path to my data is correct and the data is constantly updating.

filebeat.yml

# FIX: as pasted, a bare top-level list (`- type: log`) next to top-level
# mappings is not valid YAML — the inputs must live under `filebeat.inputs:`.
filebeat.inputs:
  # NOTE(review): the `log` input is deprecated in 8.x in favour of
  # `filestream`; it still works, but consider migrating.
  - type: log
    enabled: true
    paths:
      - /tmp/reference-data/*.log
    tags: ["iocore_data"]   # lets Logstash/Kibana distinguish these events

  - type: filestream
    id: my-filestream-id
    enabled: false          # disabled: /var/log/*.log is not collected
    paths:
      - /var/log/*.log

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 1

setup.kibana:
  host: "192.168.66.214:5601"

# Events are shipped to Logstash, not directly to Elasticsearch; only one
# output may be enabled at a time.
output.logstash:
  hosts: ["192.168.66.214:5044"]

processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~

i do this before starting:

sudo filebeat modules enable system
sudo vi modules.d/system.yml

i set syslog & auth enabled to true

# Module: system
# Docs: https://www.elastic.co/guide/en/beats/filebeat/8.3/filebeat-module-system.html

# Enables the Filebeat `system` module: both the syslog and auth filesets
# are switched on, with default OS-dependent log paths.
- module: system
  # Syslog
  syslog:
    enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

  # Authorization logs
  auth:
    enabled: true

    # Set custom paths for the log files. If left empty,
    # Filebeat will choose the paths depending on your OS.
    #var.paths:

i do

sudo filebeat setup --index-management -E output.logstash.enabled=false -E 'output.elasticsearch.hosts=["192.168.66.214:9200"]'

for the setup and after i start it

it's not working so i disable system and enable nginx

i do the same as system.yml
so i set to true all in the nginx.yml

and it's not working too ...

Ok i don't understand why but my live logs is working.

Now I just need to know: why are my logs in `field.keyword` and not in the plain field?

Can you elaborate more on what you are seeing, and expecting?

1 Like

Here is what is see:

  • in my host (with docker-compose)
logstash         | [2022-08-08T07:32:09,724][INFO ][org.logstash.beats.Server][main][from_filebeat] Starting server on port: 5044

this is my docker-compose.yml

version: '3.6'
services:
  Elasticsearch:
    image: elasticsearch:8.3.3
    container_name: elasticsearch
    restart: always
    volumes:
    - elastic_data:/usr/share/elasticsearch/data/
    environment:
      ES_JAVA_OPTS: "-Xms512m -Xmx512m"
      discovery.type: single-node
      cluster.name: docker-cluster
      # Security disabled for this lab setup — do not expose this cluster.
      xpack.security.enabled: "false"

    ports:
    - '9200:9200'
    - '9300:9300'
    networks:
      - elk

  Logstash:
    image: logstash:8.3.3
    container_name: logstash
    restart: always
    volumes:
    - /home/epnp/elastic_src/docker_elk:/logstash
    command: logstash -f /logstash/logstash-beat-electric.conf
    depends_on:
      - Elasticsearch
    ports:
    - '5044:5044'   # Beats input, reached by Filebeat from the other host
    environment:
      LS_JAVA_OPTS: "-Xms512m -Xmx512m"
    networks:
      - elk

  Kibana:
    image: kibana:8.3.3
    container_name: kibana
    restart: always
    ports:
    - '5601:5601'
    environment:
      # FIX: Kibana 7+/8 reads ELASTICSEARCH_HOSTS; the legacy
      # ELASTICSEARCH_URL (6.x) is ignored, so Kibana was falling back to its
      # default http://elasticsearch:9200 and only worked by coincidence.
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    depends_on:
      - Elasticsearch
    networks:
      - elk
volumes:
  elastic_data: {}

networks:
  elk:

-in an other docker with filebeat

{"log.level":"info","@timestamp":"2022-08-08T08:59:41.901+0200","log.logger":"monitoring","log.origin":{"file.name":"log/log.go","file.line":185},"message":"Non-zero metrics in the last 30s","service.name":"filebeat","monitoring":{"metrics":{"beat":{"cpu":{"system":{"ticks":90,"time":{"ms":20}},"total":{"ticks":290,"time":{"ms":30},"value":0},"user":{"ticks":200,"time":{"ms":10}}},"handles":{"limit":{"hard":1048576,"soft":2048},"open":14},"info":{"ephemeral_id":"0e9544d9-4797-4613-ab53-ca514db7306f","uptime":{"ms":63085},"version":"8.3.3"},"memstats":{"gc_next":19967352,"memory_alloc":13815640,"memory_total":74787464,"rss":102715392},"runtime":{"goroutines":83}},"filebeat":{"events":{"added":4,"done":4},"harvester":{"open_files":4,"running":4}},"libbeat":{"config":{"module":{"running":2}},"output":{"events":{"acked":4,"active":0,"batches":1,"total":4},"read":{"bytes":6},"write":{"bytes":877}},"pipeline":{"clients":5,"events":{"active":0,"published":4,"total":4},"queue":{"acked":4}}},"registrar":{"states":{"current":75,"update":4},"writes":{"success":1,"total":1}},"system":{"load":{"1":0.72,"15":0.3,"5":0.57,"norm":{"1":0.045,"15":0.0188,"5":0.0356}}}},"ecs.version":"1.6.0"}}
{"log.level":"info","@timestamp":"2022-08-08T09:00:11.899+0200","log.logger":"monitoring","log.origin":{"file.name":"log/log.go","file.line":185},"message":"Non-zero metrics in the last 30s","service.name":"filebeat","monitoring":{"metrics":{"beat":{"cpu":{"system":{"ticks":110,"time":{"ms":20}},"total":{"ticks":320,"time":{"ms":30},"value":0},"user":{"ticks":210,"time":{"ms":10}}},"handles":{"limit":{"hard":1048576,"soft":2048},"open":14},"info":{"ephemeral_id":"0e9544d9-4797-4613-ab53-ca514db7306f","uptime":{"ms":93085},"version":"8.3.3"},"memstats":{"gc_next":19967352,"memory_alloc":15765080,"memory_total":76736904,"rss":102715392},"runtime":{"goroutines":83}},"filebeat":{"harvester":{"open_files":4,"running":4}},"libbeat":{"config":{"module":{"running":2}},"output":{"events":{"active":0}},"pipeline":{"clients":5,"events":{"active":0}}},"registrar":{"states":{"current":75}},"system":{"load":{"1":0.67,"15":0.31,"5":0.58,"norm":{"1":0.0419,"15":0.0194,"5":0.0363}}}},"ecs.version":"1.6.0"}}

My filebeat.yml:

# FIX: as pasted, a bare top-level list (`- type: log`) next to top-level
# mappings is not valid YAML — the inputs must live under `filebeat.inputs:`.
filebeat.inputs:
  # NOTE(review): the `log` input is deprecated in 8.x in favour of
  # `filestream`; it still works, but consider migrating.
  - type: log
    enabled: true
    paths:
      - /tmp/reference-data/*.log
    tags: ["iocore_data"]   # lets Logstash/Kibana distinguish these events

  - type: filestream
    id: my-filestream-id
    enabled: false          # disabled: /var/log/*.log is not collected
    paths:
      - /var/log/*.log

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 1

setup.kibana:
  host: "192.168.66.214:5601"

# Events are shipped to Logstash, not directly to Elasticsearch; only one
# output may be enabled at a time.
output.logstash:
  hosts: ["192.168.66.214:5044"]

processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~

and what i see in kibana:

(it's my log with 2 type of field, the normal one log_data_name with 0 data and the second one log_data_name.keyword with some data inside.)

All the data is in `.keyword` and not in the normal field...

I don't know if it's normal or not but my docker-compose (host) just give me some news of elasticsearch and not logstash.

But my filebeat have the ip + port of logstash.
Is it possible that Filebeat is sending data to Elasticsearch instead of Logstash, and that because of this Logstash can't receive the data and store it in the normal field (e.g. log_data_name)?

As you can see, I'm stuck on this keyword type issue... so if someone knows a solution and can explain it to me, I'd be very grateful!

Please don't post pictures of text, logs or code. They are difficult to read, impossible to search and replicate (if it's code), and some people may not be even able to see them :slight_smile:

1 Like

OK, I'll change that, but for the Kibana screenshot I can't do more.
Do you need any more information about my problem?

I got that in my host:

kibana           | [2022-08-08T08:55:54.659+00:00][INFO ][plugins.securitySolution.endpoint:metadata-check-transforms-task:0.0.1] no endpoint installation found
kibana           | [2022-08-08T09:32:15.847+00:00][INFO ][plugins.ml] Task ML:saved-objects-sync-task: No ML saved objects in need of synchronization

So i checked

I try this on my host:

curl -X GET "192.168.66.214:5601/api/ml/saved_objects/sync?simulate=true" -H 'kbn-xsrf: true'

and i get that

{"statusCode":403,"error":"Forbidden","message":"Forbidden"}

How can we see if my filebeat is sending data to my logstash ?

The log is saying there was nothing to do so there's no reason to do that.

What do the Filebeat logs show?

Here is my filebeat logs:

{"log.level":"info","@timestamp":"2022-08-10T09:50:25.198+0200","log.logger":"publisher_pipeline_output","log.origin":{"file.name":"pipeline/client_worker.go","file.line":147},"message":"Connection to backoff(async(tcp://192.168.66.214:5044)) established","service.name":"filebeat","ecs.version":"1.6.0"}
{"log.level":"info","@timestamp":"2022-08-10T09:51:53.875+0200","log.logger":"monitoring","log.origin":{"file.name":"log/log.go","file.line":185},"message":"Non-zero metrics in the last 30s","service.name":"filebeat","monitoring":{"metrics":{"beat":{"cpu":{"system":{"ticks":30},"total":{"ticks":400,"value":0},"user":{"ticks":370}},"handles":{"limit":{"hard":1048576,"soft":2048},"open":12},"info":{"ephemeral_id":"b8ec8799-e7f5-457e-bc8c-9940b0718a8f","uptime":{"ms":93083},"version":"8.3.3"},"memstats":{"gc_next":19829336,"memory_alloc":11428320,"memory_sys":262144,"memory_total":127544992,"rss":102662144},"runtime":{"goroutines":73}},"filebeat":{"harvester":{"open_files":2,"running":2}},"libbeat":{"config":{"module":{"running":2}},"output":{"events":{"active":0}},"pipeline":{"clients":5,"events":{"active":0}}},"registrar":{"states":{"current":75}},"system":{"load":{"1":1.19,"15":1.18,"5":1.22,"norm":{"1":0.0744,"15":0.0738,"5":0.0763}}}},"ecs.version":"1.6.0"}}

Logstash logs:

logstash         | [2022-08-10T07:46:40,839][INFO ][logstash.javapipeline    ][main] Pipeline Java execution initialization time {"seconds"=>0.55}
logstash         | [2022-08-10T07:46:40,864][INFO ][logstash.inputs.beats    ][main] Starting input listener {:address=>"0.0.0.0:5044"}
logstash         | [2022-08-10T07:46:40,886][INFO ][logstash.javapipeline    ][main] Pipeline started {"pipeline.id"=>"main"}
logstash         | [2022-08-10T07:46:40,945][INFO ][logstash.agent           ] Pipelines running {:count=>2, :running_pipelines=>[:".monitoring-logstash", :main], :non_running_pipelines=>[]}
logstash         | [2022-08-10T07:46:41,060][INFO ][org.logstash.beats.Server][main][from_filebeat] Starting server on port: 5044
logstash         |                "message" => "2022-08-10T10:26:59.965 1660120019.965 public_plugin_vehicle_data Broadcast OUT speed 36.000000",
logstash         |                  "input" => {
logstash         |         "type" => "log"
logstash         |     },
logstash         |               "log_time" => "2022-08-10T10:26:59.965",
logstash         |         "log_data_value" => "36.000000",
logstash         |                  "event" => {
logstash         |         "original" => "2022-08-10T10:26:59.965 1660120019.965 public_plugin_vehicle_data Broadcast OUT speed 36.000000"
logstash         |     },
logstash         |                  "agent" => {
logstash         |              "version" => "8.3.3",
logstash         |         "ephemeral_id" => "fc072729-08fc-4d5c-8491-ccd664dd6c1e",
logstash         |                   "id" => "0df421b0-519e-489a-b593-ba2608005867",
logstash         |                 "type" => "filebeat",
logstash         |                 "name" => "bowlmain"
logstash         |     },
logstash         |                   "host" => {
logstash         |          "architecture" => "x86_64",
logstash         |                   "mac" => [
logstash         |             [0] "00:01:2e:a2:0e:b8",
logstash         |             [1] "00:01:2e:a2:0e:b9",
logstash         |             [2] "02:42:83:4d:13:58",
logstash         |             [3] "70:9c:d1:82:4d:f8"
logstash         |         ],
logstash         |         "containerized" => true,
logstash         |              "hostname" => "bowlmain",
logstash         |                  "name" => "bowlmain",
logstash         |                    "os" => {
logstash         |               "kernel" => "5.13.0-51-generic",
logstash         |               "family" => "debian",
logstash         |                 "name" => "Ubuntu",
logstash         |                 "type" => "linux",
logstash         |             "codename" => "bionic",
logstash         |              "version" => "18.04.5 LTS (Bionic Beaver)",
logstash         |             "platform" => "ubuntu"
logstash         |         },
logstash         |                    "id" => "270a461e0432457f9eac8bab1a33b1c0",
logstash         |                    "ip" => [
logstash         |             [0] "192.168.66.10",
logstash         |             [1] "fe80::ac81:259f:f6c2:4589",
logstash         |             [2] "172.17.0.1"
logstash         |         ]
logstash         |     },
logstash         |     "log_data_numerical" => 0.0,
logstash         |              "log_model" => "public_plugin_vehicle_data",
logstash         |                    "log" => {
logstash         |           "file" => {
logstash         |             "path" => "/tmp/reference-data/vehicle_data_public_plugin_vehicle_data.log"
logstash         |         },
logstash         |         "offset" => 83336
logstash         |     },
logstash         |               "log_zone" => "Broadcast",
logstash         |             "@timestamp" => 2022-08-10T10:26:59.965Z,
logstash         |     "log_data_direction" => "OUT",
logstash         |          "log_data_name" => "speed",
logstash         |                   "tags" => [
logstash         |         [0] "iocore_data",
logstash         |         [1] "beats_input_codec_plain_applied"
logstash         |     ],
logstash         |               "@version" => "1",
logstash         |       "log_absolutetime" => "1660120019.965",
logstash         |                    "ecs" => {
logstash         |         "version" => "8.0.0"
logstash         |     }
logstash         | }

Elasticsearch logs:

elasticsearch    | {"@timestamp":"2022-08-10T07:50:10.395Z", "log.level": "WARN",  "data_stream.dataset":"deprecation.elasticsearch","data_stream.namespace":"default","data_stream.type":"logs","elasticsearch.elastic_product_origin":"kibana","elasticsearch.event.category":"api","elasticsearch.http.request.x_opaque_id":"9ab39623-9fb4-4a7f-8ff6-91898c87d832;kibana:application:management:","event.code":"open_system_index_access","message":"this request accesses system indices: [.async-search, .tasks], but in a future major version, direct access to system indices will be prevented by default" , "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"deprecation.elasticsearch","process.thread.name":"elasticsearch[7304b2989fb3][transport_worker][T#2]","log.logger":"org.elasticsearch.deprecation.cluster.metadata.IndexNameExpressionResolver","trace.id":"0772c0dc1ed187539d7b5c70bc6703de","elasticsearch.cluster.uuid":"uzyIcCeXTnesfj0jzP60TQ","elasticsearch.node.id":"hsE1WvAkQlaGHB5a6twqQw","elasticsearch.node.name":"7304b2989fb3","elasticsearch.cluster.name":"docker-cluster"}
elasticsearch    | {"@timestamp":"2022-08-10T07:50:25.545Z", "log.level": "INFO", "message":"[filebeat-8.3.3-2022.08.10] creating index, cause [auto(bulk api)], templates [], shards [1]/[1]", "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.server","process.thread.name":"elasticsearch[7304b2989fb3][masterService#updateTask][T#1]","log.logger":"org.elasticsearch.cluster.metadata.MetadataCreateIndexService","elasticsearch.cluster.uuid":"uzyIcCeXTnesfj0jzP60TQ","elasticsearch.node.id":"hsE1WvAkQlaGHB5a6twqQw","elasticsearch.node.name":"7304b2989fb3","elasticsearch.cluster.name":"docker-cluster"}
elasticsearch    | {"@timestamp":"2022-08-10T07:50:25.672Z", "log.level": "INFO", "message":"[filebeat-8.3.3-2022.08.10/vyTCdM9sTqiShk4gAeMP6w] create_mapping", "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.server","process.thread.name":"elasticsearch[7304b2989fb3][masterService#updateTask][T#1]","log.logger":"org.elasticsearch.cluster.metadata.MetadataMappingService","elasticsearch.cluster.uuid":"uzyIcCeXTnesfj0jzP60TQ","elasticsearch.node.id":"hsE1WvAkQlaGHB5a6twqQw","elasticsearch.node.name":"7304b2989fb3","elasticsearch.cluster.name":"docker-cluster"}
elasticsearch    | {"@timestamp":"2022-08-10T07:50:25.743Z", "log.level": "INFO", "message":"[filebeat-8.3.3-2022.08.10/vyTCdM9sTqiShk4gAeMP6w] update_mapping [_doc]", "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.server","process.thread.name":"elasticsearch[7304b2989fb3][masterService#updateTask][T#1]","log.logger":"org.elasticsearch.cluster.metadata.MetadataMappingService","elasticsearch.cluster.uuid":"uzyIcCeXTnesfj0jzP60TQ","elasticsearch.node.id":"hsE1WvAkQlaGHB5a6twqQw","elasticsearch.node.name":"7304b2989fb3","elasticsearch.cluster.name":"docker-cluster"}