Hi Rios,
Removing the type field still the same error so did some research on manticore and changed the config as follows:
input {
# Input for the first log file
file {
path => "${LOG_BUNDLE}/*/cvm_logs/stargate.*" # Path to the first log file
start_position => "beginning"
exit_after_read => true
sincedb_clean_after => "1 day"
sincedb_path => "/dev/null"
mode => "read"
file_completed_action => "log"
file_completed_log_path => "/dev/null"
type => "cpp_stargate"
}
}
filter {
# Filter for the first log type
if [type] == "cpp_stargate" {
if [message] =~ /^Log|^Running/ {
drop { }
}
grok {
match => {
"message" => "^(?<log_level>(I|W|E|F)+)(?<time>\d{4}\d{2}\d{2} \d{2}:\d{2}:\d{2}.\d{3})\d{3}Z\s+(?<pid>[^ ]+)\s+(?<source_log_filename>[^\:]+):[^\]]+]*%{GREEDYDATA:message}"
}
overwrite => [ "message" ]
break_on_match => true
}
date {
match => [ "time", "yyyyMMdd HH:mm:ss.SSS" ]
target => "@timestamp"
timezone => "UTC"
}
}
# Extract the source file name from the path
mutate {
add_field => {
"filepath" => "%{path}"
"filename_without_ext" => "%{path}"
"filename" => "%{path}"
"log_bundle_path" => "${LOG_BUNDLE}"
}
}
# Extract the base filename from the full path
mutate {
gsub => ["filepath", "^.*/", ""]
}
mutate {
update => { "time" => "@timestamp" }
}
mutate { remove_field => ["type", "tags", "@timestamp", "@version", "log", "event" ] }
# Calculate a hash value from the source filename
#ruby {
# code => "
# require 'digest'
# filename = event.get('filepath')
# hash = Digest::MD5.hexdigest(filename)
# index_number = hash.to_i(16) % 32 + 1
# event.set('index_number', index_number)
# "
#}
}
output {
elasticsearch {
hosts => ["http://localhost:9308"]
#index => " nu_logs"
index => " nu_logs1"
ilm_enabled => false
manage_template => false
http_compression => false
#document_id => "@timestamp"
}
}
After that now, i see that there are no other unknown column issues as before and now i face this error where it says this nu_logs1 index is absent in the elasticsearch as follows:
[WARN ] 2024-08-15 00:21:01.145 [[main]>worker30] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>" nu_logs1", :routing=>nil}, {"message"=>" ParseReturnCodes: Backend returns error kCASFailure for <vdisk, block>: <26472428930, 26624>", "filename"=>"%{path}", "log_bundle_path"=>"/home/ml/log_bundles/NCC-logs-06-14-2024-1718330601674411443-0005c928-4262-a90d-4a5b-d4f5ef3c9d60", "time"=>"@timestamp", "filepath"=>"%{path}", "pid"=>"5203", "log_level"=>"E", "filename_without_ext"=>"%{path}", "source_log_filename"=>"vdiskmap_ops.cc", "host"=>{"name"=>"ml-worker"}}], :response=>{"index"=>{"_index"=>" nu_logs1", "_type"=>"doc", "_id"=>"0", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"table ' nu_logs1' absent, or does not support INSERT"}}}}
What could be the reason?
This table is defined in the manticore DB:
mysql> DESCRIBE nu_logs1;
+----------------------+-----------+----------------+
| Field | Type | Properties |
+----------------------+-----------+----------------+
| id | bigint | |
| message | text | indexed stored |
| filename_without_ext | string | |
| filename | string | |
| filepath | string | |
| time | timestamp | |
| log_level | string | |
| source_log_filename | string | |
| pid | bigint | |
| cvm_ip | string | |
| log_bundle_path | string | |
| anomaly | bool | |
| anomaly_category | string | |
+----------------------+-----------+----------------+
13 rows in set (0.00 sec)
These are shards that belongs to the table nu_logs
mysql> DESCRIBE nu_logs;
+-----------+-------+
| Agent | Type |
+-----------+-------+
| nu_logs1 | local |
| nu_logs2 | local |
| nu_logs3 | local |
| nu_logs4 | local |
| nu_logs5 | local |
| nu_logs6 | local |
| nu_logs7 | local |
| nu_logs8 | local |
| nu_logs9 | local |
| nu_logs10 | local |
| nu_logs11 | local |
| nu_logs12 | local |
| nu_logs13 | local |
| nu_logs14 | local |
| nu_logs15 | local |
| nu_logs16 | local |
| nu_logs17 | local |
| nu_logs18 | local |
| nu_logs19 | local |
| nu_logs20 | local |
| nu_logs21 | local |
| nu_logs22 | local |
| nu_logs23 | local |
| nu_logs24 | local |
| nu_logs25 | local |
| nu_logs26 | local |
| nu_logs27 | local |
| nu_logs28 | local |
| nu_logs29 | local |
| nu_logs30 | local |
| nu_logs31 | local |
| nu_logs32 | local |
+-----------+-------+
32 rows in set (0.00 sec)
Kindly help. Any ideas?