Hi. I am trying to create a new index in Elasticsearch so that I can see it in Kibana. The relevant filter section of my Logstash conf file looks like this:
if [type] == "uatelectron_appLog" {
  mutate {
    # split the pipe-delimited message into an array and copy the pieces into named fields
    split => ["message", "|"]
    add_field => {
      "timestamp"   => "%{[message][0]}"
      "requestId"   => "%{[message][1]}"
      "requestType" => "%{[message][2]}"
      "environment" => "%{[message][3]}"
      "version"     => "%{[message][4]}"
      "service"     => "%{[message][5]}"
      "RegionSsoid" => "%{[message][6]}"
      "detail"      => "%{[message][7]}"
      "detailName"  => "%{[message][8]}"
      "detailValue" => "%{[message][9]}"
      "other"       => "%{[message][10]}"
    }
  }
  grok {
    # REQUEST_ID is a custom pattern kept in the patterns directory below
    patterns_dir => ["/opt/gp/portal/elasticsearch/app/logstash/patterns"]
    match => [ "reqID", "%{GREEDYDATA}%{REQUEST_ID:requestId}" ]
  }
  date {
    # parse the extracted timestamp into @timestamp
    match => ["timestamp", "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"]
  }
}
mutate {
  add_field => { "_ELKId" => "%{@timestamp}%{offset}" }
}
mutate {
  convert => {
    "timestamp"   => "string"
    "requestId"   => "string"
    "requestType" => "string"
    "Region"      => "string"
  }
}
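For reference, the output section is a plain elasticsearch output writing to a daily index. I am sketching it from memory below, so treat it as approximate; the host is a placeholder, and the index name matches the one that appears in the error further down:

output {
  elasticsearch {
    hosts => ["localhost:9200"]   # placeholder, not the real host
    index => "uatelectron_applog-%{+YYYY.MM.dd}"
  }
}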
I see the following error in the Logstash log when a message is pushed through Filebeat:
[2020-12-08T10:04:17,691][WARN ][logstash.outputs.elasticsearch][main] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"uatelectron_applog-2020.12.08", :routing=>nil, :_type=>"_doc"}, #<LogStash::Event:0x127f9e22>], :response=>{"index"=>{"_index"=>"uat_cv_electron_applog-2020.12.08", "_type"=>"_doc", "_id"=>"3H_hQnYBZDd5lC9rt2fO", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"mapper [message] of different type, current_type [text], merged_type [date]"}}}}
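If I read the error correctly, the existing index maps the message field as text, but this particular event would map message as date; I suspect that is because, after the mutate split, message becomes an array whose first element is the ISO timestamp. I believe the current mapping for the field can be checked from Kibana Dev Tools with something like the request below (the index name is copied from the error; the request itself is just my assumption about how to inspect it):

GET uatelectron_applog-2020.12.08/_mapping/field/message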
What is causing this mapping conflict on the message field, and how should I change my configuration so that these events get indexed? Any help is appreciated.
Thanks