Hi,
I am using ELK GA 6, and I am trying to create a brand new index. Before pushing into the topic using Logstash, I have created a template like below;
PUT _template/my_template
{
"index_patterns": ["mylogs*"],
"mappings": {
"type1": {
"properties": {
"@timestamp": {
"type": "date"
},
"@version": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"beat": {
"properties": {
"hostname": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"name": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"version": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
}
}
},
"f6": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f1": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f8": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f3": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f2": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"message": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f7": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f4": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"offset": {
"type": "long"
},
"prospector": {
"properties": {
"type": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
}
}
},
"source": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"f5": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"timestamp": {
"type": "date",
"format": "MMM dd, YYYY h:mm:ss:SSS aa"
}
}
}
}
}
below is my logstash configuration;
input {
kafka{
group_id => "group_1"
topics => ["topic_1"]
bootstrap_servers => "192.168.0.1:9092"
codec => json
}
}
filter {
grok {
match => { "message" => "<(?<timestamp>%{MONTH} %{MONTHDAY}, 20%{YEAR} %{HOUR}:?%{MINUTE}(?::?%{SECOND}) (?:AM|PM))\> <%{NOTSPACE:f1}> <%{NOTSPACE:f2}> <%{NOTSPACE:f3}> <%{NOTSPACE:f4}> <%{NOTSPACE:f5}> <%{NOTSPACE:f6}> <%{NOTSPACE:f7}>\n <%{GREEDYDATA:f8}>" }
}
date {
match => [ "timestamp", "MMM dd, YYYY h:mm:ss:SSS aa" ]
timezone => "UTC"
target => "@timestamp"
}
}
output{
elasticsearch {
hosts => ["192.168.0.1"]
index => "mylogs-%{+YYYY-MM-dd}"
}
}
when I try to execute this, I am getting the error like;
[2017-11-29T16:55:46,753][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"mylogs-2017-11-24", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x100cee42>], :response=>{"index"=>{"_index"=>"mylogs-2017-11-24", "_type"=>"doc", "_id"=>"l5OGB2AB6TiNi_DsjbZm", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"Rejecting mapping update to [mylogs-2017-11-24] as the final mapping would have more than 1 type: [doc, type1]"}}}}
Why is this happening and how can I fix this?
Thanks.