Hello all,
I'm getting an error while indexing data into Elasticsearch.
This is my template:
{
  "template": ".alertmanager-2*",
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 1,
    "index.refresh_interval": "10s",
    "index.query.default_field": "groupLabels.alertname"
  },
  "mappings": {
    "_default_": {
      "_all": {
        "enabled": false
      },
      "properties": {
        "@timestamp": {
          "type": "date",
          "doc_values": true
        }
      },
      "dynamic_templates": [
        {
          "string_fields": {
            "match": "*",
            "match_mapping_type": "string",
            "mapping": {
              "type": "string",
              "index": "not_analyzed",
              "ignore_above": 1024,
              "doc_values": true
            }
          }
        }
      ]
    }
  }
}
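For completeness, the mapping that actually ended up on the index can be inspected like this (index name and host taken from the log below, assuming the same node):

curl -s 'http://127.0.0.1:9200/alertmanager-2018.11/_mapping?pretty'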
This is the exception from the log file:
2018/11/12 16:14:33 POST to Elasticsearch on "http://127.0.0.1:9200/alertmanager-2018.11/alert_group" returned HTTP 400: {"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"failed to find type parsed [string] for [summary]"}],"type":"mapper_parsing_exception","reason":"failed to find type parsed [string] for [summary]"},"status":400}
[2018-11-12T16:14:38,895][DEBUG][o.e.a.b.TransportShardBulkAction] [alertmanager-2018.11][0] failed to execute bulk item (index) BulkShardRequest [[alertmanager-2018.11][0]] containing [index {[alertmanager-2018.11][alert_group][9hS0CGcB_ks-8w2cFUEt], source[{"alerts":[{"annotations":{"summary":"Instance Azrieli Holon:grok_mango - Instance Down"},"endsAt":"2018-11-12T15:55:19.226473212Z","generatorURL":"http://Prometheus-Server:9090/graph?g0.expr=up+%3D%3D+0\u0026g0.tab=1","labels":{"alertname":"instanceDown","instance":"Azrieli Holon","job":"grok_mango","severity":"critical"},"startsAt":"2018-11-12T15:53:19.226473212Z","status":"resolved"},{"annotations":{"summary":"Instance Azrieli Holon:mysql - Instance Down"},"endsAt":"0001-01-01T00:00:00Z","generatorURL":"http://Prometheus-Server:9090/graph?g0.expr=up+%3D%3D+0\u0026g0.tab=1","labels":{"alertname":"instanceDown","instance":"Azrieli Holon","job":"mysql","severity":"critical"},"startsAt":"2018-11-12T16:10:19.226473212Z","status":"firing"},{"annotations":{"summary":"Instance Azrieli Holon:node - Instance Down"},"endsAt":"2018-11-12T15:55:19.226473212Z","generatorURL":"http://Prometheus-Server:9090/graph?g0.expr=up+%3D%3D+0\u0026g0.tab=1","labels":{"alertname":"instanceDown","instance":"Azrieli Holon","job":"node","severity":"critical"},"startsAt":"2018-11-12T15:52:19.226473212Z","status":"resolved"}],"commonAnnotations":{},"commonLabels":{"alertname":"instanceDown","instance":"Azrieli Holon","severity":"critical"},"externalURL":"http://Prometheus-Server:9093","groupLabels":{"alertname":"instanceDown"},"receiver":"alertmanager2es","status":"firing","version":"4","groupKey":"{}:{alertname="instanceDown"}","@timestamp":"2018-11-12T16:14:38Z"}]}]
org.elasticsearch.index.mapper.MapperParsingException: failed to find type parsed [string] for [summary]
at org.elasticsearch.index.mapper.RootObjectMapper.findTemplateBuilder(RootObjectMapper.java:255) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.RootObjectMapper.findTemplateBuilder(RootObjectMapper.java:236) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.createBuilderFromDynamicValue(DocumentParser.java:745) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseDynamicValue(DocumentParser.java:807) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:608) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:403) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:380) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:478) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObject(DocumentParser.java:514) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:390) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:380) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:478) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObject(DocumentParser.java:514) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseNonDynamicArray(DocumentParser.java:580) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseArray(DocumentParser.java:551) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:392) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:380) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:95) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:69) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:261) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.shard.IndexShard.prepareIndex(IndexShard.java:708) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.shard.IndexShard.applyIndexOperation(IndexShard.java:685) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.index.shard.IndexShard.applyIndexOperationOnPrimary(IndexShard.java:666) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.action.bulk.TransportShardBulkAction.lambda$executeIndexRequestOnPrimary$2(TransportShardBulkAction.java:553) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.action.bulk.TransportShardBulkAction.executeOnPrimaryWhileHandlingMappingUpdates(TransportShardBulkAction.java:572) ~[elasticsearch-6.3.2.jar:6.3.2]
at org.elasticsearch.action.bulk.TransportShardBulkAction.executeIndexRequestOnPrimary(TransportShardBulkAction.java:551) ~[elasticsearch-6.3.2.jar:6.3.2]
        ...
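From the stack trace I can see the cluster is running Elasticsearch 6.3.2. As far as I understand, the string field type (and "index": "not_analyzed" with it) was deprecated in 5.x and removed for indices created in 6.x in favour of text and keyword, which would explain the "failed to find type parsed [string] for [summary]" message.

Here is my attempt at rewriting the template for 6.x, just a sketch, assuming every string field should be stored as an exact-match keyword. I replaced the deprecated "template" key with "index_patterns", dropped the leading dot so the pattern matches alertmanager-2018.11 (the index the POST in the log goes to), used alert_group as the mapping type (also taken from the POST URL), and removed "_all" since 6.x disables it by default:

{
  "index_patterns": ["alertmanager-2*"],
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 1,
    "index.refresh_interval": "10s",
    "index.query.default_field": "groupLabels.alertname"
  },
  "mappings": {
    "alert_group": {
      "properties": {
        "@timestamp": {
          "type": "date"
        }
      },
      "dynamic_templates": [
        {
          "string_fields": {
            "match": "*",
            "match_mapping_type": "string",
            "mapping": {
              "type": "keyword",
              "ignore_above": 1024
            }
          }
        }
      ]
    }
  }
}

Does that look like the right fix, or am I missing something else?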
Thanks,
Mark.