Hi all,
I am trying to define a custom index template for CSV data. Here is my template:
PUT _template/speedtest
{
  "template": "speedtest*",
  "settings": {
    "number_of_shards": "6",
    "number_of_replicas": "1"
  },
  "mappings": {
    "type1": {
      "_source": {
        "enabled": true
      },
      "properties": {
        "@timestamp": { "type": "date" },
        "DeviceVendor": { "index": true, "type": "keyword" },
        "@version": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "DeviceType": { "index": true, "type": "keyword" },
        "DeviceModel": { "index": true, "type": "keyword" },
        "EnDepth": { "index": true, "type": "keyword" },
        "AlarmName": { "index": true, "type": "keyword" },
        "AlarmCondition": { "index": true, "type": "keyword" },
        "Severity": { "index": true, "type": "keyword" },
        "Actionable": { "index": true, "type": "boolean" },
        "Enrich": { "index": true, "type": "boolean" },
        "CreatedBy": { "index": true, "type": "keyword" },
        "CreatedOn": { "index": true, "type": "date" },
        "UpdatedBy": { "index": true, "type": "keyword" },
        "UpdatedOn": { "index": true, "type": "date" }
      }
    }
  },
  "aliases": { "aemdev": {} }
}
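Just to confirm the template was stored, I check it with a quick GET before creating any index (this is only a sanity check on my side):

GET _template/speedtest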
After loading the template into Elasticsearch, I create an index with dynamic mapping disabled:
PUT speedtest1
{
  "index.mapper.dynamic": false
}
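To double-check that the template's type1 mapping was actually applied to the new index (speedtest1 should match the speedtest* pattern), I look at the mapping like this; it is only a diagnostic step:

GET speedtest1/_mapping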
After that I ingest the data into the index with Logstash. Here is my Logstash config:
input {
  file {
    path => "/home/sgpl/AEM_Sampledata.csv"
    sincedb_path => "/tmp/AEM_Sampledata.csv"
    start_position => "beginning"
  }
}
filter {
  csv {
    columns => ["DeviceVendor1","DeviceModel1","DeviceType1","EnDepth1","AlarmName1","AlarmCondition1","Severity1","Actionable1","Enrich1","CreatedBy1","CreatedOn1","UpdatedBy1","UpdatedOn1"]
    separator => ","
  }
  mutate {
    add_field => { "DeviceVendor" => "%{DeviceVendor1}" }
    add_field => { "DeviceModel" => "%{DeviceModel1}" }
    add_field => { "DeviceType" => "%{DeviceType1}" }
    add_field => { "EnDepth" => "%{EnDepth1}" }
    add_field => { "AlarmName" => "%{AlarmName1}" }
    add_field => { "AlarmCondition" => "%{AlarmCondition1}" }
    add_field => { "Severity" => "%{Severity1}" }
    add_field => { "Actionable" => "%{Actionable1}" }
    add_field => { "Enrich" => "%{Enrich1}" }
    add_field => { "CreatedBy" => "%{CreatedBy1}" }
add_field => { "UpdatedBy1" => "%{UpdatedBy1}" }
add_field => { "CreatedOn" => "%{@timestamp}" }
add_field => { "UpdatedOn" => "%{@timestamp}" }
}
prune {
whitelist_names => ["^DeviceVendor$","^DeviceModel$","^DeviceType$","^EnDepth$","^AlarmName$","^AlarmCondition$","^Severity$","^Actionable$","^Enrich$","^CreatedBy$","^CreatedOn$","^UpdatedBy$","^UpdatedOn$","^@timestamp$","^@version$" ]
}
}
output {
elasticsearch {
hosts => ["http://192.168.1.74:9200"]
index => 'speedtest1'
}
stdout {codec => rubydebug }
}
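One thing I am unsure about: the template defines the mapping under the type name type1, but I do not set any type in the elasticsearch output, so Logstash seems to fall back to its default logs type. Would adding document_type to the output, roughly like below, be the right way to make it use type1? I have not confirmed this is the correct option for my versions:

elasticsearch {
  hosts => ["http://192.168.1.74:9200"]
  index => 'speedtest1'
  document_type => "type1"
}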
During ingestion I got the following warnings:
[2017-08-15T05:40:24,702][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>[#<URI::HTTP:0x22ab879d URL:http://192.168.1.74:9200>]}
[2017-08-15T05:40:24,731][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>500}
[2017-08-15T05:40:25,235][INFO ][logstash.pipeline ] Pipeline main started
[2017-08-15T05:40:25,533][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2017-08-15T05:40:26,167][WARN ][logstash.outputs.elasticsearch] Failed action. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"speedtest1", :_type=>"logs", :_routing=>nil}, 2017-08-15T00:10:25.695Z %{host} %{message}], :response=>{"index"=>{"_index"=>"speedtest1", "_type"=>"logs", "_id"=>"AV3jOinmTUJMOSFhpl_z", "status"=>404, "error"=>{"type"=>"type_missing_exception", "reason"=>"type[logs] missing", "index_uuid"=>"44s89yVsQc-hvqOJn6SiKQ", "index"=>"speedtest1", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"trying to auto create mapping, but dynamic mapping is disabled"}}}}}
[2017-08-15T05:40:26,174][WARN ][logstash.outputs.elasticsearch] Failed action. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"speedtest1", :_type=>"logs", :_routing=>nil}, 2017-08-15T00:10:25.719Z %{host} %{message}], :response=>{"index"=>{"_index"=>"speedtest1", "_type"=>"logs", "_id"=>"AV3jOinmTUJMOSFhpl_0", "status"=>404, "error"=>{"type"=>"type_missing_exception", "reason"=>"type[logs] missing", "index_uuid"=>"44s89yVsQc-hvqOJn6SiKQ", "index"=>"speedtest1", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"trying to auto create mapping, but dynamic mapping is disabled"}}}}}
[2017-08-15T05:40:26,175][WARN ][logstash.outputs.elasticsearch] Failed action. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"speedtest1", :_type=>"logs", :_routing=>nil}, 2017-08-15T00:10:25.636Z %{host} %{message}], :response=>{"index"=>{"_index"=>"speedtest1", "_type"=>"logs", "_id"=>"AV3jOin1TUJMOSFhpl_2", "status"=>404, "error"=>{"type"=>"type_missing_exception", "reason"=>"type[logs] missing", "index_uuid"=>"44s89yVsQc-hvqOJn6SiKQ", "index"=>"speedtest1", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"trying to auto create mapping, but dynamic mapping is disabled"}}}}}
[2017-08-15T05:40:26,184][WARN ][logstash.outputs.elasticsearch] Failed action. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"speedtest1", :_type=>"logs", :_routing=>nil}, 2017-08-15T00:10:25.659Z %{host} %{message}], :response=>{"index"=>{"_index"=>"speedtest1", "_type"=>"logs", "_id"=>"AV3jOin1TUJMOSFhpl_3", "status"=>404, "error"=>{"type"=>"type_missing_exception", "reason"=>"type[logs] missing", "index_uuid"=>"44s89yVsQc-hvqOJn6SiKQ", "index"=>"speedtest1", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"trying to auto create mapping, but dynamic mapping is disabled"}}}}}
[2017-08-15T05:40:26,176][WARN ][logstash.outputs.elasticsearch] Failed action. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"speedtest1", :_type=>"logs", :_routing=>nil}, 2017-08-
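Reading the error, it looks like Logstash is trying to index into a type called logs, which my template does not define, and because dynamic mapping is disabled that type cannot be created on the fly. To verify the setting really is disabled on the index I can run this (diagnostic only):

GET speedtest1/_settings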
After that, when I search for the indexed data in Kibana, I get the following errors on the Discover page:
Saved "field" parameter is now invalid. Please select a new field.
Discover: "field" is a required parameter
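As another sanity check, I was going to index one test document directly against the type from my template, to see whether the mapping itself accepts data; the field values here are just placeholders:

POST speedtest1/type1
{
  "@timestamp": "2017-08-15T05:40:00Z",
  "DeviceVendor": "TestVendor",
  "Severity": "minor",
  "Actionable": true
}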
Please help me find the mistake I am making here.
Thank you.