I'm having an issue related to this, but that discussion goes off topic. I'm using the Logstash default template and have the .keyword field, but it's empty and I'd like to visualize the data. The documentation tells me that, via dynamic field mapping, a text field with a keyword subfield will be created. That keyword field does not seem to be automatically populated with data like I thought it might be.
Is this by design? Do I need to create a template with a mapping to make the main fields aggregatable instead, since my data doesn't have a keyword field in the source? Logstash seems to be parsing everything correctly based on the pattern/conf.
Index snip
{
"logstash-2017.09.05" : {
"aliases" : { },
"mappings" : {
"system_logs" : {
"_all" : {
"enabled" : true,
"norms" : false
},
"dynamic_templates" : [
{
"message_field" : {
"path_match" : "message",
"match_mapping_type" : "string",
"mapping" : {
"norms" : false,
"type" : "text"
}
}
},
{
"string_fields" : {
"match" : "*",
"match_mapping_type" : "string",
"mapping" : {
"fields" : {
"keyword" : {
"ignore_above" : 256,
"type" : "keyword"
}
},
"norms" : false,
"type" : "text"
}
}
}
],
"properties" : {
"@timestamp" : {
"type" : "date",
"include_in_all" : false
},
"@version" : {
"type" : "keyword",
"include_in_all" : false
},
"action_id" : {
"type" : "text",
"norms" : false,
"fields" : {
"keyword" : {
"type" : "keyword",
"ignore_above" : 256
}
}
},
"client_ip" : {
"type" : "text",
"norms" : false,
"fields" : {
"keyword" : {
"type" : "keyword",
"ignore_above" : 256
}
}
},
Logstash Config
# Pipeline: receive syslog-style events over TCP, grok-parse them, and
# index them into Elasticsearch under a daily logstash-* index.
input {
  tcp {
    port => 5000
    type => "syslog"   # tag events so the filter below only touches syslog traffic
  }
}
filter {
  if [type] == "syslog" {
    grok {
      patterns_dir => ["/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-patterns-core-4.1.1/patterns"]
      # FIX: the original declared `match` seven times with the same
      # "message" key. Repeated option declarations with a duplicate key
      # are not the supported way to supply alternatives and may silently
      # keep only one pattern. The documented form is a single `match`
      # with an array of patterns; with the default break_on_match => true,
      # grok tries them in order and stops at the first that matches.
      match => {
        "message" => [
          "%{BSF_SCAN}",
          "%{BSF_SEND}",
          "%{BSF_SEND_NO_DESTINATION}",
          "%{BSF_RECV_SCAN}",
          "%{BSF_RECV_SCAN_2}",
          "%{BSF_RECV}",
          "%{BSF_WEB_SYSLOG}"
        ]
      }
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "logstash-%{+YYYY.MM.dd}"     # daily index, matches the logstash-* template
    document_type => "system_logs"         # NOTE(review): document types are deprecated in ES 6+ and removed in 7+ — confirm target ES version
  }
}