Logstash into Elasticsearch Mapping Issues

Does anyone have an idea what to do in a situation where I am using the
elasticsearch output in Logstash to send events to an Elasticsearch cluster
via the http protocol, with a JSON template, and the mappings in the JSON
template aren't being applied in the Elasticsearch cluster?

logstash.conf

input {
  tcp {
    port => 5170
    type => "sourcefire"
  }
}

filter {

  mutate {
    split => ["message", "|"]
    add_field => {
      "event" => "%{message[5]}"
      "eventSource" => "%{message[1]}"
    }
  }

  kv {
    include_keys => ["dhost", "dst", "dpt", "shost", "src", "spt", "rt"]
  }

  mutate {
    rename => [ "dhost", "destinationHost" ]
    rename => [ "dst", "destinationAddress" ]
    rename => [ "dpt", "destinationPort" ]
    rename => [ "shost", "sourceHost" ]
    rename => [ "src", "sourceAddress" ]
    rename => [ "spt", "sourcePort" ]
  }

  date {
    match => ["rt", "UNIX_MS"]
    target => "eventDate"
  }

  # src/dst were renamed to sourceAddress/destinationAddress above,
  # so the geoip filters have to reference the renamed fields.
  geoip {
    add_tag => [ "sourceGeo" ]
    source => "sourceAddress"
    database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
  }

  geoip {
    add_tag => [ "destinationGeo" ]
    source => "destinationAddress"
    database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
  }

}

output {
  if [type] == "sourcefire" {
    elasticsearch {
      cluster => "XXX-cluster"
      flush_size => 1
      manage_template => true
      template => "/opt/logstash/lib/logstash/outputs/elasticsearch/elasticsearch-sourcefire.json"
    }
  }
}

JSON Template

{
  "template": "logstash-*",
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "Sourcefire": {
      "_all": { "enabled": true },
      "properties": {
        "@timestamp":         { "type": "date", "format": "basicDateTimeNoMillis" },
        "@version":           { "type": "string", "index": "not_analyzed" },
        "geoip": {
          "type": "object",
          "dynamic": true,
          "path": "full",
          "properties": {
            "location": { "type": "geo_point" }
          }
        },
        "event":              { "type": "string", "index": "not_analyzed" },
        "eventDate":          { "type": "date", "format": "basicDateTimeNoMillis" },
        "destinationAddress": { "type": "ip" },
        "destinationHost":    { "type": "string", "index": "not_analyzed" },
        "destinationPort":    { "type": "integer", "index": "not_analyzed" },
        "sourceAddress":      { "type": "ip" },
        "sourceHost":         { "type": "string", "index": "not_analyzed" },
        "sourcePort":         { "type": "integer", "index": "not_analyzed" }
      }
    }
  }
}
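A quick way to narrow this kind of problem down is to ask the cluster directly which templates it knows about and which mappings the index actually ended up with. A minimal check over HTTP, assuming Elasticsearch is reachable on localhost:9200 and that a daily logstash-* index exists (the host, port, and index name below are assumptions, not taken from the post above):

# List every index template currently registered on the cluster:
curl 'http://localhost:9200/_template?pretty'

# Show the mappings that were actually applied to one of the logstash indices
# (substitute a real index name from your cluster):
curl 'http://localhost:9200/logstash-2014.10.02/_mapping?pretty'

If the template shows up in the first response but the second one doesn't reflect it, keep in mind that templates are only applied when an index is created, so existing indices keep whatever mappings they were created with.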


I haven't ever let Logstash set the default mappings. Instead, whenever a
logstash-style index is created, I let Elasticsearch set the default
mappings from its own template. That way, it keeps working even if I replace
Logstash with something else.

For example, my $ES_CONFIG/templates/automap.json file contains the following:

{
  "automap" : {
    "template" : "logstash-*",
    "settings" : {
      "index.mapping.ignore_malformed" : true
    },
    "mappings" : {
      "_default_" : {
        "numeric_detection" : true,
        "_all" : { "enabled" : false },
        "properties" : {
          "message" : { "type" : "string" },
          "host" : { "type" : "string" },
          "UUID" : { "type" : "string", "index" : "not_analyzed" },
          "logdate" : { "type" : "string", "index" : "no" }
        }
      }
    }
  }
}
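For what it's worth, the same template can also be registered over the REST API rather than dropped into $ES_CONFIG/templates. A rough sketch, assuming Elasticsearch is on localhost:9200 (the host and the trimmed-down body below are illustrations, not Brian's exact setup):

# Register the template under the name "automap" using the index templates API.
# The PUT body is the template definition itself ("template", "settings",
# "mappings"); the "properties" section is trimmed here for brevity:
curl -XPUT 'http://localhost:9200/_template/automap' -d '{
  "template" : "logstash-*",
  "settings" : { "index.mapping.ignore_malformed" : true },
  "mappings" : {
    "_default_" : {
      "numeric_detection" : true,
      "_all" : { "enabled" : false }
    }
  }
}'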

And since Logstash stores the entire message in the message field and I
never modify that particular field, I disable the _all field and tell
Elasticsearch to use the message field as the default field for Kibana
queries, via the following Java option when starting Elasticsearch as part
of the ELK stack:

-Des.index.query.default_field=message
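In case it is useful, here is roughly where that option goes, assuming a stock 1.x tarball started from bin/elasticsearch (the elasticsearch.yml equivalent is shown as a comment; both lines are illustrations rather than part of Brian's setup):

# Pass the setting as a system property when starting Elasticsearch:
bin/elasticsearch -Des.index.query.default_field=message

# Or set the equivalent key in config/elasticsearch.yml:
#   index.query.default_field: message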

I hope this helps!

Brian


I also have the following Logstash output configuration:

output {

  # For testing only
  stdout { codec => rubydebug }

  # Elasticsearch via HTTP REST
  elasticsearch {
    protocol => "http"
    codec => json
    manage_template => false

    # Or whatever target ES host is required:
    host => "localhost"

    # Or whatever _type is desired: usually the environment name,
    # e.g. qa, devtest, prod, and so on:
    index_type => "sample"
  }
}

Brian
