Elasticsearch - Mapping

Hi All,

I am using ELK to analyse our NetFlow data, and I am amazed by its functionality, but I have a small concern. When I started this project, Elasticsearch was using its default mapping. After analysing the data, I wanted certain fields to be not_analyzed. However, I have more than a week of data already, and I am not sure whether the mapping of old indices can be changed, so I thought that at least from today onward I could have my customised mappings. So I deleted the current index and re-created it with my own mapping.

First, I deleted the index:
curl -XDELETE http://localhost:9200/logstash_netflow_cisco-2016.12.12
Then I re-created it with the new mapping:
curl -XPUT http://localhost:9200/logstash_netflow_cisco-2016.12.12 -d '
{
  "mappings" : {
    "default" : {
      "properties" : {
        "src_hostname" : { "type" : "string", "index" : "not_analyzed" },
        "dst_hostname" : { "type" : "string", "index" : "not_analyzed" },
        "geo" : {
          "properties" : {
            "location" : { "type" : "geo_point", "index" : "not_analyzed" }
          }
        },
        "dst_geoip" : {
          "properties" : {
            "city_name" : { "type" : "string", "index" : "not_analyzed" },
            "country_name" : { "type" : "string", "index" : "not_analyzed" }
          }
        }
      }
    }
  }
}'

Then I did a GET to check whether it had worked, and I got exactly the mapping I wanted:

curl 'localhost:9200/logstash_netflow_cisco-2016.12.12/_mapping?pretty=true'
{
  "logstash_netflow_cisco-2016.12.12" : {
    "mappings" : {
      "netflow_cisco" : {
        "properties" : {
          "@timestamp" : {
            "type" : "date",
            "format" : "strict_date_optional_time||epoch_millis"
          },
          "@version" : {
            "type" : "string"
          },
          "dst_geoip" : {
            "properties" : {
              "area_code" : {
                "type" : "long"
              },
              "city_name" : {
                "type" : "string",
                "index" : "not_analyzed"
              },
              "continent_code" : {
                "type" : "string"
              },
              "country_code2" : {
                "type" : "string"
              },
              "country_code3" : {
                "type" : "string"
              },
              "country_name" : {
                "type" : "string",
                "index" : "not_analyzed"
              },
              "dma_code" : {
                "type" : "long"
              },
              "ip" : {
                "type" : "string"
              },
              "latitude" : {
                "type" : "double"
              },
              "location" : {
                "type" : "double"
              },
              "longitude" : {
                "type" : "double"
              },
              "postal_code" : {
                "type" : "string"
              },
              "real_region_name" : {
                "type" : "string"
              },
              "region_name" : {
                "type" : "string"
              },
              "timezone" : {
                "type" : "string"
              }
            }
          },
          "dst_hostname" : {
            "type" : "string",
            "index" : "not_analyzed"
          },
          "dst_port_name" : {
            "type" : "string"
          },
          "firewall_action" : {
            "type" : "string"
          },
          "geo" : {
            "properties" : {
              "location" : {
                "type" : "geo_point"
              }
            }
          },
          "geoip" : {
            "properties" : {
              "coordinates" : {
                "type" : "double"
              }
            }
          },
          "host" : {
            "type" : "string"
          },
          "netflow" : {
            "properties" : {
              "conn_id" : {
                "type" : "long"
              },
              "protocol" : {
                "type" : "long"
              },
              "version" : {
                "type" : "long"
              },
              "xlate_dst_addr_ipv4" : {
                "type" : "string"
              },
              "xlate_dst_port" : {
                "type" : "long"
              },
              "xlate_src_addr_ipv4" : {
                "type" : "string"
              },
              "xlate_src_port" : {
                "type" : "long"
              }
            }
          },
          "netflow_ProtName" : {
            "type" : "string"
          },
          "src_hostname" : {
            "type" : "string",
            "index" : "not_analyzed"
          },
          "src_port_name" : {
            "type" : "string"
          },
          "type" : {
            "type" : "string"
          }
        }
      },
      "default" : {
        "properties" : {
          "dst_geoip" : {
            "properties" : {
              "city_name" : {
                "type" : "string",
                "index" : "not_analyzed"
              },
              "country_name" : {
                "type" : "string",
                "index" : "not_analyzed"
              }
            }
          },
          "dst_hostname" : {
            "type" : "string",
            "index" : "not_analyzed"
          },
          "geo" : {
            "properties" : {
              "location" : {
                "type" : "geo_point"
              }
            }
          },
          "src_hostname" : {
            "type" : "string",
            "index" : "not_analyzed"
          }
        }
      }
    }
  }
}

It worked fine for that particular day, but the next day the new index was back to the old mapping format. I am not sure how to handle this without deleting the old data, and for new indices I want my custom mappings applied automatically, instead of me deleting the index and putting the mappings in by hand every day.
Could someone please help? It would be really helpful for my project.
Thank you in advance,
Raj

Hi,

please take a look at Index Templates. They let you define rules for how indices should be set up, and those rules are applied automatically whenever a new index is created, so you don't have to delete and re-map anything by hand.
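For example, something along these lines could cover your daily NetFlow indices. This is only a minimal sketch, assuming an Elasticsearch 2.x-era cluster (matching the string/not_analyzed syntax in your mapping above); the template name logstash_netflow_cisco is just illustrative. It uses the _default_ mapping type so the field settings apply to whatever document type Logstash writes (netflow_cisco in your output):

curl -XPUT http://localhost:9200/_template/logstash_netflow_cisco -d '
{
  "template" : "logstash_netflow_cisco-*",
  "mappings" : {
    "_default_" : {
      "properties" : {
        "src_hostname" : { "type" : "string", "index" : "not_analyzed" },
        "dst_hostname" : { "type" : "string", "index" : "not_analyzed" },
        "geo" : {
          "properties" : {
            "location" : { "type" : "geo_point" }
          }
        },
        "dst_geoip" : {
          "properties" : {
            "city_name" : { "type" : "string", "index" : "not_analyzed" },
            "country_name" : { "type" : "string", "index" : "not_analyzed" }
          }
        }
      }
    }
  }
}'

A template only affects indices created after it is stored, so tomorrow's logstash_netflow_cisco-YYYY.MM.DD index should pick up these mappings automatically when Logstash creates it. You can verify the stored template with:

curl 'localhost:9200/_template/logstash_netflow_cisco?pretty=true'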

Thank you very much for the response, Chris! 🙂

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.