Query on @timestamp mapping

Hi,
I have a logstash config file which parses some logs and indexes them into elasticsearch.
I have renamed the @timestamp field to a "timestamp" field, for convenience in my project, as shown below:

mutate
{
rename => { "@timestamp" => "timestamp" }
}

I have not specified any mapping for the above timestamp field in elasticsearch, so elasticsearch takes the default mapping as:

"timestamp" : {
"type" : "date",
"format" : "strict_date_optional_time||epoch_millis"
}

But now I want to change the format of timestamp to "yyyy-MM-dd HH:mm:ss" or "MMM dd yyyy HH:mm:ss" (note: in the Joda pattern syntax Elasticsearch uses, `MM` means month and `SS` means fraction-of-second, so `HH:MM:SS` would not mean hours:minutes:seconds).

If I put either of the above formats in my mapping and try to parse the file through Logstash, it throws the error below:

"reason"=>"Failed to parse mapping [testinfo]: Root mapping definition has unsupported parameters: [timestamp : {format=YYYY-MM-dd:HH:mm:ss||epoch_millis, type=date}]", "caused_by"=>{"type"=>"mapper_parsing_exception", "reason"=>"Root mapping definition has unsupported parameters: [timestamp : {format=YYYY-MM-dd HH:mm:ss||epoch_millis, type=date}]"}}}}, :level=>:warn}

Could you please let me know how to overcome this issue?

Hi,

There's something wrong in your mapping. Can you please copy/paste your mapping update request here?

mapping:

"mappings": {
"testinfo": {
"dynamic_templates": [{
"string_fields": {
"match": "*",
"match_mapping_type": "string",
"mapping": {
"type": "string",
"analyzer": "whitespaceanalyzer",
"fields": {
"raw": {
"type": "string",
"index": "not_analyzed"
}
}
}

			}
		}],
		"properties": {
		 "timestamp" : {
        "type" : "date",
        "format" : "yyyy-MM-dd HH:mm:ss||epoch_millis"
      }}
	}

}

Config file:

# Tail the NSP log files, folding multi-line log entries into single events.
input
{
file
{
# Multi-line handling: negate => true plus what => previous appends every
# line that does NOT match the pattern onto the previous event, so each
# event accumulates until a "Logfile is saved" line arrives.
codec => multiline
{
pattern => 'Logfile is saved'
negate => true
what => previous
}
path => ["path/to/log"]
# Read files from the top on first discovery (not just new lines).
start_position => "beginning"
#sincedb_path => "/path/to/NSP.db1"
# Writing the sincedb to /dev/null means read positions are never
# persisted: every Logstash restart re-reads all matched files in full.
sincedb_path => "/dev/null"
type => "NSP"
# NOTE(review): ignore_older => 0 asks the file input to ignore files whose
# mtime is older than 0 seconds — on several Logstash versions this skips
# every pre-existing file. Confirm this is intended; drop the setting (or
# use a large value) if old log files should be ingested.
ignore_older => 0

}

}

# Parse NSP test-run logs: derive job/stage metadata from the file path,
# grok the test name / timing / result out of the message, normalize the
# status value, and reformat the execution time as "0d HH:MM:SS".
filter
{
    if [type] == "NSP"
    {
        # Derive job, stage and logpath fields from the log file's path.
        # NOTE(review): relies on a fixed directory depth (indices -5 / -3);
        # confirm this matches the `path` glob configured in the input.
        ruby
        {
            code => "
                filename = event['path'].split('/')[-5];
                event['job'] = filename
                stagename_temp = event['path'].split('/')[-3];
                stagename = stagename_temp.split('__').last;
                event['stage'] = stagename+'_'+event['job']
                event['logpath']= event['path'].strip
            "
        }
        # Extract testname, testlogpath, execution time and pass/fail status
        # from the multi-line event built by the multiline codec.
        grok
        {
            match => [
                "message" , "%{GREEDYDATA}INFO-> Test Script Name: %{GREEDYDATA:testname}\n%{YEAR}/%{MONTHNUM}/%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? INFO-> Logfile is %{GREEDYDATA:testlogpath}\n%{YEAR}/%{MONTHNUM}/%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}? INFO-> Test Start time : %{GREEDYDATA:data3} INFO-> The Script took %{GREEDYDATA:executiontime}\n%{YEAR}/%{MONTHNUM}/%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?%{GREEDYDATA} Test Result: %{GREEDYDATA:status}"
            ]
        }
        mutate
        {
            # NOTE(review): no "msg" field is created anywhere in this
            # pipeline — this gsub is a no-op as written. It was probably
            # meant to target "message"; confirm before changing.
            gsub => ["msg", "\r\n", ""]
        }
        # NOTE(review): nothing in this pipeline (no json codec or filter)
        # ever sets the _jsonparsefailure tag, so this drop never fires.
        if "_jsonparsefailure" in [tags]
        {
            drop{}
        }

        if "_grokparsefailure" in [tags]
        {
            drop {}
        }
        else
        {
            mutate
            {
                # Renaming @timestamp removes the canonical Logstash
                # timestamp field from the event; downstream tooling that
                # expects @timestamp (e.g. Kibana's time filter) will not
                # find it.
                rename => { "@timestamp" => "timestamp" }
                # FIX: "data3" and "tags" were unquoted barewords, which the
                # Logstash config parser does not accept inside an array
                # literal — all entries must be quoted strings.
                remove_field => [ "message", "@version", "path", "host", "data3", "tags", "testlogpath" ]
            }
        }
        # Normalize the grokked status to lowercase pass/fail/abort.
        # "Abort " (trailing space) and "Abort" are handled by separate
        # branches because grok may capture a trailing blank.
        if [status] == "Passed"
        {
            mutate
            {
                replace => ["status", "pass"]
            }
        }
        else if [status] == "Failed"
        {
            mutate
            {
                replace => ["status", "fail"]
            }
        }
        else if [status] == "Abort "
        {
            mutate
            {
                replace => ["status", "abort"]
            }
        }
        else if [status] == "Abort"
        {
            mutate
            {
                replace => ["status", "abort"]
            }
        }
        # Strip unit words like "1 Hours 2 Minutes 3 Seconds" down to "1:2:3"
        # style; the units are only present together, hence the single guard.
        if "Hours" in [executiontime]
        {
            mutate
            {
                gsub => ["executiontime","Hours",""]
                gsub => ["executiontime","Minutes",""]
                gsub => ["executiontime","Seconds",""]
                gsub => ["executiontime"," ",""]
            }
        }
        # Zero-pad each H:M:S component to two digits (e.g. 1:2:3 -> 01:02:03).
        ruby
        {
            code => "
                hour             = event['executiontime'].split(':')[0];
                min              = event['executiontime'].split(':')[1];
                sec              = event['executiontime'].split(':')[2];
                if(hour.length==1);
                    hour = '0'+hour;
                end;
                if(min.length==1);
                    min = '0'+min;
                end;
                if(sec.length==1);
                    sec = '0'+sec;
                end;
                event['executiontime'] = hour+':'+min+':'+sec
            "
        }
        # Prefix a zero-days component so the value reads "0d HH:MM:SS".
        mutate{
            gsub => ['executiontime', '^', '0d ']
        }
    }

}

output
{
if [type] == "NSP"
{
# Echo every event to the console for debugging.
stdout { codec => rubydebug }
elasticsearch
{
# Install the local JSON file as index template "my_template" so the
# custom field mappings (e.g. the "timestamp" date format) apply when
# the index is created.
# NOTE(review): the template JSON must carry a top-level index pattern
# (e.g. "template": "myindex*") that matches `index` below, and the
# field definitions must sit under mappings.<type>.properties —
# otherwise Elasticsearch rejects it with "Root mapping definition has
# unsupported parameters". Verify the template file's structure.
template_name => "my_template"
manage_template => true
template => "/etc/logstash/mapping/my_template.json"
hosts => "127.0.0.1:9200"
index => "myindex"
document_type => "testinfo"
# Deterministic id: re-running the same job/test overwrites the prior
# document instead of creating a duplicate.
document_id => "%{job}_%{testname}"
}
}
}

I am placing my mapping into a template file and then using the template in my Logstash config file so that the mapping gets applied.