Elasticsearch -- Not able to convert date to nanoseconds/millis using index template

From Logstash, the JSON below comes in and the field "startTime" arrives as a date. I copy it to a new field, which is already working. I then need to end up with the two fields below:

startTimeMillis Jul 16, 2024 @ 21:24:24.143 [this is working fine]

startTime 1,721,145,264,143,073 [not able to get this]
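
(For reference, the second value appears to be the epoch time in microseconds, which is the unit Jaeger itself uses for startTime. A quick illustrative check in Ruby, the same language as the ruby blocks in my filter:)

1_721_145_264_143_073.divmod(1_000_000)   # => [1721145264, 143073], i.e. 1721145264.143073 s since the epoch
# which is the instant Kibana shows above as Jul 16, 2024 @ 21:24:24.143 in my local timezone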

Below is my JSON log string:

{
	"traceId": "o0uyveRU/8BkjL+lbpDlnQ==",
	"spanId": "73rmqIRmo34=",
	"operationName": "ProcessWorkItem",
	"startTime": "2024-07-22T06:07:12.650881409Z",
	"duration": "0.256620153s",
	"tags": [
		{
			"key": "otel.library.name",
			"vStr": "com.mdi.core.workmgmt.TypedConsumer"
		},
		{
			"key": "ta.work.type",
			"vStr": "INBOUND_ENTITY_INTEGRATION"
		},
		{
			"key": "ta.companyCode",
			"vStr": "AMBER_ROAD"
		},
		{
			"key": "ta.work.status",
			"vStr": "COMPLETED"
		},
		{
			"key": "ta.work.loadDuration",
			"vType": "INT64"
		},
		{
			"key": "service.version",
			"vStr": "24.2"
		},
		{
			"key": "work.processorclass",
			"vStr": "com.mdi.core.entity.integration.EntityStdIntegrationWorkProcessor"
		},
		{
			"key": "ta.work.receivedTime",
			"vType": "INT64",
			"vInt64": "1721628429423"
		},
		{
			"key": "ta.work.delegateDuration",
			"vType": "INT64",
			"vInt64": "257"
		},
		{
			"key": "ta.work.busKeySearch",
			"vStr": "AMBER_ROAD;SO_TEST_93;SALES_ORDER"
		},
		{
			"key": "ta.orgCode",
			"vStr": "AMBER_ROAD"
		},
		{
			"key": "ta.work.id",
			"vStr": "33267221"
		},
		{
			"key": "length",
			"vType": "INT64",
			"vInt64": "3680"
		},
		{
			"key": "ta.work.reportStatusDuration",
			"vType": "INT64",
			"vInt64": "4"
		},
		{
			"key": "ta.work.errorCount",
			"vType": "INT64"
		},
		{
			"key": "ta.work.delegateConsumerTime",
			"vType": "INT64",
			"vInt64": "253"
		},
		{
			"key": "span.kind",
			"vStr": "internal"
		},
		{
			"key": "otel.status_code",
			"vStr": "OK"
		},
		{
			"key": "internal.span.format",
			"vStr": "otlp"
		}
	],
	"process": {
		"serviceName": "TRADE_SINGLETON",
		"tags": [
			{
				"key": "deployment.environment",
				"vStr": "PSR"
			},
			{
				"key": "e2.environment.name",
				"vStr": "OTEL_PST"
			},
			{
				"key": "e2.instance.name",
				"vStr": "TRADE_SINGLETON_1"
			},
			{
				"key": "e2.product.family",
				"vStr": "GTM"
			},
			{
				"key": "e2.product.tenant.id",
				"vStr": "WMMERCURYTA"
			},
			{
				"key": "service.instance.id",
				"vStr": "URN:PSR:GTM:TA:OTEL_PST:TRADE_SINGLETON:TRADE_SINGLETON_1"
			},
			{
				"key": "service.namespace",
				"vStr": "TA"
			},
			{
				"key": "telemetry.sdk.language",
				"vStr": "java"
			},
			{
				"key": "telemetry.sdk.name",
				"vStr": "opentelemetry"
			},
			{
				"key": "telemetry.sdk.version",
				"vStr": "1.34.0"
			}
		]
	},
	"tag": "jaeger_spans"
}

Below is the Logstash filter I am using:


filter 
{ 
	# parse the raw log line as JSON into the event root
	json
	{
		source => "message"
	}
		
	# parse it again under [tmessage] so the nested arrays can be iterated below
	json
	{
		source => "message"
		target => "tmessage"
	}
	
	# copy the ISO-8601 startTime string into startTimeMillis
	mutate {
		copy => {
			"startTime" => "startTimeMillis"
		}
	}
	
	# promote the nested service name to a top-level process.serviceName field
	mutate
	{
		add_field => {"process.serviceName" => "%{[tmessage][process][serviceName]}"}
	}
	
	# flatten the span-level tags array into individual tag.<key> fields ("." in keys becomes "@")
	ruby {
		code => '
			event.get("[tmessage][tags]").each { |a|
				name = a["key"]
				name=name.gsub(/\./,"@")
				if a["vStr"]
					value = a["vStr"]
				elsif a["vInt64"] 
					value = a["vInt64"]
				else
					value = ""
				end
				event.set("tag." + name, value)
				}
			'
		}
				
	# flatten the process-level tags into process.tag.<key> fields
	ruby {
		code => '
			event.get("[tmessage][process][tags]").each { |a|
				name = a["key"]
				name=name.gsub(/\./,"@")
				if a["vStr"]
					value = a["vStr"]
				elsif a["vInt64"]
					value = a["vInt64"]
				else
					value = ""
				end
				event.set("process.tag." + name, value)
			}
		'
		}
		
	# drop the raw message and the temporary parsed copy
	mutate {
		remove_field => [ "message", "tmessage","tags"]
	}
}	
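
One thing I could try on the Logstash side (a rough sketch, not part of my current filter; the startTimeMicros field name is only illustrative) is to parse the ISO-8601 startTime string in one more ruby block and emit it as an epoch-microseconds integer, which the index template could then simply map as a long:

# sketch only: nanosecond-precision ISO-8601 string -> epoch microseconds
ruby {
	code => '
		require "time"
		ts = event.get("startTime")                       # e.g. "2024-07-22T06:07:12.650881409Z"
		if ts
			t = Time.iso8601(ts)                          # keeps the sub-second part
			micros = t.to_i * 1_000_000 + t.nsec / 1_000  # whole seconds -> micros, plus fraction
			event.set("startTimeMicros", micros)
		end
	'
}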

In the index template, if I set the startTime field type to date nanoseconds (date_nanos), it does not work. How can I convert the value to nanoseconds or millis at the Elasticsearch level or on the Logstash side? Please let me know.
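
For reference, the date_nanos mapping I am referring to would sit in an index template roughly like the sketch below (using the _index_template API; the template name, the index pattern, and the extra startTimeMicros long field are placeholders, not my real template):

PUT _index_template/jaeger-spans-template
{
  "index_patterns": ["jaeger-span-*"],
  "template": {
    "mappings": {
      "properties": {
        "startTime":       { "type": "date_nanos" },
        "startTimeMillis": { "type": "date" },
        "startTimeMicros": { "type": "long" }
      }
    }
  }
}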