Hi
I am trying to import a JSON file into Elasticsearch to use with Kibana. I am able to pull the file into Elasticsearch using Logstash, but I am getting stuck on one issue.
The JSON has the timestamp for each record as:
"Timestamp": "/Date(1446528260196-0500)/"
Is there a way I can use Grok (or another Logstash filter) to parse that field — it appears to be a .NET-style date, i.e. epoch milliseconds followed by a UTC offset — and convert it so it will serve as a recognized timestamp field in my Elasticsearch index? Unfortunately, I am not familiar with Grok at all, so I am at a loss here.
Below is an example of the JSON.
Thanks for any help you can provide.
{ "LineNumber": 0, "SqlServerName": null, "SqlErrorNumber": 0, "SqlErrorMessage": null, "SqlProcedure": null, "SqlLineNumber": 0, "LogInformation": { "ServiceType": 1, "ServiceMethod": null, "HttpMethod": 0, "StatusCode": 404, "Controller": "carriercred", "Action": null, "TransactionResultCode": null, "ElapsedTime": "00:00:00.0178840", "PartnerId": null, "OrderId": null, "CarrierId": null, "MessageId": null }, "HttpStatusCode": 404, "HttpMethod": "Get", "Controller": "carriercred", "Action": null, "ServiceMethod": null, "ServiceType": "Rest", "ElapsedMs": 17.884, "LogLevel": "Info", "HostName": "", "ServerIP": "::1", "MethodName": "b__0", "ExceptionMethodName": null, "ExceptionTypeFullName": null, "StackTrace": null, "LogMessage": "GET /secapi/carriercred/876ca42065c64067af5dc2532c3625d1 404", "Timestamp": "/Date(1446528260196-0500)/", "AdditionalData": null, "Namespace": "Service.Security.Http", "ClassName": "<>c__DisplayClass3", "ProcessId": 6888, "ThreadId": 7, "CurrentIdentityName": "xxx", "CurrentIdentityAuthenticationType": "TrustedSubsystem", "IsCurrentIdentityAuthenticated": true, "SourceSystem": "ServiceSecurity", "ExceptionSource": null, "TransactionResultCode": null, "MessageType": "ServiceResult"}