Logstash parse date format AM/PM

Hello All,

I need to parse the data below and send it to an Elasticsearch index. For some reason the index gets created but no data arrives in it. I am trying to parse multiple CSV files containing the data below. The date fields contain AM/PM markers, and I am not sure how to parse this through Logstash and map it to an Elasticsearch custom index template.

input {
  file {
    path => "K:/app/iis/logstash_excel_data/*.csv"
    start_position => "beginning"
  }
}

filter {
  csv {
    separator => ","
    columns => ["brandName", "hostName", "instanceName", "tcVersion", "clientType", "clientVersion", "usecaseName", "usecaseStepName", "startTime", "endTime", "duration"]
  }
  
  date {
    match => ["startTime", "MM/dd/yyyy h:mm:ss a"]
    target => "startTime"
  }

  date {
    match => ["endTime", "MM/dd/yyyy h:mm:ss a"]
    target => "endTime"
  }

  mutate { remove_field => [ "[event][original]" ] }

}

output {
  elasticsearch {
    hosts => "https://abc:443"
    ilm_pattern => "{now/d}-000001"
    ilm_rollover_alias => "abc-monitoring"
    ilm_policy => "abc-monitoring-policy"
    api_key => "WA"
    ssl_enabled => true
    ssl_certificate_authorities => "U:\\ec_rot_ca.pem"
    http_compression => true
    data_stream => false
  }
}

PUT _index_template/abc-monitoring
{
  "template": {
    "settings": {
      "index": {
        "lifecycle": {
          "name": "abc-monitoring-policy",
          "rollover_alias": "abc-monitoring"
        },
        "number_of_shards": "1",
        "number_of_replicas": "0"
      }
    },
    "mappings": {
      "properties": {
        "startTime": {
          "format": "MM/dd/yyyy h:mm:ss a",
          "type": "date"
        },
		        "endTime": {
          "format": "MM/dd/yyyy h:mm:ss a",
          "type": "date"
        },
        "@timestamp": {
          "type": "date"
        },
        "brandName": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "hostName": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "instanceName": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "tcVersion": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "clientType": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "clientVersion": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "duration": {
          "type": "integer"
        },
        "usecaseName": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
		"usecaseStepName": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        }
      }
    }
  },
  "index_patterns": [
    "abc-monitoring-*"
  ],
  "composed_of": []
}
--------------------------------------------------------------------------------------

PUT _ilm/policy/pfi-monitoring-policy
{
  "policy": {
    "phases": {
      "hot": {
        "min_age": "0ms",
        "actions": {
          "rollover": {
            "max_age": "1d",
            "max_size": "5gb"
          },
          "set_priority": {
            "priority": 100
          }
        }
      },
      "delete": {
        "min_age": "5d",
        "actions": {
          "delete": {
            "delete_searchable_snapshot": true
          }
        }
      }
    }
  }
}
----------------------------------------------------------------------------------------

This config runs without errors, but no data arrives in the index, even though the index is created.
The dates also need to be usable in Kibana with a date histogram to build visualizations.

Please help here.

Thanks

The template changes and config below worked — posting for future reference:

input {
file {
      path => "P:/Ppp/Mis/logstash_execl_data/*.csv"
      start_position => "beginning"
      sincedb_path => "D:/ppp/mis/logstash/logstash-8.8.2/last_read_file.db"
      mode => "read"
      file_completed_action => "delete"
 
     }
}
 
filter {
        csv {
        separator => ","
        columns => ["brandName","hostName","instanceName","tcVersion","clientType","clientVersion","usecaseName","usecaseStepName","startTime","endTime","duration"]
}

mutate { remove_field => [ "[event][original]" ] }
}
 
output {
	elasticsearch {
		hosts => "https://abc:443"
		ilm_pattern => "{now/d}-000001"
		ilm_rollover_alias => "abc-monitoring"
		ilm_policy => "abc-monitoring-policy"
		api_key => ""
		ssl_enabled => true
		ssl_certificate_authorities => "U:\\ece_proxy_root_ca.pem"
		http_compression => true
		data_stream => false
	}  
}

"startTime": {
"format": "M/d/yyyy h:mm:ss a",
"type": "date"
},
"endTime": {
"format": "M/d/yyyy h:mm:ss a",
"type": "date"
}

Do you have any error in Logstash? Please check your Logstash errors.

Also, you are using the date filter in Logstash, this will change the format of the field.

After the field startTime passes through the date filter, its value will no longer be in the format MM/dd/yyyy h:mm:ss a; it will be in ISO8601 format, something like yyyy-MM-dd'T'HH:mm:ss.SSSZ

You can easily check this adding a file or stdout output to your logstash pipeline.

Since you set the format in the mapping, Elasticsearch is probably rejecting the documents because the converted value no longer matches the specified format — this rejection would be logged in the Logstash logs.

You can remove the date filters from your Logstash pipeline so Logstash will not change the original field values, or you can remove the format from your mappings so Elasticsearch accepts the ISO8601 values.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.