# My Logstash config file: reads the incident-details CSV extract and indexes it into Elasticsearch.
input {
  # Tail the incident-details extract file.
  file {
    # The file input's `path` option is a glob; per the plugin docs it must
    # use forward slashes even on Windows — backslashes are not matched by
    # the glob engine, so the original "C:\Users\..." path was never picked up.
    path => "C:/Users/1480587/Documents/Chander/Elastic/Data/Inc_Details.txt"
    # Read the whole file from the start on first discovery (default is "end").
    start_position => "beginning"
    # Discard read-position state so the file is re-read on every restart.
    # "NUL" is the documented null-device spelling on Windows.
    sincedb_path => "NUL"
  }
}
filter {
  # NOTE(review): this gsub replaces EVERY whitespace character in the raw
  # line with "0" — including spaces inside free-text columns such as
  # SUMMARY and RESOLUTION ("Disk full" becomes "Disk0full"). If the intent
  # was only to default empty numeric fields to 0, this is too broad —
  # confirm before relying on the text columns downstream.
  mutate {
    gsub => ["[message]", "\s", "0"]
  }
  # Split each line into named columns.
  csv {
    separator => ","
    #skip_empty_columns => true
    columns => [ "Month", "Quarter", "Year", "INCIDENT_ID", "REQ_ID", "COUNTRY", "SERVICE", "ASSIGNED_GROUP", "STATUS", "SERVICE_TYPE", "PRIORITY", "URGENCY", "REPORTED_DATE", "SUBMIT_DATE", "LAST_RESOLVED_DATE", "LAST_MODIFIED_DATE", "CLOSED_DATE", "RESPONDED_DATE", "OPS_CATEGORIZATION_TIER_1", "OPS_CATEGORIZATION_TIER_2", "OPS_CATEGORIZATION_TIER_3", "PRODUCT_CATEGORIZATION_TIER_1", "PRODUCT_CATEGORIZATION_TIER_2", "PRODUCT_CATEGORIZATION_TIER_3", "SUMMARY", "SLM_STATUS", "ASSIGNED_SUPPORT_ORGANIZATION", "FIRST_NAME", "LAST_NAME", "OWNER_GROUP", "OWNER_SUPPORT_ORGANIZATION", "DIRECT_CONTACT_COMPANY", "RESOLUTION", "RESOLUTION_CATEGORY", "RESOLUTION_CATEGORY_TIER_2", "RESOLUTION_CATEGORY_TIER_3", "CLOSURE_PRODUCT_CATEGORY_TIER1", "CLOSURE_PRODUCT_CATEGORY_TIER2", "CLOSURE_PRODUCT_CATEGORY_TIER3", "SLA_RESUME_MIN", "SLA_GOAL", "INC_SLA", "SLA_OVERALLSTARTTIME", "SLA_OVERALLSTOPTIME" ]
  }
  # Cast the numeric columns in a single mutate using the hash form of
  # `convert` (equivalent to the six separate mutate blocks it replaces).
  mutate {
    convert => {
      "Month"          => "integer"
      "Quarter"        => "integer"
      "Year"           => "integer"
      "INCIDENT_ID"    => "integer"
      "REQ_ID"         => "integer"
      "SLA_RESUME_MIN" => "integer"
    }
  }
}
output {
  # Index every event into the "reports" index.
  elasticsearch {
    # Array-of-hosts with an explicit port is the documented convention;
    # 9200 is the default that the bare "localhost" value already implied.
    hosts => ["localhost:9200"]
    index => "reports"
    # document_type is deprecated and removed in recent Elasticsearch
    # versions, so it stays commented out.
    #document_type => "Inc Test"
  }
  # Mirror each raw line to a local file for debugging. Forward slashes keep
  # the path style consistent with the input section; Windows accepts both.
  file {
    path => "C:/Users/1480587/Documents/Chander/Elastic/Data/H-Out.csv"
    codec => line { format => "custom format: %{message}"}
  }
  # Pretty-print each event to the console.
  stdout {
    codec => rubydebug
  }
}