Logstash pipeline for CSV file using Filebeat

Hi, I've spent too much time on this and would really appreciate any help. My CSV file is loading as one record.

My data is CSV:

Number,Category,Assignment group,Technology,Assigned to,Opened,O_Month,Opened Day,Opened Date,Opened Time,Duration,Durn in hrs,Floor,TimeWindow,Closed,C_Month,PriorZZZy,Short description,Configuration ZZZem,Attribute,Env
INC0403021,Application,ZZZ MS-SQL Admin,MS_SQL,Adam SmZZZh,2019-06-30 23:05:55,Jun,Sunday,06/30/19,11:05 PM,28531,7.9,10:00 PM,10 PM - 12 AM,1/0/1900,Jan,3 - Low,Control-M DS: DMS_XXXsqldba0040_YYYY_DL_XXX_P - RC 1 - Ended not OK 6/30/2019 11:05:08 PM,Database - SQL Server,Backup,Prod
INC0403009,Application,ZZZ MS-SQL Admin,MS_SQL,Adam SmZZZh,2019-06-30 19:33:47,Jun,Sunday,06/30/19,7:33 PM,41202,11.4,6:00 PM,6 PM - 8 PM,1/0/1900,Jan,3 - Low,Control-M DS: DS_XXXsqldba0015_TRANSLOG_BCK_PROD - RC 1 - Ended not OK 6/30/2019 7:13:53 PM,Database - SQL Server,Backup,Prod

This is the Logstash pipeline:

input {
  beats {
    port => 5045
    ssl => true
    ssl_certificate_authorities => ["xxxxxx.pem"]
    ssl_certificate => "/etc/logstash/SSL/logstash_xxxxx.pem"
    ssl_key => "/etc/logstash/SSL/logstash_xxxxx.key"
  }
}

filter {
  if "inc" in [tags] {
    csv {
      columns => ["Number","Category","Assignment group","Technology","Assigned to","Opened","O_Month","Opened Day","Opened Date","Opened Time","Duration","Durn in hrs","Floor","TimeWindow","Closed","C_Month","Priority","Short Description","Configuration","Attribution","Env"]
      skip_header => true
    }

    # The date filter parses "Opened" into a timestamp so that charts in Kibana plot by date
    date {
      match => [ "Opened", "yyyy-MM-dd HH:mm:ss" ]
      timezone => "America/New_York"
      target => "Opened"
    }
  }
}

output {
  if "inc" in [tags] {
    elasticsearch {
      hosts => "localhost:9200"
      manage_template => false
      index => "inc-%{+YYYY.MM.dd}"
    }
  }
}

I think this is because Logstash looks for a newline at the end of the line and only then treats it as a new record.
Please add a newline (press Enter) at the end of row 2 and try again.

input {
  file {
    # Set this relative to your log folder...
    path => "C:/Users/xxxxxxx/Desktop/test.csv"
    start_position => "beginning"
  }
}

filter {
  csv {
    autodetect_column_names => true
    skip_header => true
    separator => ","
  }

  date {
    match => [ "Opened", "yyyy-MM-dd HH:mm:ss" ]
    timezone => "America/New_York"
    target => "Opened"
  }
} # End of filter

output {
  stdout { codec => rubydebug }
}
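
If you run this pipeline in the foreground (for example with bin/logstash -f followed by the path to the .conf file), the rubydebug codec prints each parsed row to the console as a separate event.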

Following is the output:

{
"Category" => "Application",
"C_Month" => "Jan",
"PriorZZZy" => "3 - Low",
"Technology" => "MS_SQL",
"Assignment group" => "ZZZ MS-SQL Admin",
"Duration" => "28531",
"Attribute" => "Backup",
"Short description" => "Control-M DS: DMS_XXXsqldba0040_YYYY_DL_XXX_P - RC 1 - Ended not OK 6/30/2019 11:05:08 PM",
"Env" => "Prod",
"path" => "C:/Users/kkulkarni/Desktop/test.csv",
"Durn in hrs" => "7.9",
"O_Month" => "Jun",
"Closed" => "1/0/1900",
"TimeWindow" => "10 PM - 12 AM",
"Assigned to" => "Adam SmZZZh",
"Opened" => 2019-07-01T03:05:55.000Z,
"host" => "HYDLRAP000119",
"Number" => "INC0403021",
"Floor" => "10:00 PM",
"Opened Day" => "Sunday",
"@timestamp" => 2019-11-06T05:24:12.679Z,
"Opened Date" => "06/30/19",
"Opened Time" => "11:05 PM",
"message" => "INC0403021,Application,ZZZ MS-SQL Admin,MS_SQL,Adam SmZZZh,2019-06-30 23:05:55,Jun,Sunday,06/30/19,11:05 PM,28531,7.9,10:00 PM,10 PM - 12 AM,1/0/1900,Jan,3 - Low,Control-M DS: DMS_XXXsqldba0040_YYYY_DL_XXX_P - RC 1 - Ended not OK 6/30/2019 11:05:08 PM,Database - SQL Server,Backup,Prod\r",
"@version" => "1",
"Configuration ZZZem" => "Database - SQL Server"
}
{
"Category" => "Application",
"C_Month" => "Jan",
"PriorZZZy" => "3 - Low",
"Technology" => "MS_SQL",
"Assignment group" => "ZZZ MS-SQL Admin",
"Duration" => "41202",
"Attribute" => "Backup",
"Short description" => "Control-M DS: DS_XXXsqldba0015_TRANSLOG_BCK_PROD - RC 1 - Ended not OK 6/30/2019 7:13:53 PM",
"Env" => "Prod",
"path" => "C:/Users/kkulkarni/Desktop/test.csv",
"Durn in hrs" => "11.4",
"O_Month" => "Jun",
"Closed" => "1/0/1900",
"TimeWindow" => "6 PM - 8 PM",
"Assigned to" => "Adam SmZZZh",
"Opened" => 2019-06-30T23:33:47.000Z,
"host" => "HYDLRAP000119",
"Number" => "INC0403009",
"Floor" => "6:00 PM",
"Opened Day" => "Sunday",
"@timestamp" => 2019-11-06T05:24:26.816Z,
"Opened Date" => "06/30/19",
"Opened Time" => "7:33 PM",
"message" => "INC0403009,Application,ZZZ MS-SQL Admin,MS_SQL,Adam SmZZZh,2019-06-30 19:33:47,Jun,Sunday,06/30/19,7:33 PM,41202,11.4,6:00 PM,6 PM - 8 PM,1/0/1900,Jan,3 - Low,Control-M DS: DS_XXXsqldba0015_TRANSLOG_BCK_PROD - RC 1 - Ended not OK 6/30/2019 7:13:53 PM,Database - SQL Server,Backup,Prod\r",
"@version" => "1",
"Configuration ZZZem" => "Database - SQL Server"
}

Hi, thank you for your response. The issue is still there. I should have mentioned that in our setup, Filebeat is installed on a Windows server and pushes the file to Elasticsearch.

From the Filebeat server log:
"@timestamp": "2019-11-06T15:24:05.078Z",
"@metadata": {
"beat": "",
"type": "_doc",
"version": ""
},
"message": "Number,Category,Assignment group,Technology,Assigned to,Opened,O_Month,Opened Day,Opened Date,Opened Time,Duration,Durn in hrs,Floor,TimeWindow,Closed,C_Month,PriorZZZy,Short description,Configuration ZZZem,Attribute,Env
INC0403021,Application,ZZZ MS-SQL Admin,MS_SQL,Adam SmZZZh,2019-06-30 23:05:55,Jun,Sunday,06/30/19,11:05 PM,28531,7.9,10:00 PM,10 PM - 12 AM,1/0/1900,Jan,3 - Low,Control-M DS: DMS_XXXsqldba0040_YYYY_DL_XXX_P - RC 1 - Ended not OK 6/30/2019 11:05:08 PM,Database - SQL Server,Backup,Prod
INC0403009,Application,ZZZ MS-SQL Admin,MS_SQL,Adam SmZZZh,2019-06-30 19:33:47,Jun,Sunday,06/30/19,7:33 PM,41202,11.4,6:00 PM,6 PM - 8 PM,1/0/1900,Jan,3 - Low,Control-M DS: DS_XXXsqldba0015_TRANSLOG_BCK_PROD - RC 1 - Ended not OK 6/30/2019 7:13:53 PM,Database - SQL Server,Backup,Prod"
"tags": [
"incident-mgmt"
],
"input": {
"type": "log"
},
"ecs": {
"version": "1.0.0"
},
"host": {
"os": {
"version": "10.0",
"family": "windows",
"name": "Windows Server 2016 Standard",
"kernel": "10.0.14393.3269 (rs1_release.190929-1234)",
"build": "14393.3274",
"platform": "windows"
},
"id": "c9ee2f53-8d44-4d1a-a813-7e4bed086071",
"hostname": "windows2016svr01",
"architecture": "x86_64",
"name": "windows2016svr01"
},
"agent": {
"ephemeral_id": "f8fdb5b5-027c-4c42-a65a-24e6c7de59a6",
"hostname": "windows2016svr01",
"id": "3fa94c24-af18-4672-9f07-1cd34d4a32e9",
"version": "7.0.0",
"type": "filebeat"
},
"log": {
"offset": 0,
"file": {
"path": "D:\ELK_Logs-data\inc6m-formatted6.csv"
},
"flags": [
"truncated",
"multiline"
]
}
}
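
The log.flags field in that event contains "multiline" (and "truncated"), which means Filebeat itself merged the CSV header and both data rows into a single event before sending it to Logstash, so the csv filter only ever sees one record. A minimal sketch of a Filebeat input for this file with no multiline settings, so that each newline-terminated row is shipped as its own event, might look like the following (the path and tag are taken from the log above; the Logstash host and certificate name are placeholders):

filebeat.inputs:
  - type: log
    paths:
      - 'D:\ELK_Logs-data\inc6m-formatted6.csv'
    tags: ["incident-mgmt"]
    # No multiline.* options here, so every newline-terminated CSV row becomes one event.
    # Optional: drop the header line at the source instead of relying on skip_header:
    #exclude_lines: ['^Number,Category,']

output.logstash:
  hosts: ["your-logstash-host:5045"]   # placeholder host; port matches the beats input above
  ssl.certificate_authorities: ["xxxxxx.pem"]

If the existing filebeat.yml has multiline.pattern / multiline.negate / multiline.match settings on this input, removing them (and making sure the last row of the file ends with a newline) should make each row arrive in Logstash as a separate message.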
