CSV Timestamp issues

This is what I have now.

# Read flight-telemetry CSV exports from the flights directory.
# NOTE(review): with the default sincedb, "beginning" only applies the first
# time a file is seen — already-read files are not re-read on a pipeline
# restart. If full re-imports are needed, set sincedb_path; confirm intent.
input {
file {
path => "/home/bkelley6/flights/*.csv"
type => "flights"
start_position => "beginning"
}
}

filter {
  # Split each CSV row into named telemetry fields.
  csv {
    columns => ["Date", "Time", "SWR", "RSSI(dB)", "RxBt(V)", "Cels(gRe)", "Tmp2(@C)", "RPM(rpm)", "Tmp1(@C)", "Rud", "Ele", "Thr", "Ail", "S1", "S2", "S3", "LS", "RS", "SA", "SB", "SC", "SD" ,"SE", "SF", "SG", "SH"]
    separator => ","
  }

  # Combine the separate Date and Time columns into one "date" field
  # (e.g. "2016-05-14 13:02:11.320") for the date filter below.
  mutate {
    replace => [ "date", "%{Date} %{Time}" ]
  }

  # Parse the combined field into @timestamp.
  # FIX: the original pattern used "hh" (Joda 12-hour clock, 01-12), which
  # fails to parse any time of 13:00 or later and tags the event with
  # _dateparsefailure. "HH" is the 24-hour-clock hour (00-23).
  date {
    locale => "en"
    match => [ "date", "YYYY-MM-dd HH:mm:ss.SSS" ]
    timezone => "America/New_York"
    target => "@timestamp"
  }
}
# Ship parsed events to the local Elasticsearch node.
output {
elasticsearch {
action => "index"
hosts => ["localhost:9200"]
# One daily index, named from the event's @timestamp.
index => "logstash-%{+YYYY.MM.dd}"
# NOTE(review): the per-output "workers" setting was deprecated and later
# removed in newer Logstash releases — confirm against the target version.
workers => 1
}
}