I use Logstash and Filebeat to ingest into Elasticsearch. Ingestion works, but I get a _dateparsefailure tag.
input {
    beats {
        port => "5044"
    }
}

filter {
    csv {
        separator => ";"
        columns => ["chaine", "job", "date_plan", "statut", "date_debut", "date_fin", "serveur", "numero_passage", "application", "sous_application"]
    }
    date {
        match => [ "date_plan", "YYYY-MM-dd" ]
    }
    date {
        match => [ "date_debut", "YYYY-MM-dd HH:mm:ss" ]
    }
    date {
        match => [ "date_fin", "YYYY-MM-dd HH:mm:ss" ]
    }
    mutate {
        convert => { "numero_passage" => "integer" }
    }
    fingerprint {}
}

output {
    elasticsearch {
        hosts => "http://localhost:9200"
        index => "hello"
    }
    stdout { codec => rubydebug }
}
            
                   "date_plan" => "31/03/2020",
          "date_debut" => "01/04/2020 09:28",
         "fingerprint" => "41221aaa7b073606bc1607689508c5a1503db5c8",
              "statut" => "OK",
                 "log" => {
          "file" => {
            "path" => "C:\\Users\\h83710\\Desktop\\elastic\\logstash-7.5.2\\test\\FICHIER.csv"
        },
        "offset" => 436
    },
      "numero_passage" => 0,
                 "ecs" => {
        "version" => "1.4.0"
    },
    "sous_application" => "DMT_FLUX_IP",
            "@version" => "1",
                "tags" => [
        [0] "beats_input_codec_plain_applied",
        [1] "_dateparsefailure"
    ],
            "date_fin" => "01/04/2020 09:28",
         "message" => "RNAH20P;RNAH20PB;31/03/2020;OK;01/04/2020 09:28;01/04/2020 09:28;m-mvs.macif.fr;0;SID_SINISTRE;DMT_FLUX_IP",
           "agent" => {
        "hostname" => "C06513361",
              "id" => "c19672a9-bf8b-4bf7-8d01-10f24fe51d7a",
         "version" => "7.6.1",
            "type" => "filebeat",
    "ephemeral_id" => "4d0d2bcb-7cad-40ed-9c0e-35fb4c99cad3"
},
         "serveur" => "m-mvs.macif.fr",
     "application" => "SID_SINISTRE",
      "@timestamp" => 2020-05-09T10:07:48.185Z,
           "input" => {
    "type" => "log"
},
          "chaine" => "RNAH20P",
            "host" => {
    "name" => "C06513361"
},
             "job" => "RNAH20PB"
} 
            
None of the date patterns you have specified match the data you are receiving. date_plan has the day, month, and year in the wrong order and uses a different delimiter (31/03/2020, not YYYY-MM-dd), and date_debut and date_fin have the same format mismatch plus missing seconds (01/04/2020 09:28, not YYYY-MM-dd HH:mm:ss).
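A minimal sketch of corrected date filters, assuming the CSV always writes dates as day/month/year and the timestamps never carry seconds; the target options are my addition so each parsed value is written back to its own field instead of all three filters overwriting @timestamp:

date {
    # day/month/year, slash-delimited, e.g. 31/03/2020
    match  => [ "date_plan", "dd/MM/yyyy" ]
    target => "date_plan"
}
date {
    # day/month/year plus hours and minutes, no seconds, e.g. 01/04/2020 09:28
    match  => [ "date_debut", "dd/MM/yyyy HH:mm" ]
    target => "date_debut"
}
date {
    match  => [ "date_fin", "dd/MM/yyyy HH:mm" ]
    target => "date_fin"
}

If you do want one of these to drive @timestamp, drop the target option on that filter and keep it on the other two.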
              
system (system) closed this topic on June 6, 2020, 1:31pm
              This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.