Error ingestion two csv in elastic search

Hello it is possible to ingest of two csv files in elastic using file beat and logstsh with the same index

Yes.

I use this config logstsh

input {
beats {
host => "localhost"
port => "5044"
 }
}                  

filter {
csv {
    separator => ";"
	columns => [ "chaine", "job", "date_plan", "statut", "date_debut", "date_fin", "serveur", "numero_passage", "application", "sous_application", "date fraicheur" ]
}

    if [sous_application] == "DWH PRODUCTION" {
     mutate {
     add_field => { "UNIVERS" => "Entrepôt de données Production" }
     }
}
     if [statut] == "OK" {
     mutate {
     add_field => { "statut_globale" => "1" }
     }
     }

   if [statut] == "EN-ERREUR" {
     mutate {
     add_field => { "statut_globale" => "0" }
     }	  	  	  
    }

     if [statut] == "EN-ATTENTE" {
     mutate {
     add_field => { "statut_globale" => "3" }
     }	  	  	  
  }

   date {
   match => [ "date_plan" , "YYYY-MM-dd" ]
   timezone => "Europe/Paris"
   }

 date {
 match => [ "date_debut" , "YYYY-MM-dd HH:mm:ss" ]
 timezone => "Europe/Paris"
}

date {
match => [ "date_fin" , "YYYY-MM-dd HH:mm:ss" ]
timezone => "Europe/Paris"
 }

date {
match => [ "date fraicheur" , "YYYY-MM-dd" ]
timezone => "Europe/Paris"
}

}

output {
stdout {
codec => rubydebug
}
   elasticsearch { 
   hosts => "http://localhost:9200" 
   index => "v00"
   }
 }

I have a erroe: the the column date fraicheur not creat in elastic search

Bonjour :wink:

May I recommend you change date fraicheur to date_fraicheur.

I tested this but the date_fraicheur column is not created in elastic

Can you share the full logstash configuration file and the first 10 lines of your CSV file?

this is the first CSV file

this is the second CSV file

and the full logstash configuration file is :

input {
beats {
host => "localhost"
port => "5044"
 }
}                  

filter {
csv {
separator => ";"
columns => [ "chaine", "job", "date_plan", "statut", "date_debut", "date_fin", "serveur", "numero_passage", "application", "sous_application", "date fraicheur" ]
}

if [sous_application] == "DWH PRODUCTION" {
 mutate {
 add_field => { "UNIVERS" => "Entrepôt de données Production" }
 }
} 
 if [statut] == "OK" {
  mutate {
 add_field => { "statut_globale" => "1" }
 }
 }

if [statut] == "EN-ERREUR" {
 mutate {
 add_field => { "statut_globale" => "0" }
 }	  	  	  
}

 if [statut] == "EN-ATTENTE" {
 mutate {
 add_field => { "statut_globale" => "3" }
 }	  	  	  
 }

 date {
 match => [ "date_plan" , "YYYY-MM-dd" ]
 timezone => "Europe/Paris"
 }

date {
match => [ "date_debut" , "YYYY-MM-dd HH:mm:ss" ]
timezone => "Europe/Paris"
}

 date {
 match => [ "date_fin" , "YYYY-MM-dd HH:mm:ss" ]
 timezone => "Europe/Paris"
}

date {
match => [ "date fraicheur" , "YYYY-MM-dd" ]
timezone => "Europe/Paris"
}

}

 output {
 stdout {
 codec => rubydebug
}
   elasticsearch { 
   hosts => "http://localhost:9200" 
   index => "v00"
 }
}

Any idea???

Any Idea???