Individual index for each csv

Here is my current Logstash config. What I would like is for each file's events to go into its own index, prefixed with a fixed string — for example:

string+Jan01
string+Jan02
and so on.

Any help is appreciated!

input {
    file {
        type => "csv"
        path => "/home/callum/Desktop/usb/*.csv"
        # Quoted string: bareword option values are fragile in the Logstash
        # config language; the documented form is "beginning".
        start_position => "beginning"
        # NOTE(review): start_position only applies to files the plugin has
        # not seen before — once a file is recorded in the sincedb, it will
        # not be re-read from the start. When re-testing against the same
        # CSVs, set sincedb_path => "/dev/null" to force a full re-read.
    }
}


filter {
        # Split each CSV line into named fields.
        csv {
		separator => ","
		columns => ["Date","Network","Event Type","Device","Username","File","Size"]
  }
        # Parse the "Date" column into @timestamp so downstream date math
        # (e.g. %{+HH} below, or date-based index names) uses the event's
        # own time rather than ingestion time.
        date {
		match => ["Date", "yyyy/MM/dd HH:mm:ss ZZZ"]
  }
        mutate {
		# Hour-of-day extracted from @timestamp (string at this point;
		# converted to integer in the next mutate).
		add_field => {"hour"=>"%{+HH}"}
		remove_field => ["message","path","host","type","@version"]
		# lowercase operates on field VALUES, not field names, so
		# "Size" etc. keep their capitalized keys.
		lowercase => ["Date","Event Type","Device","Username","File","Size"]
  }
	mutate {
		# Single convert hash: declaring the `convert` option twice in
		# one mutate relies on implicit option merging — the documented
		# form is one hash with all conversions.
		convert => {
			"hour" => "integer"
			"Size" => "integer"
		}
    }
}
output {
        elasticsearch {
        hosts => "http://localhost:9200"
        # One index per event date (e.g. "lumension-2024.01.01"), built via
        # sprintf date interpolation on @timestamp. This is the per-file/
        # per-day split the hard-coded "lumension" index could not provide.
        # NOTE(review): Elasticsearch index names must be lowercase, so a
        # "Jan01"-style suffix (%{+MMMdd}) would be rejected at index
        # creation — use a numeric date pattern instead.
        index => "lumension-%{+yyyy.MM.dd}"
     }
stdout { codec => rubydebug }
    }

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.