Logstash input multiple files output one index elasticsearch failure


(Maher Glenza) #1

hi ,
Hi, I'm trying to receive multiple logs and, after filtering, have a single index containing all of the filtered logs — but it fails. This is my config file:

# Reads the three log files, tagging each stream with a distinct `type`
# so the filter section can route events to the right grok pattern.
input {
  file {
    # The file input requires forward slashes in `path`, even on Windows:
    # backslashes are interpreted as glob escapes, so "C:\elk\pvin" never
    # matches any file and no events are read at all.
    path => "C:/elk/pvin"
    type => "pvin"
    # Option values must be quoted strings, not barewords.
    start_position => "beginning"
    # Per-input sincedb so each file's read position is tracked separately.
    sincedb_path => "C:/elkstack/ELK/logstash-5.1.2-Pvin/data/plugins/inputs/file.sincedb_5eed3ff4207ce42c69ff2b34b669aa79"
  }
  file {
    path => "C:/elk/router"
    type => "router"
    start_position => "beginning"
    sincedb_path => "C:/elkstack/ELK/logstash-5.1.2-Pvin/data/plugins/inputs/file.sincedb_5eed3ff4207ce42c69ff2b34b669aa78"
  }
  file {
    path => "C:/elk/pvout"
    type => "pvout"
    start_position => "beginning"
    sincedb_path => "C:/elkstack/ELK/logstash-5.1.2-Pvin/data/plugins/inputs/file.sincedb_5eed3ff4207ce42c69ff2b34b669aa77"
  }
}

# Normalizes the pipe-delimited log lines to space-delimited, then greps
# out the per-type fields with a grok pattern specific to each source.
filter {

  if [type] == "router" {

    mutate {
      # gsub patterns are regexes: a bare "|" matches the empty string at
      # every position and would insert a space between every character,
      # garbling the message and breaking the grok match below. Escape it
      # with a character class.
      gsub => ["message", "[|]", " "]
    }

    grok {
      match => ["message","%{TIMESTAMP_ISO8601:dateRouter} : %{UUID:idFlux} %{NUMBER:dateconsommationrouter} %{NUMBER:datefintraitrouter} %{NUMBER:delai} %{WORD:nomFlux} %{WORD:evt} %{GREEDYDATA:messageGenere} %{NUMBER:reforigin} %{NOTSPACE:contractoidval} %{DATA:useroidval} %{NOTSPACE:servname}"]
    }
  }

  else if [type] == "pvin" {

    mutate {
      # Same escaping fix as above: "[|]" matches a literal pipe character.
      gsub => ["message", "[|]", " "]
    }

    grok {
      match => ["message","%{TIMESTAMP_ISO8601:datePvin} : %{UUID:id} %{NUMBER:daterecepPvin} %{NUMBER:datefintraitPvin} %{NUMBER:delai} %{WORD:nomFlux} %{GREEDYDATA:evts} %{WORD:BU} %{NUMBER:reforigin} %{NOTSPACE:contractoidval} %{DATA:useroidval} %{DATA:errorStatus} %{GREEDYDATA:errorDesc} %{NOTSPACE:servname}"]
    }
  }

  else if [type] == "pvout" {

    mutate {
      # Same escaping fix as above: "[|]" matches a literal pipe character.
      gsub => ["message", "[|]", " "]
    }

    grok {
      match => ["message","%{NUMBER:messagId} %{NUMBER:dateEnvoiMsgPvout} %{NUMBER:dateRecepRetourPvout} %{NUMBER:delai} %{WORD:nomFlux} %{WORD:evt} %{NUMBER:reforigin} %{WORD:nomBePays} %{NOTSPACE:contractoidval} %{DATA:useroidval} %{DATA:errorStatus} %{NOTSPACE:errorCode} %{NOTSPACE:errorType} %{GREEDYDATA:errorDesc}"]
    }
  }

}

# Writes all three event types into one daily index. The idFlux field only
# exists on "router" events; for "pvin"/"pvout" the sprintf reference
# %{[idFlux]} does not resolve, so every such document would get the
# literal id "%{[idFlux]}" and overwrite the same single document. Only
# set document_id when the field is actually present.
output {
  if [idFlux] {
    elasticsearch {
      hosts => "localhost:9200"
      index => "promise-%{+YYYY.MM.dd}"
      document_id => "%{[idFlux]}"
    }
  }
  else {
    elasticsearch {
      hosts => "localhost:9200"
      index => "promise-%{+YYYY.MM.dd}"
      # No document_id: let Elasticsearch generate unique ids so pvin/pvout
      # events do not clobber each other.
    }
  }
}

and these are my input lines:
2017-01-18 16:02:24,166 : 1e045e2f-a06b-40c9-954e-cc26b0ead93a|20170118160224|20170118160224|84|CACC|CONTRACT_CREATION|[912296384,aur][912296385,sui]|44|0x7125838BA5F500010001D64F||inpmsrtr1n

912300867|20170118154716095|20170118154716126|31|CUSE_OFR|USER_CREATION|44|erbOFR|0x7125838ba5f600010001d6bf|0x7125838ba5f600010001d6ef|OK||

2017-01-18 16:02:23,362 : 1e045e2f-a06b-40c9-954e-cc26b0ead93a|20170118160223|20170118160223|154|CACC|[CONTRACT_CREATION]|OFR|44|0x7125838BA5F500010001D64F||OK|||inpmsvin1n


(Mark Walkom) #2

Failed how?


(Maher Glenza) #3

No — I had a grok failure; I replaced the pattern and it works now, but I'm still failing to join the logs together.


(system) #4

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.