Hi,
We are using WebSphere Application Server. The log4j logs for our dev, QA, and Production environments are exposed over HTTP (http://ausu372a.wm.com/devlogs/alvdapp002/WAS70/logs/ERLWMLTMSserver1/SystemOut.log). I need to read those logs and send them to Logstash. While researching on the internet, I found that http_poller should do this job, so I configured my Logstash config file as shown below. On the Logstash console I can see that it is reading the log file, but in Kibana I am not seeing any data.
input {
  http_poller {
    # Poll the WebSphere SystemOut.log files over HTTP.
    # NOTE: http_poller downloads the WHOLE file on every poll, so the same
    # lines are re-ingested every minute; consider Filebeat on the host or
    # fingerprint-based deduplication downstream.
    urls => {
      # NOTE(review): the original config also polled
      # test1 => "http://localhost:9200" — that is the Elasticsearch root
      # endpoint, not a log file; it fed the ES banner JSON back into ES
      # every minute, so it has been removed.
      test2 => {
        # Supports all options supported by ruby's Manticore HTTP client
        method => get
        url => "http://ausu372a.wm.com/devlogs/alvdapp002/WAS70/logs/ERLWMLTMSserver1/SystemOut.log"
      }
      test3 => {
        # Supports all options supported by ruby's Manticore HTTP client
        method => get
        url => "http://ausu372a.wm.com/devlogs/alvdapp003/WAS70/logs/ERLWMLTMSserver2/SystemOut.log"
      }
    }
    request_timeout => 60
    # Supports "cron", "every", "at" and "in" schedules by rufus scheduler
    schedule => { every => "1m" }
    codec => multiline {
      # Grok pattern names are valid!
      # NOTE(review): WebSphere SystemOut.log lines typically begin with
      # "[1/13/16 10:15:02:123 CST]", which is NOT ISO8601. If this pattern
      # never matches, negate+previous folds the entire response into one
      # pending event — verify against a real log line.
      pattern => "^%{TIMESTAMP_ISO8601} "
      negate => true
      what => previous
      # Flush the pending multiline buffer after 5s of inactivity so the
      # last (or only) buffered event is emitted. Without this, an event
      # can sit in the buffer forever and nothing reaches Elasticsearch —
      # the likely cause of "Logstash reads the file but Kibana is empty".
      auto_flush_interval => 5
    }
    # A hash of request metadata info (timing, response headers, etc.) will be sent here
    metadata_target => "http_poller_metadata"
  }
}
filter {
  # Parse WebSphere/log4j-style lines: "<timestamp> [LEVEL](method) message".
  # The original %{COMBINEDAPACHELOG} grok was removed: SystemOut.log contains
  # application logs, not Apache access logs, so that pattern failed on every
  # event and tagged them all with _grokparsefailure.
  grok {
    match => { "message" => "%{TIMESTAMP_ISO8601:time} \[%{DATA:loglevel}\]\(%{DATA:method}\)%{GREEDYDATA:msgbody}" }
    # NOTE(review): if the real timestamp format is WebSphere's
    # "[1/13/16 10:15:02:123 CST]", TIMESTAMP_ISO8601 will not match —
    # confirm against an actual log line and adjust the pattern if needed.
  }
}
output {
  # Ship every event to the local Elasticsearch instance (default index).
  elasticsearch {
    hosts => [ "localhost:9200" ]
  }

  # Also print each event to the console in ruby-debug form for troubleshooting.
  stdout {
    codec => rubydebug
  }
}