I am trying to set up Filebeat and Logstash on server1 and send data to Elasticsearch located on server2, then visualize it using Kibana.
Following are the Filebeat logs; when I ran `filebeat test output`, it showed the result shown in the image below.
As you can observe, Filebeat is not receiving logs at all... is this because of the error in Logstash?
2020-07-10T07:40:14.852Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:40:14.852Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:40:14.852Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:40:24.853Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:40:24.853Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:40:24.853Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:40:34.853Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:40:34.853Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:40:34.853Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:40:44.828Z INFO [monitoring] log/log.go:145 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":20},"total":{"ticks":170,"time":{"ms":4},"value":170},"user":{"ticks":150,"time":{"ms":4}}},"handles":{"limit":{"hard":1048576,"soft":1024},"open":7},"info":{"ephemeral_id":"4f97f60d-b9f4-451c-b9f2-1935988798b1","uptime":{"ms":840027}},"memstats":{"gc_next":10220512,"memory_alloc":5959576,"memory_total":24826152},"runtime":{"goroutines":21}},"filebeat":{"harvester":{"open_files":0,"running":0}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":0}},"system":{"load":{"1":0.03,"15":0.08,"5":0.06,"norm":{"1":0.015,"15":0.04,"5":0.03}}}}}}
2020-07-10T07:40:44.853Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:40:44.856Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:40:44.856Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:40:54.856Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:40:54.856Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:40:54.856Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:41:04.856Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:41:04.856Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:41:04.856Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:41:14.828Z INFO [monitoring] log/log.go:145 Non-zero metrics in the last 30s {"monitoring": {"metrics": {"beat":{"cpu":{"system":{"ticks":20},"total":{"ticks":180,"time":{"ms":8},"value":180},"user":{"ticks":160,"time":{"ms":8}}},"handles":{"limit":{"hard":1048576,"soft":1024},"open":7},"info":{"ephemeral_id":"4f97f60d-b9f4-451c-b9f2-1935988798b1","uptime":{"ms":870027}},"memstats":{"gc_next":10216448,"memory_alloc":5117872,"memory_total":25101640},"runtime":{"goroutines":21}},"filebeat":{"harvester":{"open_files":0,"running":0}},"libbeat":{"config":{"module":{"running":0}},"pipeline":{"clients":1,"events":{"active":0}}},"registrar":{"states":{"current":0}},"system":{"load":{"1":0.02,"15":0.08,"5":0.05,"norm":{"1":0.01,"15":0.04,"5":0.025}}}}}}
2020-07-10T07:41:14.856Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:41:14.856Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:41:14.856Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:41:24.856Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:41:24.856Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:41:24.856Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
2020-07-10T07:41:34.857Z DEBUG [input] input/input.go:141 Run input
2020-07-10T07:41:34.857Z DEBUG [input] log/input.go:191 Start next scan
2020-07-10T07:41:34.857Z DEBUG [input] log/input.go:212 input states cleaned up. Before: 0, After: 0, Pending: 0
Following are the Logstash logs, where I am getting some exceptions:
[WARN ] 2020-07-10 07:26:21.802 [nioEventLoopGroup-2-2] DefaultChannelPipeline - An
exceptionCaught() event was fired, and it reached at the tail of the pipeline. It usually means the last handler in the pipeline did not handle the exception.
io.netty.handler.codec.DecoderException: org.logstash.beats.InvalidFrameProtocolException: Invalid version of beats protocol: -1
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:472) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:278) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.channel.AbstractChannelHandlerContext.access$600(AbstractChannelHandlerContext.java:38) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.channel.AbstractChannelHandlerContext$7.run(AbstractChannelHandlerContext.java:353) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.util.concurrent.DefaultEventExecutor.run(DefaultEventExecutor.java:66) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:897) [netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) [netty-all-4.1.30.Final.jar:4.1.30.Final]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_252]
Caused by: org.logstash.beats.InvalidFrameProtocolException: Invalid version of beats protocol: -1
at org.logstash.beats.Protocol.version(Protocol.java:22) ~[logstash-input-beats-6.0.9.jar:?]
at org.logstash.beats.BeatsParser.decode(BeatsParser.java:62) ~[logstash-input-beats-6.0.9.jar:?]
at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:502) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:441) ~[netty-all-4.1.30.Final.jar:4.1.30.Final]
... 8 more
filebeat.yml
# Filebeat input/output configuration.
# NOTE(review): as pasted, every key was at column 0, which makes
# `enabled`, `paths`, etc. top-level keys instead of children of the
# input — Filebeat would then harvest nothing (consistent with
# harvester open_files:0 / running:0 in the logs). Proper nesting
# restored below.
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      # Glob must actually match existing, readable *.log files;
      # if nothing matches, the input scans forever with 0 states.
      - /home/mahesh/Documents/refactor/nomi/unity/media/*.log

output.logstash:
  enabled: true
  # Must point at the host/port where the Logstash beats input listens.
  hosts: ["localhost:5044"]
logstash.conf
input {
beats {
port => 5044
ssl => false
}
}
filter {
grok {
match => { "message" => "%{TIMESTAMP_ISO8601:timestamp}] %{LOGLEVEL:loglevel}\|%{GREEDYDATA:module}\|%{GREEDYDATA:content}" }
}
date {
locale => "en"
match => [ "timestamp", "YYYY-MM-dd HH:mm:ss"]
target => "@timestamp"
timezone => "America/New_York"
}
}
output {
elasticsearch {
hosts => "elk_server_ip:9200"
manage_template => false
index => "blend_test"
}
stdout { codec => rubydebug { metadata => true } }
}