Kafka: 0.10.0.1
Logstash: 5.0.1
Java: 1.8.0_111
OS: CentOS 6.5
[2016-11-28T10:28:38,662][INFO ][org.apache.kafka.common.utils.AppInfoParser] Kafka version : 0.10.0.1
[2016-11-28T10:28:38,663][INFO ][org.apache.kafka.common.utils.AppInfoParser] Kafka commitId : a7a17cdec9eaa6c5
[2016-11-28T10:28:38,663][WARN ][org.apache.kafka.common.utils.AppInfoParser] Error registering AppInfo mbean
javax.management.InstanceAlreadyExistsException: kafka.consumer:type=app-info,id=logstash
<<...>>
Exception in thread "Ruby-0-Thread-15: /data/dmp/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-kafka-5.0.6/lib/logstash/inputs/kafka.rb:191" org.apache.kafka.common.protocol.types.SchemaException: Error reading field 'brokers': Error reading field 'host': Error reading string of length 27489, only 114 bytes available
at org.apache.kafka.common.protocol.types.Schema.read(org/apache/kafka/common/protocol/types/Schema.java:73)
at org.apache.kafka.clients.NetworkClient.parseResponse(org/apache/kafka/clients/NetworkClient.java:380)
at org.apache.kafka.clients.NetworkClient.handleCompletedReceives(org/apache/kafka/clients/NetworkClient.java:449)
at org.apache.kafka.clients.NetworkClient.poll(org/apache/kafka/clients/NetworkClient.java:269)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.clientPoll(org/apache/kafka/clients/consumer/internals/ConsumerNetworkClient.java:360)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(org/apache/kafka/clients/consumer/internals/ConsumerNetworkClient.java:224)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(org/apache/kafka/clients/consumer/internals/ConsumerNetworkClient.java:192)
at org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient.poll(org/apache/kafka/clients/consumer/internals/ConsumerNetworkClient.java:163)
at org.apache.kafka.clients.consumer.internals.AbstractCoordinator.ensureCoordinatorReady(org/apache/kafka/clients/consumer/internals/AbstractCoordinator.java:179)
at org.apache.kafka.clients.consumer.KafkaConsumer.pollOnce(org/apache/kafka/clients/consumer/KafkaConsumer.java:974)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(org/apache/kafka/clients/consumer/KafkaConsumer.java:938)
at java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:498)
at RUBY.thread_runner(/data/dmp/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-kafka-5.0.6/lib/logstash/inputs/kafka.rb:201)
at java.lang.Thread.run(java/lang/Thread.java:745)
[2016-11-28T10:28:41,395][WARN ][logstash.agent ] stopping pipeline {:id=>"main"}
The configuration file used is:
input {
  kafka {
    bootstrap_servers => "kafka1.dmp.com:9092"
    topics => ["80-client-lz.rili.cn"]
    group_id => "logstash"
    client_id => "logstash"
    codec => plain
    consumer_threads => 3
    decorate_events => true
    type => "nginx-access"
  }
}
At first the config also had a filter section and an elasticsearch output; after removing them, startup still fails with the same error.
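For anyone trying to reproduce this, a plain stdout output (assumed here purely as a placeholder sink for debugging, it is not part of my original config) can stand in for the removed elasticsearch output while testing the kafka input above:

output {
  # placeholder sink for debugging only; replaces the original elasticsearch output
  stdout { codec => rubydebug }
}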