I updated the ELK stack from 7.8.0 to 7.10.0.
I am facing this error in Logstash:
[ERROR][logstash.javapipeline ][main] Pipeline error {:pipeline_id=>"main", :exception=>
javax.net.ssl.SSLException: failed to initialize the server-side SSL context,
:backtrace=>[
"io.netty.handler.ssl.JdkSslServerContext.newSSLContext(io/netty/handler/ssl/JdkSslServerContext.java:288)",
"io.netty.handler.ssl.JdkSslServerContext.<init>(io/netty/handler/ssl/JdkSslServerContext.java:247)",
"io.netty.handler.ssl.SslContext.newServerContextInternal(io/netty/handler/ssl/SslContext.java:465)",
"io.netty.handler.ssl.SslContextBuilder.build(io/netty/handler/ssl/SslContextBuilder.java:571)",
"jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)",
"jdk.internal.reflect.NativeMethodAccessorImpl.invoke(jdk/internal/reflect/NativeMethodAccessorImpl.java:62)",
"jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(jdk/internal/reflect/DelegatingMethodAccessorImpl.java:43)",
"java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:566)",
"org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(org/jruby/javasupport/JavaMethod.java:426)",
"org.jruby.javasupport.JavaMethod.invokeDirect(org/jruby/javasupport/JavaMethod.java:293)",
"usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java.lib.logstash.inputs.tcp.compat_ssl_options.toSslContext(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-tcp-6.0.6-java/lib/logstash/inputs/tcp/compat_ssl_options.rb:127)",
"usr.share.logstash.vendor.bundle.jruby2_dot_5_dot_0.gems.logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java.lib.logstash.inputs.tcp.compat_ssl_options.RUBY$method$toSslContext$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java/lib/logstash/inputs/tcp//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-tcp-6.0.6-java/lib/logstash/inputs/tcp/compat_ssl_options.rb)",
"usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java.lib.logstash.inputs.tcp.get_ssl_context(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-tcp-6.0.6-java/lib/logstash/inputs/tcp.rb:369)",
"usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java.lib.logstash.inputs.tcp.RUBY$method$get_ssl_context$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java/lib/logstash/inputs//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-tcp-6.0.6-java/lib/logstash/inputs/tcp.rb)",
"usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java.lib.logstash.inputs.tcp.register(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-tcp-6.0.6-java/lib/logstash/inputs/tcp.rb:145)",
"usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java.lib.logstash.inputs.tcp.RUBY$method$register$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_tcp_minus_6_dot_0_dot_6_minus_java/lib/logstash/inputs//usr/share/logstash/vendor/bundle//2.5.0/gems/logstash-input-tcp-6.0.6-java/lib/logstash/inputs/tcp.rb)",
"usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.register_plugins(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:228)", "org.jruby.RubyArray.each(org/jruby/RubyArray.java:1809)",
"usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.register_plugins(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:227)", "usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.RUBY$method$register_plugins$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb)",
"usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.start_inputs(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:386)", "usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.RUBY$method$start_inputs$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb)",
"usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.start_workers(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:311)", "usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.RUBY$method$start_workers$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb)",
"usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.run(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:185)", "usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.RUBY$method$run$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb)",
"usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.start(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:137)",
"org.jruby.RubyProc.call(org/jruby/RubyProc.java:318)",
"java.lang.Thread.run(java/lang/Thread.java:834)"],
"pipeline.sources"=>["/etc/logstash/conf.d/pipeline.conf"],
:thread=>"#<Thread:0x18f5cec9 run>"}
My pipeline.conf looks like this:
# Input section: two plaintext UDP listeners and one TLS-enabled TCP listener.
# UDP 5001 and TCP 5001 sharing a port number is fine — they are different protocols.
input {
# Syslog traffic over UDP.
udp {
port => 5000
type => syslog
}
# JSON traffic over UDP.
udp {
port => 5001
type => json
}
# JSON traffic over TCP with TLS termination (logstash-input-tcp plugin).
# NOTE(review): the "failed to initialize the server-side SSL context" error in the
# pasted trace originates from this block (tcp.rb:145 register -> get_ssl_context).
# After an upgrade this is commonly a key/cert format or chain problem — verify that
# ssl_key is an unencrypted PEM private key the bundled JDK accepts (PKCS#8 is the
# safe choice) and that ssl_cert / ssl_extra_chain_certs are valid PEM. TODO confirm.
tcp {
port => 5001
type => json
ssl_enable => true
ssl_key => "....."
ssl_cert => "...."
ssl_extra_chain_certs => ["...."]
# Do not request/verify client certificates.
ssl_verify => false
# Tag TLS-received events so they can be distinguished downstream.
add_field => {"ssl" => "on"}
}
}
## Add your filters / logstash plugins configuration here
# Filter section.
# FIX: the original had an extra "}" after the token-adding mutate, which closed
# the filter section early and left the two trailing mutate blocks (and the final
# "}") outside any section — an unparseable config. All mutates now sit inside
# one balanced filter block; no filter logic was changed.
filter {
# Drop the transport-level port field from every event.
mutate {
remove_field => [ "port" ]
}
if [type] == "syslog" {
# Parse the pipe-delimited syslog envelope, keeping only the payload as "message".
grok {
match => { "message" => "\A%{TIMESTAMP_ISO8601:tmptimestamp}\|%{HOSTNAME:cluster}\|%{HOSTNAME:hostname}\|%{HOSTNAME:app}\|%{GREEDYDATA:message}\Z" }
overwrite => [ "message" ]
}
# Use the envelope timestamp as the event @timestamp, then discard the temp field.
date {
match => [ "tmptimestamp", "ISO8601" ]
remove_field => [ "tmptimestamp" ]
}
}
# Attach the shipping token to every event.
mutate {
add_field => { "token" => "JnQKKkJNnTmNRpuremJWXQMLFQAVKlwh" }
}
# Preserve _id in @metadata (not indexed) and remove it from the event body.
mutate {
copy => { "_id" => "[@metadata][_id]" }
remove_field => ["_id"]
}
# Strip the host field and a list of unwanted application fields.
mutate {
remove_field => ["host"]
remove_field => ["CancelPenalty","Search","Offer","Rateplan","Success","PropertyChangedMessage","Person","day","Rate","SearchRequest","ExternalRatesUpdate","SalesforceOpportunity","Tax"]
}
}
# Output section.
output {
# Dump syslog events that failed grok parsing to a local file for inspection.
# NOTE(review): the file output plugin likely does not expand "~" to $HOME —
# confirm and prefer an absolute path such as /var/log/logstash/failed_syslog_events-%{+YYYY-MM-dd}.
if [type] == "syslog" and "_grokparsefailure" in [tags] {
file { path => "~/log/failed_syslog_events-%{+YYYY-MM-dd}" }
}
# All events (including the failed ones above) are indexed into local Elasticsearch.
elasticsearch {
hosts => "localhost:9200"
}
}