We are running Elasticsearch in Docker. After upgrading Elasticsearch, Kibana can no longer connect to it. We have a premium license and X-Pack security configured. I inherited this setup and honestly do not understand the configuration all that well; I suspect my security/TLS settings need to change after the upgrade. Is there a way to reset the X-Pack security configuration and start from scratch?
The Elasticsearch cluster itself is up and healthy:
[root@elasticsearch etc]# curl -k -u elastic -XGET 'https://localhost/_cluster/health?pretty' Enter host password for user 'elastic': { "cluster_name" : "logging", "status" : "green", "timed_out" : false, "number_of_nodes" : 1, "number_of_data_nodes" : 1, "active_primary_shards" : 925, "active_shards" : 925, "relocating_shards" : 0, "initializing_shards" : 0, "unassigned_shards" : 0, "delayed_unassigned_shards" : 0, "number_of_pending_tasks" : 0, "number_of_in_flight_fetch" : 0, "task_max_waiting_in_queue_millis" : 0, "active_shards_percent_as_number" : 100.0 }
Relevant excerpt from the Elasticsearch log (note the SSL handshake failure at the end):
{"type": "server", "timestamp": "2019-12-13T15:06:10,662Z", "level": "INFO", "component": "o.e.p.PluginsService", "cluster.name": "logging", "node.name": "elasticsearch", "message": "no plugins loaded" }
{"type": "server", "timestamp": "2019-12-13T15:06:14,241Z", "level": "INFO", "component": "o.e.x.s.a.l.LdapUserSearchSessionFactory", "cluster.name": "logging", "node.name": "elasticsearch", "message": "Realm [ldap1] is in user-search mode - base_dn=[ou=Users,o=54899d1f318ab54f7100d8f0,dc=jumpcloud,dc=com], search filter=[(uid={0})]" }
{"type": "server", "timestamp": "2019-12-13T15:06:14,279Z", "level": "INFO", "component": "o.e.x.s.a.s.FileRolesStore", "cluster.name": "logging", "node.name": "elasticsearch", "message": "parsed [0] roles from file [/usr/share/elasticsearch/config/roles.yml]" }
{"type": "server", "timestamp": "2019-12-13T15:06:14,797Z", "level": "INFO", "component": "o.e.x.m.p.l.CppLogMessageHandler", "cluster.name": "logging", "node.name": "elasticsearch", "message": "[controller/263] [Main.cc@110] controller (64 bit): Version 7.5.0 (Build 17d1c724ca38a1) Copyright (c) 2019 Elasticsearch BV" }
{"type": "server", "timestamp": "2019-12-13T15:06:15,344Z", "level": "DEBUG", "component": "o.e.a.ActionModule", "cluster.name": "logging", "node.name": "elasticsearch", "message": "Using REST wrapper from plugin org.elasticsearch.xpack.security.Security" }
{"type": "server", "timestamp": "2019-12-13T15:06:15,499Z", "level": "INFO", "component": "o.e.d.DiscoveryModule", "cluster.name": "logging", "node.name": "elasticsearch", "message": "using discovery type [zen] and seed hosts providers [settings]" }
{"type": "server", "timestamp": "2019-12-13T15:06:16,471Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "logging", "node.name": "elasticsearch", "message": "initialized" }
{"type": "server", "timestamp": "2019-12-13T15:06:16,471Z", "level": "INFO", "component": "o.e.n.Node", "cluster.name": "logging", "node.name": "elasticsearch", "message": "starting ..." }
{"type": "server", "timestamp": "2019-12-13T15:06:16,603Z", "level": "INFO", "component": "o.e.t.TransportService", "cluster.name": "logging", "node.name": "elasticsearch", "message": "publish_address {192.168.80.2:9300}, bound_addresses {0.0.0.0:9300}" }
{"type": "server", "timestamp": "2019-12-13T15:06:17,443Z", "level": "INFO", "component": "o.e.b.BootstrapChecks", "cluster.name": "logging", "node.name": "elasticsearch", "message": "bound or publishing to a non-loopback address, enforcing bootstrap checks" }
{"type": "server", "timestamp": "2019-12-13T15:06:17,448Z", "level": "INFO", "component": "o.e.c.c.Coordinator", "cluster.name": "logging", "node.name": "elasticsearch", "message": "cluster UUID [AQkjVSEAR9i2ItC6QVghow]" }
{"type": "server", "timestamp": "2019-12-13T15:06:17,642Z", "level": "INFO", "component": "o.e.c.s.MasterService", "cluster.name": "logging", "node.name": "elasticsearch", "message": "elected-as-master ([1] nodes joined)[{elasticsearch}{05w2lDl6RMyhX60gts6pFw}{_0AwjYJSTOeM6eCsjS2GQw}{192.168.80.2}{192.168.80.2:9300}{dilm}{ml.machine_memory=59087360000, xpack.installed=true, ml.max_open_jobs=20} elect leader, _BECOME_MASTER_TASK_, _FINISH_ELECTION_], term: 5, version: 2700, delta: master node changed {previous [], current [{elasticsearch}{05w2lDl6RMyhX60gts6pFw}{_0AwjYJSTOeM6eCsjS2GQw}{192.168.80.2}{192.168.80.2:9300}{dilm}{ml.machine_memory=59087360000, xpack.installed=true, ml.max_open_jobs=20}]}" }
{"type": "server", "timestamp": "2019-12-13T15:06:17,696Z", "level": "WARN", "component": "o.e.t.TcpTransport", "cluster.name": "logging", "node.name": "elasticsearch", "message": "exception caught on transport layer [Netty4TcpChannel{localAddress=/127.0.0.1:9300, remoteAddress=/127.0.0.1:55634}], closing connection",
"stacktrace": ["io.netty.handler.codec.DecoderException: javax.net.ssl.SSLHandshakeException: No available authentication scheme",
"at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:473) ~[netty-codec-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:281) ~[netty-codec-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:352) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1422) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:931) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:700) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.nio.NioEventLoop.processSelectedKeysPlain(NioEventLoop.java:600) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:554) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:514) [netty-transport-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.util.concurrent.SingleThreadEventExecutor$6.run(SingleThreadEventExecutor.java:1050) [netty-common-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) [netty-common-4.1.43.Final.jar:4.1.43.Final]",
"at java.lang.Thread.run(Thread.java:830) [?:?]",
"Caused by: javax.net.ssl.SSLHandshakeException: No available authentication scheme",
"at sun.security.ssl.Alert.createSSLException(Alert.java:131) ~[?:?]",
"at sun.security.ssl.Alert.createSSLException(Alert.java:117) ~[?:?]",
"at sun.security.ssl.TransportContext.fatal(TransportContext.java:311) ~[?:?]",
"at sun.security.ssl.TransportContext.fatal(TransportContext.java:267) ~[?:?]",
"at sun.security.ssl.TransportContext.fatal(TransportContext.java:258) ~[?:?]",
"at sun.security.ssl.CertificateMessage$T13CertificateProducer.onProduceCertificate(CertificateMessage.java:951) ~[?:?]",
"at sun.security.ssl.CertificateMessage$T13CertificateProducer.produce(CertificateMessage.java:940) ~[?:?]",
"at sun.security.ssl.SSLHandshake.produce(SSLHandshake.java:440) ~[?:?]",
"at sun.security.ssl.ClientHello$T13ClientHelloConsumer.goServerHello(ClientHello.java:1243) ~[?:?]",
"at sun.security.ssl.ClientHello$T13ClientHelloConsumer.consume(ClientHello.java:1179) ~[?:?]",
"at sun.security.ssl.ClientHello$ClientHelloConsumer.onClientHello(ClientHello.java:851) ~[?:?]",
"at sun.security.ssl.ClientHello$ClientHelloConsumer.consume(ClientHello.java:812) ~[?:?]",
"at sun.security.ssl.SSLHandshake.consume(SSLHandshake.java:396) ~[?:?]",
"at sun.security.ssl.HandshakeContext.dispatch(HandshakeContext.java:444) ~[?:?]",
"at sun.security.ssl.SSLEngineImpl$DelegatedTask$DelegatedAction.run(SSLEngineImpl.java:1260) ~[?:?]",
"at sun.security.ssl.SSLEngineImpl$DelegatedTask$DelegatedAction.run(SSLEngineImpl.java:1247) ~[?:?]",
"at java.security.AccessController.doPrivileged(AccessController.java:691) ~[?:?]",
"at sun.security.ssl.SSLEngineImpl$DelegatedTask.run(SSLEngineImpl.java:1192) ~[?:?]",
"at io.netty.handler.ssl.SslHandler.runAllDelegatedTasks(SslHandler.java:1502) ~[netty-handler-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.ssl.SslHandler.runDelegatedTasks(SslHandler.java:1516) ~[netty-handler-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1400) ~[netty-handler-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1227) ~[netty-handler-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1274) ~[netty-handler-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:503) ~[netty-codec-4.1.43.Final.jar:4.1.43.Final]",
"at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:442) ~[netty-codec-4.1.43.Final.jar:4.1.43.Final]",
"... 16 more"] }
{