My "Logstash.conf" gives errors

Error registering pl
ugin {:plugin=>"#<LogStash::OutputDelegator:0x3e8d2e9a @namespaced_metric=#<LogS
tash::Instrument::NamespacedMetric:0x32688caf @metric=#<LogStash::Instrument::Me
tric:0x59c15491 @collector=#<LogStash::Instrument::Collector:0x7b4256fa @agent=n
il, @metric_store=#<LogStash::Instrument::MetricStore:0x2cec4ef6 @store=#<Concur
rent::Map:0x5a97a230 @default_proc=nil>, @structured_lookup_mutex=#<Mutex:0x9a10
02f>, @fast_lookup=#<Concurrent::Map:0x60bd6267 @default_proc=nil>>>>, @namespac
e_name=[:stats, :pipelines, :main, :plugins, :outputs, :\"054d561825a4f8783748d6
1689ac18c5fefe4b96-4\"]>, @metric=#<LogStash::Instrument::NamespacedMetric:0x490
9bab7 @metric=#<LogStash::Instrument::Metric:0x59c15491 @collector=#<LogStash::I
nstrument::Collector:0x7b4256fa @agent=nil, @metric_store=#<LogStash::Instrument
::MetricStore:0x2cec4ef6 @store=#<Concurrent::Map:0x5a97a230 @default_proc=nil>,
 @structured_lookup_mutex=#<Mutex:0x9a1002f>, @fast_lookup=#<Concurrent::Map:0x6
0bd6267 @default_proc=nil>>>>, @namespace_name=[:stats, :pipelines, :main, :plug
ins, :outputs]>, @logger=#<LogStash::Logging::Logger:0x430d86f9 @logger=#<Java::
OrgApacheLoggingLog4jCore::Logger:0x5121d097>>, @strategy=#<LogStash::OutputDele
gatorStrategies::Shared:0x29083d34 @output=<LogStash::Outputs::ElasticSearch hos
ts=>[https://127.0.0.1:9200], index=>\"csv_index\", id=>\"054d561825a4f8783748d6
1689ac18c5fefe4b96-4\", enable_metric=>true, codec=><LogStash::Codecs::Plain id=
>\"plain_3f5da417-a002-4fff-a680-96ef558f619d\", enable_metric=>true, charset=>\
"UTF-8\">, workers=>1, manage_template=>true, template_name=>\"logstash\", templ
ate_overwrite=>false, idle_flush_time=>1, doc_as_upsert=>false, script_type=>\"i
nline\", script_lang=>\"painless\", script_var_name=>\"event\", scripted_upsert=
>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1,
 action=>\"index\", ssl_certificate_verification=>true, sniffing=>false, sniffin
g_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, healthcheck_pa
th=>\"/\", absolute_healthcheck_path=>false, sniffing_path=>\"_nodes/http\", abs
olute_sniffing_path=>false, resurrect_delay=>5, validate_after_inactivity=>10000
, ssl=>true>>, @id=\"054d561825a4f8783748d61689ac18c5fefe4b96-4\", @metric_event
s=#<LogStash::Instrument::NamespacedMetric:0x22f5ada7 @metric=#<LogStash::Instru
ment::Metric:0x59c15491 @collector=#<LogStash::Instrument::Collector:0x7b4256fa
@agent=nil, @metric_store=#<LogStash::Instrument::MetricStore:0x2cec4ef6 @store=
#<Concurrent::Map:0x5a97a230 @default_proc=nil>, @structured_lookup_mutex=#<Mute
x:0x9a1002f>, @fast_lookup=#<Concurrent::Map:0x60bd6267 @default_proc=nil>>>>, @
namespace_name=[:stats, :pipelines, :main, :plugins, :outputs, :\"054d561825a4f8
783748d61689ac18c5fefe4b96-4\", :events]>, @output_class=LogStash::Outputs::Elas
ticSearch>", :error=>"Unrecognized SSL message, plaintext connection?"}
[2017-06-16T10:13:43,038][ERROR][logstash.agent           ] Pipeline aborted due
 to error {:exception=>#<Manticore::UnknownException: Unrecognized SSL message,
plaintext connection?>, :backtrace=>["C:/Utilisateurs/A645234/Documents/logstash
-5.4.0/vendor/bundle/jruby/1.9/gems/manticore-0.6.1-java/lib/manticore/response.
rb:37:in `initialize'", "org/jruby/RubyProc.java:281:in `call'", "C:/Utilisateur
s/A645234/Documents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/manticore-0.6.1-
java/lib/manticore/response.rb:79:in `call'", "C:/Utilisateurs/A645234/Documents
/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.3.0
-java/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb:70:in
`perform_request'", "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/vendor/bun
dle/jruby/1.9/gems/logstash-output-elasticsearch-6.3.0-java/lib/logstash/outputs
/elasticsearch/http_client/pool.rb:285:in `perform_request_to_url'", "C:/Utilisa
teurs/A645234/Documents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/logstash-out
put-elasticsearch-6.3.0-java/lib/logstash/outputs/elasticsearch/http_client/pool
.rb:253:in `healthcheck!'", "org/jruby/RubyHash.java:1342:in `each'", "C:/Utilis
ateurs/A645234/Documents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/logstash-ou
tput-elasticsearch-6.3.0-java/lib/logstash/outputs/elasticsearch/http_client/poo
l.rb:243:in `healthcheck!'", "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/v
endor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.3.0-java/lib/logstas
h/outputs/elasticsearch/http_client/pool.rb:335:in `update_urls'", "C:/Utilisate
urs/A645234/Documents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/logstash-outpu
t-elasticsearch-6.3.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.r
b:72:in `start'", "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/vendor/bundl
e/jruby/1.9/gems/logstash-output-elasticsearch-6.3.0-java/lib/logstash/outputs/e
lasticsearch/http_client.rb:264:in `build_pool'", "C:/Utilisateurs/A645234/Docum
ents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6
.3.0-java/lib/logstash/outputs/elasticsearch/http_client.rb:58:in `initialize'",
 "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/
logstash-output-elasticsearch-6.3.0-java/lib/logstash/outputs/elasticsearch/http
_client_builder.rb:82:in `build'", "C:/Utilisateurs/A645234/Documents/logstash-5
.4.0/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.3.0-java/lib/l
ogstash/outputs/elasticsearch.rb:225:in `build_client'", "C:/Utilisateurs/A64523
4/Documents/logstash-5.4.0/vendor/bundle/jruby/1.9/gems/logstash-output-elastics
earch-6.3.0-java/lib/logstash/outputs/elasticsearch/common.rb:20:in `register'",
 "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/logstash-core/lib/logstash/ou
tput_delegator_strategies/shared.rb:9:in `register'", "C:/Utilisateurs/A645234/D
ocuments/logstash-5.4.0/logstash-core/lib/logstash/output_delegator.rb:41:in `re
gister'", "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/logstash-core/lib/lo
gstash/pipeline.rb:268:in `register_plugin'", "C:/Utilisateurs/A645234/Documents
/logstash-5.4.0/logstash-core/lib/logstash/pipeline.rb:279:in `register_plugins'
", "org/jruby/RubyArray.java:1613:in `each'", "C:/Utilisateurs/A645234/Documents
/logstash-5.4.0/logstash-core/lib/logstash/pipeline.rb:279:in `register_plugins'
", "C:/Utilisateurs/A645234/Documents/logstash-5.4.0/logstash-core/lib/logstash/
pipeline.rb:288:in `start_workers'", "C:/Utilisateurs/A645234/Documents/logstash
-5.4.0/logstash-core/lib/logstash/pipeline.rb:214:in `run'", "C:/Utilisateurs/A6
45234/Documents/logstash-5.4.0/logstash-core/lib/logstash/agent.rb:398:in `start
_pipeline'"]}

Anyone ?

the option host in deprecated in elastic 5.x and versions above. Can you try using hosts instead in the output section of your logstash conf file??

It was one of my errors yeah. It's all good now.

Can you share your config file, you should have added username and password for elasticsearch output, because x-pack is installed. You probably need to verify there.

Hello Ranjith_M, thanks for your attention.

I configured my file (.conf) according to that explanation.
Https://www.elastic.co/guide/en/x-pack/current/logstash.html

Here's my .conf file:

input {
#stdin { }
file {
path => '/var/log/glassfish/domain1/server.log'
user => "logstash_internal"
password => "changeme"
type => 'server'
start_position => 'beginning'
}
}

filter {
user => "logstash_internal"
password => "changeme"
if [type] == "server" and "TRACKER" in [message] {
grok{
match => { "message" => "%{ONETRACKER}"}
}
}
if [type] == "server" and "LOGIN" in [message] {
grok{
match => { "message" => "%{ONETLOGIN}"}
}
}
if [type] == "server" and "LOGOUT" in [message] {

                            grok{
                                 match => { "message" => "%{ONETLOGOUT}"}
                                 }
                            }
    if [type] == "server" and "VIEW" in [message] {

                            grok{
                                 match => { "message" => "%{ONETVIEW}"}
                                 }
                            }
    if [type] == "server" and "LOG" in [message] {

                            grok{
                                 match => { "message" => "%{ONETLOG}"}
                                 }
                            }
    if [type] == "server" and "MAP" and "num_operator:1" in [message] {

                            grok{
                                 match => { "message" => "%{ONETMAPA}"}
                                 }
                            }

   if [type] == "server" and "MAP" in [message] and "num_operator:1" not in [message] {

                            grok{
                                 match => { "message" => "%{ONETMAPAPLUS}"}
                                 }
                            }

geoip {
source => "clientip"
target => "geoip"
database => "/etc/logstash/GeoLiteCity.dat"
}
mutate {
convert => [ "[geoip][coordinates]", "float"]
}
mutate {
remove_tag => [ "_grokparsefailure" ]
}
}
output {
elasticsearch { hosts => ["localhost:XX"]
user => "logstash_internal"
password => "changeme"
index => "logstash-web01-%{+YYYY.MM.dd}" }
#stdout { codec => rubydebug }
}

Note:
Without xpacode, it works normally.

User and password configs are not part of logstash file input plugin.
Below is the link for valid configs.

Ranjith_M, thanks for the feedback.
Please, look at the 3 in this explanation, My problem occurs after installing the x-pac plugin.

Hi Alanr,

Agree with what you said, but that is for logstash level security, and user name and password shouldn't be with in file properties.

file {
path => "/tmp/access_log"
start_position => "beginning"
}

From the above snippet, what ever you keep in the braces are specific properties to file type and that doesn't have user and password as configs, instead it should be as below

input {
file {
path => "/tmp/access_log"
start_position => "beginning"
}
user: xxx
password: yyy
}

Please let me know if you still face issues.

Hello Ranjith_M

I tried configuring the file (.conf) as you instructed me, but after setting the logstash service did not work.

Searching the internet, I found this link (https://github.com/elastic/logstash/issues/6332), after following the tips of this topic (configuring only authentication for output) the service logstash returned to work, but did not generate the indices No elastic / kibana!

In short, I'm still having problems after installing x-pac.

@alanr4

I will test your scenario some time today and will let you know. Mean while to understand clearly, sharing your conf(Change your actual details) will help.
Also what version of elastic/logstash you are using?

Hi Ranjith_M

Follows the original (.conf) file

input {
#stdin { }
file {
path => '/var/log/onet/reportLog.log*'
type => 'report'
start_position => 'beginning'
}
}

filter {
if [type] == "report" and "TRACKER" in [message] {
grok{
match => { "message" => "%{ONETRACKER}"}
}
}
if [type] == "report" and "LOGIN" in [message] {
grok{
match => { "message" => "%{ONETLOGIN}"}
}
}
if [type] == "report" and "LOGOUT" in [message] {

                            grok{
                                 match => { "message" => "%{ONETLOGOUT}"}
                                 }
                            }
    if [type] == "report" and "VIEW" in [message] {

                            grok{
                                 match => { "message" => "%{ONETVIEW}"}
                                 }
                            }
    if [type] == "report" and "LOG" in [message] {

                            grok{
                                 match => { "message" => "%{ONETLOG}"}
                                 }
                            }
    if [type] == "report" and "MAP" and "num_operator:1" in [message] {

                            grok{
                                 match => { "message" => "%{ONETMAPA}"}
                                 }
                            }

   if [type] == "report" and "MAP" in [message] and "num_operator:1" not in [message] {

                            grok{
                                 match => { "message" => "%{ONETMAPAPLUS}"}
                                 }
                            }

geoip {
source => "clientip"
target => "geoip"
database => "/etc/logstash/GeoLiteCity.dat"
}
mutate {
convert => [ "[geoip][coordinates]", "float"]
}
mutate {
remove_tag => [ "_grokparsefailure" ]
}
}
output {
elasticsearch { hosts => ["200.98.233.36:9333"]
index => "logstash-lab-%{+YYYY.MM.dd}"
user => "logstash_internal"
password => "123456"
#stdout { codec => rubydebug }
}
}

In the file (logstash.yml) was added the parameters:

xpack.monitoring.enabled:
xpack.monitoring.elasticsearch.url: ["http://200.98.233.36:9333"]
xpack.monitoring.elasticsearch.username: "logstash_system"
xpack.monitoring.elasticsearch.password: "123456"

Note:

  • the default port on my elasticsearc, which is running in docker, has been changed.
  • Without the xpac plugin, it works normal
  • At the moment it is not generating any errors in the logs (logstash, elasticsearch)
  • Versions (kibana, elastic, logstash) 5.4.1

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.