Unable to create index in Elasticsearch

I'm trying to ingest data into Elasticsearch using the Logstash http_poller input, but the index is not getting created in Elasticsearch. Can someone help me with this?

input {

    http_poller {
        urls => {
            "test_tsr1_html" => "url"
            "test_tsr2_html" => "url"
            "test_tsr3_html" => "url"

            "test_msr1_html" => "url"
            "test_msr2_html" => "url"
            "test_msr3_html" => "url"

            "test_cbr1_html" => "url"
            "test_cbr2_html" => "url"
            "test_cbr3_html" => "url"

            "test_bmp_tsr1_html" => "url"
            "test_bmp_tsr2_html" => "url"
            "test_bmp_tsr3_html" => "url"

            "test_per1_html" => "url"
            "test_per2_html" => "url"
            "test_per3_html" => "url"

            "test_socc_per1_html" => "url"
            "test_socc_per2_html" => "url"
            "test_socc_per3_html" => "url"



            "test_tsr1_ppt" => "url"
            "test_tsr2_ppt" => "url"
            "test_tsr3_ppt" => "url"

            "test_msr1_ppt" => "url"
            "test_msr2_ppt" => "url"
            "test_msr3_ppt" => "url"

            "test_cbr1_ppt" => "url"
            "test_cbr2_ppt" => "url"
            "test_cbr3_ppt" => "url"

            "test_bmp_tsr1_ppt" => "url"
            "test_bmp_tsr2_ppt" => "url"
            "test_bmp_tsr3_ppt" => "url"

            "test_per1_ppt" => "url"
            "test_per2_ppt" => "url"
            "test_per3_ppt" => "url"

            "test_socc_per1_ppt" => "url"
            "test_socc_per2_ppt" => "url"
            "test_socc_per3_ppt" => "url"

            "test2"=> "url"
            "TEST3"=> "url"

            "test-celery"=> "url"

            "TEST4"=>"url"
            "TEST5"=>"url"

        }
        request_timeout => 120
        client_cert => "/app/cert.cer"
        client_key => "/app/key.key"
        cacert => "/app/ca_list.pem"
        schedule => { every => "5m" }
        codec => "line"
    }
}

filter {
    if [message] =~ "logs: No matching logs found" {
        drop { }
    }
    grok {
        match => {
            "message" => "%{IPV4:hostmachine}: %{TIMESTAMP_ISO8601:timestamp}: %{GREEDYDATA:dup_timestamp} %{LOGLEVEL:loglevel} %{GREEDYDATA:package_method} \[trace_id=%{DATA:trace_id} span_id=%{DATA:span_id} resource.service.name=%{DATA:resource.service.name}] - %{GREEDYDATA:message}$"
        }
        add_field => [ "received_at", "%{@timestamp}" ]
        overwrite => [ "message" ]
    }
    date {
        match => [ "timestamp", "ISO8601" ]
        target => "@timestamp"
        locale => "en"
        timezone => "UTC"
    }
}

output {
    elasticsearch {
        hosts => ["xxx.xx.xx.xx:9200"]
        index => "prod-logs-%{+YYYY.MM.dd}"
    }
    stdout {
        codec => rubydebug
    }
}

Also, the Logstash config check does not report any errors:

root@ubuntu:~# /usr/share/logstash/bin/logstash "--path.settings" "/etc/logstash" -t  -f /etc/logstash/conf.d/prod_logs.conf
Using bundled JDK: /usr/share/logstash/jdk
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
WARNING: An illegal reflective access operation has occurred
WARNING: Illegal reflective access by org.jruby.ext.openssl.SecurityHelper (file:/tmp/jruby-2417/jruby2031457912748745249jopenssl.jar) to field java.security.MessageDigest.provider
WARNING: Please consider reporting this to the maintainers of org.jruby.ext.openssl.SecurityHelper
WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
WARNING: All illegal access operations will be denied in a future release
Sending Logstash logs to /app/logs/logstash which is now configured via log4j2.properties
[2021-12-01T11:22:23,802][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"7.10.2", "jruby.version"=>"jruby 9.2.13.0 (2.5.7) 2020-08-03 9a89c94bcc OpenJDK 64-Bit Server VM 11.0.8+10 on 11.0.8+10 +indy +jit [linux-x86_64]"}
[2021-12-01T11:22:26,015][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2021-12-01T11:22:30,802][INFO ][org.reflections.Reflections] Reflections took 64 ms to scan 1 urls, producing 23 keys and 47 values
Configuration OK
[2021-12-01T11:22:32,824][INFO ][logstash.runner          ] Using config.test_and_exit mode. Config Validation Result: OK. Exiting Logstash
root@ubuntu:~#

I don't see any errors in the Logstash or Elasticsearch logs.

I enabled debug logging for Logstash. This is what I found:

root@ubuntu:/app/logs/logstash# cat logstash-plain.log | grep "prod-logs" | grep -i "error"
[2021-12-01T09:43:10,302][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_NOOPEN"=>"\\s+Cannot open %{DATA}: ERR=%{GREEDYDATA:berror}"}
[2021-12-01T09:43:10,317][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_NOOPENDIR"=>"\\s+Could not open directory %{DATA}: ERR=%{GREEDYDATA:berror}"}
[2021-12-01T09:43:10,343][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_NOSTAT"=>"\\s+Could not stat %{DATA}: ERR=%{GREEDYDATA:berror}"}
[2021-12-01T09:43:10,599][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_MAXSTART"=>"Fatal error: Job canceled because max start delay time exceeded."}
[2021-12-01T09:43:10,604][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_DUPLICATE"=>"Fatal error: JobId %{INT:duplicate} already running. Duplicate job not allowed."}
[2021-12-01T09:43:10,608][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_NOJOBSTAT"=>"Fatal error: No Job status returned from FD."}
[2021-12-01T09:43:10,646][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_FATAL_CONN"=>"Fatal error: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?<berror>%{GREEDYDATA})"}
[2021-12-01T09:43:10,662][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_NO_CONNECT"=>"Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:client}|Storage daemon) on %{HOSTNAME}:%{POSINT}. ERR=(?<berror>%{GREEDYDATA})"}
[2021-12-01T09:43:10,676][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_NO_AUTH"=>"Fatal error: Unable to authenticate with File daemon at %{HOSTNAME}. Possible causes:"}
[2021-12-01T09:43:10,768][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"BACULA_LOG_JOB"=>"(Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \\(%{BACULA_VERSION}\\):"}
[2021-12-01T09:43:10,846][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"HTTPDERROR_DATE"=>"%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}"}
[2021-12-01T09:43:10,888][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"HTTPD20_ERRORLOG"=>"\\[%{HTTPDERROR_DATE:timestamp}\\] \\[%{LOGLEVEL:loglevel}\\] (?:\\[client %{IPORHOST:clientip}\\] ){0,1}%{GREEDYDATA:message}"}
[2021-12-01T09:43:10,929][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"HTTPD24_ERRORLOG"=>"\\[%{HTTPDERROR_DATE:timestamp}\\] \\[%{WORD:module}:%{LOGLEVEL:loglevel}\\] \\[pid %{POSINT:pid}(:tid %{NUMBER:tid})?\\]( \\(%{POSINT:proxy_errorcode}\\)%{DATA:proxy_message}:)?( \\[client %{IPORHOST:clientip}:%{POSINT:clientport}\\])?( %{DATA:errorcode}:)? %{GREEDYDATA:message}"}
[2021-12-01T09:43:10,956][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"HTTPD_ERRORLOG"=>"%{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG}"}
[2021-12-01T09:43:12,974][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"CISCOFW313005"=>"%{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\\(%{DATA:err_src_fwuser}\\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\\(%{DATA:err_dst_fwuser}\\))? \\(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\\) on %{DATA:interface} interface\\.  Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\\(%{DATA:orig_src_fwuser}\\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\\(%{DATA:orig_dst_fwuser}\\))?"}
[2021-12-01T09:43:13,066][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"S3_ACCESS_LOG"=>"%{WORD:owner} %{NOTSPACE:bucket} \\[%{HTTPDATE:timestamp}\\] %{IP:clientip} %{NOTSPACE:requester} %{NOTSPACE:request_id} %{NOTSPACE:operation} %{NOTSPACE:key} (?:\"%{S3_REQUEST_LINE}\"|-) (?:%{INT:response:int}|-) (?:-|%{NOTSPACE:error_code}) (?:%{INT:bytes:int}|-) (?:%{INT:object_size:int}|-) (?:%{INT:request_time_ms:int}|-) (?:%{INT:turnaround_time_ms:int}|-) (?:%{QS:referrer}|-) (?:\"?%{QS:agent}\"?|-) (?:-|%{NOTSPACE:version_id})"}
[2021-12-01T09:43:13,092][DEBUG][logstash.filters.grok    ][prod-logs] Adding pattern {"RUBY_LOGLEVEL"=>"(?:DEBUG|FATAL|ERROR|WARN|INFO)"}
root@ubuntu:/app/logs/logstash#

Those aren't error messages. Those are just default grok patterns that are being loaded.

My first recommendation would be to limit your use of GREEDYDATA as much as possible. It can be quite expensive and may not yield the results you want. Be as specific as possible with your grok patterns.
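For example, two of the GREEDYDATAs in your pattern look like they could be narrowed (a sketch, assuming dup_timestamp is also an ISO8601 timestamp and package_method never contains spaces; only the final free-text message really needs GREEDYDATA):

grok {
    match => {
        # TIMESTAMP_ISO8601 and NOTSPACE anchor the match early,
        # so a non-matching line fails fast instead of backtracking
        "message" => "%{IPV4:hostmachine}: %{TIMESTAMP_ISO8601:timestamp}: %{TIMESTAMP_ISO8601:dup_timestamp} %{LOGLEVEL:loglevel} %{NOTSPACE:package_method} \[trace_id=%{DATA:trace_id} span_id=%{DATA:span_id} resource.service.name=%{DATA:resource.service.name}] - %{GREEDYDATA:message}$"
    }
}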

Secondly, is your stdout output showing anything after 5 minutes?
Are there any errors in Elasticsearch? Try tailing the Elasticsearch log while you run this pipeline and see if any messages pop up when the http_poller starts its work.
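For example (log path assumed, it varies by install and cluster name):

tail -f /var/log/elasticsearch/elasticsearch.log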

Get rid of the drop {} and the Elasticsearch output, and verify that the stdout output processes all of the events that you expect. Once the data looks right, add the Elasticsearch output back in.
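Something like this as a temporary debug pipeline, for example (a sketch reusing the certificate paths from your config; with no filter at all you also get to see the raw events, including the _http_request_failure tag the http_poller adds when a request fails):

input {
    http_poller {
        # a single URL is enough to verify the TLS setup and the polling
        urls => {
            "test_tsr1_html" => "url"
        }
        request_timeout => 120
        client_cert => "/app/cert.cer"
        client_key => "/app/key.key"
        cacert => "/app/ca_list.pem"
        schedule => { every => "5m" }
        codec => "line"
    }
}

output {
    # stdout only; add the elasticsearch output back once events look right
    stdout {
        codec => rubydebug
    }
}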

I found this in the Logstash debug logs:

[2021-12-09T05:44:10,219][DEBUG][logstash.inputs.http_poller][prod-logs][e40553a2729c64542a4e97ac34b00633f8d472d5f502974b92836f4202826e1e] Fetching URL {:name=>"test-cbr1_html", :url=>[:get, "URL"]}
{ 1960443942 rufus-scheduler intercepted an error:
  1960443942   job:
  1960443942     Rufus::Scheduler::EveryJob "5m" {:first_in=>0.01}
  1960443942   error:
  1960443942     1960443942
  1960443942     Java::JavaSecuritySpec::InvalidKeySpecException
  1960443942     java.security.InvalidKeyException: IOException : algid parse error, not a sequence
  1960443942       sun.security.rsa.RSAKeyFactory.engineGeneratePrivate(sun/security/rsa/RSAKeyFactory.java:251)
  1960443942       java.security.KeyFactory.generatePrivate(java/security/KeyFactory.java:390)
  1960443942       jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  1960443942       jdk.internal.reflect.NativeMethodAccessorImpl.invoke(jdk/internal/reflect/NativeMethodAccessorImpl.java:62)
  1960443942       jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(jdk/internal/reflect/DelegatingMethodAccessorImpl.java:43)
  1960443942       java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:566)
  1960443942       org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(org/jruby/javasupport/JavaMethod.java:426)
  1960443942       org.jruby.javasupport.JavaMethod.invokeDirect(org/jruby/javasupport/JavaMethod.java:293)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.setup_key_store(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb:682)
  1960443942       org.jruby.RubyArray.each(org/jruby/RubyArray.java:1809)
  1960443942       org.jruby.RubyArray$INVOKER$i$0$0$each.call(org/jruby/RubyArray$INVOKER$i$0$0$each.gen)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.setup_key_store(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb:677)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.RUBY$method$setup_key_store$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/manticore_minus_0_dot_7_dot_0_minus_java/lib/manticore//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.ssl_socket_factory_from_options(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb:623)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.RUBY$method$ssl_socket_factory_from_options$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/manticore_minus_0_dot_7_dot_0_minus_java/lib/manticore//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.pool_builder(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb:398)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.RUBY$method$pool_builder$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/manticore_minus_0_dot_7_dot_0_minus_java/lib/manticore//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.pool(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb:406)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.manticore_minus_0_dot_7_dot_0_minus_java.lib.manticore.client.initialize(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.0-java/lib/manticore/client.rb:209)
  1960443942       org.jruby.RubyClass.newInstance(org/jruby/RubyClass.java:939)
  1960443942       org.jruby.RubyClass$INVOKER$i$newInstance.call(org/jruby/RubyClass$INVOKER$i$newInstance.gen)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_mixin_minus_http_client_minus_7_dot_0_dot_0.lib.logstash.plugin_mixins.http_client.make_client(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-mixin-http_client-7.0.0/lib/logstash/plugin_mixins/http_client.rb:178)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_mixin_minus_http_client_minus_7_dot_0_dot_0.lib.logstash.plugin_mixins.http_client.RUBY$method$make_client$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_mixin_minus_http_client_minus_7_dot_0_dot_0/lib/logstash/plugin_mixins//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-mixin-http_client-7.0.0/lib/logstash/plugin_mixins/http_client.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_mixin_minus_http_client_minus_7_dot_0_dot_0.lib.logstash.plugin_mixins.http_client.client(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-mixin-http_client-7.0.0/lib/logstash/plugin_mixins/http_client.rb:183)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_mixin_minus_http_client_minus_7_dot_0_dot_0.lib.logstash.plugin_mixins.http_client.RUBY$method$client$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_mixin_minus_http_client_minus_7_dot_0_dot_0/lib/logstash/plugin_mixins//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-mixin-http_client-7.0.0/lib/logstash/plugin_mixins/http_client.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2.lib.logstash.inputs.http_poller.request_async(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-http_poller-5.0.2/lib/logstash/inputs/http_poller.rb:153)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2.lib.logstash.inputs.http_poller.RUBY$method$request_async$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2/lib/logstash/inputs//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-http_poller-5.0.2/lib/logstash/inputs/http_poller.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2.lib.logstash.inputs.http_poller.run_once(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-http_poller-5.0.2/lib/logstash/inputs/http_poller.rb:141)
  1960443942       org.jruby.RubyHash$11.visit(org/jruby/RubyHash.java:1434)
  1960443942       org.jruby.RubyHash$11.visit(org/jruby/RubyHash.java:1431)
  1960443942       org.jruby.RubyHash.visitLimited(org/jruby/RubyHash.java:698)
  1960443942       org.jruby.RubyHash.visitAll(org/jruby/RubyHash.java:683)
  1960443942       org.jruby.RubyHash.iteratorVisitAll(org/jruby/RubyHash.java:1391)
  1960443942       org.jruby.RubyHash.each_pairCommon(org/jruby/RubyHash.java:1426)
  1960443942       org.jruby.RubyHash.each(org/jruby/RubyHash.java:1415)
  1960443942       org.jruby.RubyHash$INVOKER$i$0$0$each.call(org/jruby/RubyHash$INVOKER$i$0$0$each.gen)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2.lib.logstash.inputs.http_poller.run_once(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-http_poller-5.0.2/lib/logstash/inputs/http_poller.rb:140)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2.lib.logstash.inputs.http_poller.RUBY$method$run_once$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2/lib/logstash/inputs//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-http_poller-5.0.2/lib/logstash/inputs/http_poller.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_http_poller_minus_5_dot_0_dot_2.lib.logstash.inputs.http_poller.setup_schedule(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-http_poller-5.0.2/lib/logstash/inputs/http_poller.rb:135)
  1960443942       org.jruby.RubyProc.call(org/jruby/RubyProc.java:275)
  1960443942       org.jruby.RubyProc$INVOKER$i$call.call(org/jruby/RubyProc$INVOKER$i$call.gen)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.do_call(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:234)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.RUBY$method$do_call$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/rufus_minus_scheduler_minus_3_dot_0_dot_9/lib/rufus/scheduler//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.do_trigger(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:258)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.RUBY$method$do_trigger$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/rufus_minus_scheduler_minus_3_dot_0_dot_9/lib/rufus/scheduler//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.start_work_thread(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:300)
  1960443942       org.jruby.RubyProc.call(org/jruby/RubyProc.java:275)
  1960443942       org.jruby.RubyProc$INVOKER$i$call.call(org/jruby/RubyProc$INVOKER$i$call.gen)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.start_work_thread(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:299)
  1960443942       org.jruby.RubyKernel.loop(org/jruby/RubyKernel.java:1442)
  1960443942       org.jruby.RubyKernel$INVOKER$s$0$0$loop.call(org/jruby/RubyKernel$INVOKER$s$0$0$loop.gen)
  1960443942       usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.rufus_minus_scheduler_minus_3_dot_0_dot_9.lib.rufus.scheduler.jobs.start_work_thread(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/rufus/scheduler/jobs.rb:289)
  1960443942       org.jruby.RubyProc.call(org/jruby/RubyProc.java:318)
  1960443942       java.lang.Thread.run(java/lang/Thread.java:834)
  1960443942   tz:
  1960443942     ENV['TZ']:
  1960443942     Time.now: 2021-12-09 05:44:10 +0000
  1960443942   scheduler:
  1960443942     object_id: 2054
  1960443942     opts:
  1960443942       {:max_work_threads=>1}
  1960443942       frequency: 0.3
  1960443942       scheduler_lock: #<Rufus::Scheduler::NullLock:0x77eacba7>
  1960443942       trigger_lock: #<Rufus::Scheduler::NullLock:0x739fa85e>
  1960443942     uptime: 301.023918 (5m1s28)
  1960443942     down?: false
  1960443942     threads: 2
  1960443942       thread: #<Thread:0x4fc8ff52>
  1960443942       thread_key: rufus_scheduler_2054
  1960443942       work_threads: 1
  1960443942         active: 1
  1960443942         vacant: 0
  1960443942         max_work_threads: 1
  1960443942       mutexes: {}
  1960443942     jobs: 1
  1960443942       at_jobs: 0
  1960443942       in_jobs: 0
  1960443942       every_jobs: 1
  1960443942       interval_jobs: 0
  1960443942       cron_jobs: 0
  1960443942     running_jobs: 1
  1960443942     work_queue: 0
} 1960443942 .
[2021-12-09T05:44:12,216][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2021-12-09T05:44:12,225][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}

The issue was with the private key: it was in PKCS#1 format, and I converted it to PKCS#8.
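For anyone else who hits this: the java.security.InvalidKeyException ("algid parse error, not a sequence") in the trace above is the typical JVM error when a PKCS#1 key (-----BEGIN RSA PRIVATE KEY-----) is fed to code that expects PKCS#8 (-----BEGIN PRIVATE KEY-----). An unencrypted PEM key can be converted with openssl, for example:

openssl pkcs8 -topk8 -inform PEM -outform PEM -nocrypt -in key.key -out key_pkcs8.key

Then point client_key at the converted file.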
