How move configuration from logstash 5.5.1 to logstash 6.5 for tomcat logs

Hai,

I am a newbie to Elastic Stack.

We have a nice working Elastic Stack 5.5 for prod, and I`m trying to create separate instance on Elastic Stack 6.5.4 for test, and then switch prod to new version

No problem with installation )
But I`m stuck on logs filtering

Elastic Stack 5.5 configs

############################# Filebeat ################################
filebeat:
  # List of prospectors to fetch data.
  prospectors:
    -
      paths:
        - /opt/hybris/log/tomcat/access*.log
      input_type: log
      document_type: access_log
    -
      paths:
        - /opt/hybris/log/tomcat/console*.log
      input_type: log
      document_type: console_log
      multiline:
        pattern: '^*\|[[:space:]]*at|^*\|[[:space:]]Caused by:|^*\|[[:space:]]*\.\.\.[[:space:]]'
        negate: false
        match: after
        max_lines: 700
        timeout: 5s

  registry_file: /var/lib/filebeat/registry

output:
 logstash:
    hosts: ["10.127.10.24:5044"]
    worker: 1
    bulk_max_size: 4096
    compression_level: 1
    loadbalance: false
    #index: filebeat
shipper:

############################# Logging ##################################
logging:
  to_files: true
  files:
    path: /tmp/filebeat/mybeat
    name: mybeat
    rotateeverybytes: 10485760 # = 10MB
    #keepfiles: 7
  #selectors: [ ]
  #level: error




############################# Logstash################################
input {
  beats{
    port => 5044
  }
}

filter {
    if[type] =~ "access_log" {
        grok {
          match => {
            "message" => ["%{IPORHOST:clientip} %{HTTPDUSER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] %{WORD:verb} %{NOTSPACE:request} HTTP                                                                                              /%{NUMBER:httpversion} %{NUMBER:response:int} (?:%{NUMBER:bytes:int}|-) \[%{DATA:agent}\] \[%{DATA:referrer}\] %{NUMBER:ms:int}+ms"]
            }
        }
        date {
          match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
          target => "@timestamp"
        }
        geoip {
          source => "clientip"
        }
        mutate{
            remove_field => [ "timestamp" ]
        }
    }
}

filter {
    if[type] =~ "console_log" {
        grok {
          match => { "message" => "%{YEAR:year}/%{MONTHNUM:month}/%{MONTHDAY:day} %{TIME:time}" }
        }
        mutate {
             add_field => { "timestamp" => "%{day}/%{month}/%{year}:%{time}" }
        }
        date {
          match => [ "timestamp", "dd/MM/yyyy:HH:mm:ss.SSS", "dd/MM/yyyy:HH:mm:ss.sss" ]
          target => "@timestamp"
        }
        mutate{
            remove_field => [ "timestamp", "day", "month", "year", "time" ]
        }
        mutate {
            gsub => [
                "message", "^(?>\d\d){1,2}/(?:0?[1-9]|1[0-2])/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])?\s(?!<[0-9])(?:2[0123]|[01]?[0-9]):(                                                                                              ?:[0-5][0-9])(?::(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?))(?![0-9])?\s\|?\s", "",
                "message", "^.*(?>\d\d){1,2}/(?:0?[1-9]|1[0-2])/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])?\s(?!<[0-9])(?:2[0123]|[01]?[0-9])                                                                                              :(?:[0-5][0-9])(?::(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?))(?![0-9])?\s\|?\s", ""

            ]
        }
        if [message] =~ ".*\[m$" {
                drop{}
        }

    }
}

output {
    if[type] == "access_log" {
        elasticsearch {
            hosts => [ "localhost:9200" ]
            manage_template => false
            index => "access_log-%{+YYYY.MM.dd}"
            document_type => "%{[@metadata][type]}"
        }
    }
    if[type] == "access_log_stage" {
        elasticsearch {
            hosts => [ "localhost:9200" ]
            manage_template => false
            index => "stage_access_log-%{+YYYY.MM.dd}"
            document_type => "%{[@metadata][type]}"
        }
    }
    if[type] == "console_log" {
        elasticsearch {
            hosts => [ "localhost:9200" ]
            manage_template => false
            index => "console_log-%{+YYYY.MM.dd}"
            document_type => "%{[@metadata][type]}"
        }
    }
    if[type] == "console_log_stage" {
        elasticsearch {
            hosts => [ "localhost:9200" ]
            manage_template => false
            index => "stage_console_log-%{+YYYY.MM.dd}"
            document_type => "%{[@metadata][type]}"
        }
    }
}

logs transfer to logstash without problem, tested with

output {
    stdout { codec => json }
}

So what is the problem?

Elastic Stack 6.5.4 configs

    #=========================== Filebeat inputs =============================

    filebeat.inputs:

    - type: log

      enabled: true
      
      paths:
        - /opt/app/log/tomcat/access*.log

    filebeat.config.modules:
      # Glob pattern for configuration loading
      path: ${path.config}/modules.d/*.yml

      # Set to true to enable config reloading
      reload.enabled: false

    setup.template.settings:
      index.number_of_shards: 3
      #index.codec: best_compression
      #_source.enabled: false

    #================================ Outputs =====================================

    # Configure what output to use when sending the data collected by the beat.

    #----------------------------- Logstash output --------------------------------
    output.logstash:
      # The Logstash hosts
      hosts: ["10.129.10.7:5044"]

    #================================ Procesors =====================================

    # Configure processors to enhance or manipulate events generated by the beat.

    processors:
      - add_host_metadata: ~
      - add_cloud_metadata: ~

Logstash.conf

    ############################# Logstash################################
    # Beats -> Logstash -> Elasticsearch pipeline.

    input {
      beats {
        port => 5044
      }
    }

    #output {
    #  elasticsearch {
    #    stdout { codec => json }

    #    hosts => ["http://localhost:9200"]
    #    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
    #  }
    #}

    filter {
        if[type] =~ "access_log" {
            grok {
                match => {
                  "message" => ["%{IPORHOST:clientip} %{HTTPDUSER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] %{WORD:verb} %{NOTSPACE:request} HTTP                                                                                              /%{NUMBER:httpversion} %{NUMBER:response:int} (?:%{NUMBER:bytes:int}|-) \[%{DATA:agent}\] \[%{DATA:referrer}\] %{NUMBER:ms:int}+ms"]
              }
    }
                date {
                    match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
                    target => "@timestamp"
                }
                geoip {
                    source => "clientip"
                }
                mutate {
                    remove_field => [ "timestamp" ]
                }
            }
    }

                filter {
                    if[type] =~ "console_log" {
                        grok {
                            match => { "message" => "%{YEAR:year}/%{MONTHNUM:month}/%{MONTHDAY:day} %{TIME:time}" }
                        }
                            mutate {
                               add_field => { "timestamp" => "%{day}/%{month}/%{year}:%{time}" }
                        }
                            date {
                                match => [ "timestamp", "dd/MM/yyyy:HH:mm:ss.SSS", "dd/MM/yyyy:HH:mm:ss.sss" ]
                                target => "@timestamp"
                        }
                           mutate {
                                remove_field => [ "timestamp", "day", "month", "year", "time" ]
                        }
                           mutate {
                                gsub => [
                                   "message", "^(?>\d\d){1,2}/(?:0?[1-9]|1[0-2])/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])?\s(?!<[0-9])(?:2[0123]|[01]?[0-9]):(?:[0-5][0-9])(?::(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?))(?![0-9])?\s\|?\s", "",
                                   "message", "^.*(?>\d\d){1,2}/(?:0?[1-9]|1[0-2])/(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])?\s(?!<[0-9])(?:2[0123]|[01]?[0-9]):(?:[0-5][0-9])(?::(?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?))(?![0-9])?\s\|?\s", ""
                                ]
                        }

                    if [message] =~ ".*\[m$" {
                               drop{}
                    }
                }
    }

    output {
        if[type] == "access_log" {
                elasticsearch {
                hosts => [ "http://localhost:9200" ]
                manage_template => false
                index => "access_log-%{+YYYY.MM.dd}"
                document_type => "%{[@metadata][type]}"
                }
        }
        if[type] == "access_log_stage" {
                elasticsearch {
                hosts => [ "http://localhost:9200" ]
                manage_template => false
                index => "stage_access_log-%{+YYYY.MM.dd}"
                document_type => "%{[@metadata][type]}"
                }
        }
        if[type] == "console_log" {
                elasticsearch {
                hosts => [ "http://localhost:9200" ]
                manage_template => false
                index => "console_log-%{+YYYY.MM.dd}"
                document_type => "%{[@metadata][type]}"
                }
        }
        if[type] == "console_log_stage" {
                elasticsearch {
                hosts => [ "http://localhost:9200" ]
                manage_template => false
                index => "stage_console_log-%{+YYYY.MM.dd}"
                document_type => "%{[@metadata][type]}"
        }
                                                                                                                                                                                                }
                                                                                                                                                                                                                                                                                                    }

Now problem is

2019-01-17T16:36:14.654+0300    ERROR   logstash/async.go:256   Failed to publish events caused by: write tcp 10.129.10.8:58559->10.129.10.7:5044: write: connection reset by peer
2019-01-17T16:36:15.655+0300    ERROR   pipeline/output.go:121  Failed to publish events: write tcp 10.129.10.8:58559->10.129.10.7:5044: write: connection reset by peer

telnet to this port work fine
test fo logstash config is fine

/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash_input.conf --config.test_and_exit
Config Validation Result: OK. Exiting Logstash

on Logstash server found messages in LS log

[2019-01-18T10:44:51,528][DEBUG][io.netty.util.internal.NativeLibraryLoader] netty_tcnative_linux_x86_64 cannot be loaded from java.libary.path, now trying export to -Dio.netty.native.workdir: /tmp
[2019-01-18T10:44:51,555][DEBUG][io.netty.util.internal.NativeLibraryLoader] Successfully loaded the library /tmp/libnetty_tcnative_linux_x86_644437496018928109928.so

This is correct output section ?

output {
if[type] == "access_log" {
elasticsearch {
hosts => [ "http://localhost:9200" ]
manage_template => false
index => "access_log-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}

in LS log i see messages

[DEBUG][logstash.pipeline ] output received {"event"=>{"host"=>{"containerized"=>true, "name"=>"pre-stage1.shop.shop.ru", "architecture"=>"x86_64", "os"=>{"platform"=>"centos", "codename"=>"Final", "version"=>"6.5 (Final)", "family"=>"redhat"}}, "@version"=>"1", "source"=>"/opt/app/log/tomcat/access.2019-01-18.log", "prospector"=>{"type"=>"log"}, "@timestamp"=>2019-01-18T08:15:08.873Z, "beat"=>{"hostname"=>"pre-stage1.shop.shop.ru", "name"=>"pre-stage1.shop.rivegauche.ru", "version"=>"6.5.4"}, "input"=>{"type"=>"log"}, "message"=>"10.129.10.9 - - [18/Jan/2019:11:14:59 +0300] GET /solrfacetsearch/master_rive_Product/replication?command=indexversion&wt=javabin&qt=%2Freplication&version=2 HTTP/1.1 200 80 [Solr[org.apache.solr.client.solrj.impl.HttpSolrServer] 1.0] [-] 1ms", "tags"=>["beats_input_codec_plain_applied"], "offset"=>598210}}

but no data in ES

and on filebeats server log

2019-01-18T11:24:16.925+0300    ERROR   logstash/async.go:256   Failed to publish events caused by: write tcp 10.129.10.8:34037->10.129.10.7:5044: write: connection reset by peer
2019-01-18T11:24:16.925+0300    DEBUG   [logstash]      logstash/async.go:116   close connection
2019-01-18T11:24:17.926+0300    ERROR   pipeline/output.go:121  Failed to publish events: write tcp 10.129.10.8:34037->10.129.10.7:5044: write: connection reset by peer

I assume that the mistake was in my logstash.conf in filter section, was fixed

and now i have error in LS log

[2019-01-18T12:02:45,709][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, input, filter, output at line 13, column 5 (byte 196) after ", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:41:incompile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:49:in compile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:inblock in compile_sources'", "org/jruby/RubyArray.java:2486:in map'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:10:incompile_sources'", "org/logstash/execution/AbstractPipelineExt.java:149:in initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:22:ininitialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:90:in initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:42:inblock in execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:92:in block in exclusive'", "org/jruby/ext/thread/Mutex.java:148:insynchronize'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:92:in exclusive'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:38:inexecute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:317:in block in converge_state'"]}

Please post the logstash.conf.

error is gone

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.