Logstash stopped processing because of an error

This is my first time installing Logstash and I could use some help, please and thanks!

[2022-11-15T18:45:54,229][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"8.5.0", "jruby.version"=>"jruby 9.3.8.0 (2.6.8) 2022-09-13 98d69c9461 OpenJDK 64-Bit Server VM 17.0.4+8 on 17.0.4+8 +indy +jit [x86_64-linux]"}
[2022-11-15T18:45:54,231][INFO ][logstash.runner          ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -Djruby.jit.threshold=0, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, -Djruby.regexp.interruptible=true, -Djdk.io.File.enableADS=true, --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED]
[2022-11-15T18:45:55,890][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[2022-11-15T18:45:57,899][ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", \"input\", \"filter\", \"output\" at line 18, column 1 (byte 776) after ", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:182:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:48:in `initialize'", "org/jruby/RubyClass.java:911:in `new'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:50:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:386:in `block in converge_state'"]}
[2022-11-15T18:45:57,979][INFO ][logstash.runner          ] Logstash shut down.
[2022-11-15T18:45:57,989][FATAL][org.logstash.Logstash    ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
	at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:790) ~[jruby.jar:?]
	at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:753) ~[jruby.jar:?]
	at usr.share.logstash.lib.bootstrap.environment.<main>(/usr/share/logstash/lib/bootstrap/environment.rb:91) ~[?:?]
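
Since the error points at line 18, column 1 of whatever config it is compiling, I assume Logstash is reading and concatenating every file under /etc/logstash/conf.d/ (that path is just my guess for a standard package install). My next step would be to test my file on its own with the config check flag, roughly like this (the file name is a placeholder for whatever the pipeline file is actually called):

# syntax check only, then exit
/usr/share/logstash/bin/logstash --path.settings /etc/logstash \
  -f /etc/logstash/conf.d/my-pipeline.conf --config.test_and_exit

The full config Logstash is loading is pasted below.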

input {  
  tcp {
    type => "syslog"
    port => 5140
  }
}
input {  
  udp {
    type => "syslog"
    port => 5140
  }
}
filter {
  if [type] == "syslog" {
    if [host] =~ /192\.168\.1\.1/ {
      mutate {
        add_tag => ["pfsense", "Ready"]
      }
    }
    if [host] =~ /172\.2\.22\.1/ {
      mutate {
        add_tag => ["pfsense-2", "Ready"]
      }
    }

    if "Ready" not in [tags] {
      mutate {
        add_tag => [ "syslog" ]
      }
    }
  }
}
filter {
  if [type] == "syslog" {
    mutate {
      remove_tag => "Ready"
    }
  }
}
filter {
  if "syslog" in [tags] {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM  dd HH:mm:ss" ]
      locale => "en"
    }
    if !("_grokparsefailure" in [tags]) {
      mutate {
        replace => [ "@source_host", "%{syslog_hostname}" ]
        replace => [ "@message", "%{syslog_message}" ]
      }
    }
    mutate {
      remove_field => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ]
    }
  }
}
filter {
  if "pfsense" in [tags] {
    grok {
      add_tag => [ "firewall" ]
      match => [ "message", "<(?<evtid>.*)>(?<datetime>(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\s+(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:[0-5][0-9])) (?<prog>.*?): (?<msg>.*)" ]
    }
    mutate {
      gsub => ["datetime","  "," "]
    }
    date {
      match => [ "datetime", "MMM dd HH:mm:ss" ]
      timezone => "America/New_York"
    }
    mutate {
      replace => [ "message", "%{msg}" ]
    }
    mutate {
      remove_field => [ "msg", "datetime" ]
    }
    if [prog] =~ /^dhcpd$/ {
      mutate {
        add_tag => [ "dhcpd" ]
      }
      grok {
        patterns_dir => ["/etc/logstash/conf.d/patterns"]
        match => [ "message", "%{DHCPD}" ]
      }
    }
    if [prog] =~ /^suricata/ {
      mutate {
        add_tag => [ "SuricataIDPS" ]
      }
      grok {
        patterns_dir => ["/etc/logstash/conf.d/patterns"]
        match => [ "message", "%{PFSENSE_SURICATA}" ]
      }
      if ![geoip] and [ids_src_ip] !~ /^(10\.|192\.168\.)/ {
        geoip {
          add_tag => [ "GeoIP" ]
          source => "ids_src_ip"
          database => "/etc/logstash/GeoLite2-City.mmdb"
        }
      }
      if [prog] =~ /^suricata/ {
        mutate {
          add_tag => [ "ET-Sig" ]
          add_field => [ "Signature_Info", "http://doc.emergingthreats.net/bin/view/Main/%{[ids_sig_id]}" ]
        }
      }
    }
    if [prog] =~ /^charon$/ {
      mutate {
        add_tag => [ "ipsec" ]
      }
    }
    if [prog] =~ /^barnyard2/ {
      mutate {
        add_tag => [ "barnyard2" ]
      }
    }
    if [prog] =~ /^openvpn/ {
      mutate {
        add_tag => [ "openvpn" ]
      }
    }
    if [prog] =~ /^ntpd/ {
      mutate {
        add_tag => [ "ntpd" ]
      }
    }
    if [prog] =~ /^php-fpm/ {
      mutate {
        add_tag => [ "web_portal" ]
      }
      grok {
        patterns_dir => ["/etc/logstash/conf.d/patterns"]
        match => [ "message", "%{PFSENSE_APP}%{PFSENSE_APP_DATA}" ]
      }
      mutate {
        lowercase => [ 'pfsense_ACTION' ]
      }
    }
    if [prog] =~ /^apinger/ {
      mutate {
        add_tag => [ "apinger" ]
      }
    }
    if [prog] =~ /^filterlog$/ {
      mutate {
        remove_field => [ "msg", "datetime" ]
      }
      grok {
        add_tag => [ "firewall" ]
        patterns_dir => ["/etc/logstash/conf.d/patterns"]
        match => [ "message", "%{PFSENSE_LOG_DATA}%{PFSENSE_IP_SPECIFIC_DATA}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}",
                   "message", "%{PFSENSE_IPv4_SPECIFIC_DATA}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}",
                   "message", "%{PFSENSE_IPv6_SPECIFIC_DATA}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}" ]
      }
      mutate {
        lowercase => [ 'proto' ]
      }
      if ![geoip] and [src_ip] !~ /^(10\.|192\.168\.)/ {
        geoip {
          add_tag => [ "GeoIP" ]
          source => "src_ip"
          database => "/etc/logstash/GeoLite2-City.mmdb"
        }
      }
    }
  }
}
output {
  elasticsearch {
    hosts => ["https://192.168.1.111:9200"]
    index => "logstash-%{+YYYY.MM.dd}"
  }
}
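
If it helps narrow things down, my reading of the error is that the parser only accepts input, filter, or output sections (plus comments and whitespace) at the top level, which seems to be what the "Expected one of ..." message is saying. So the plan is to strip the config down to a minimal sketch like the one below (same port and Elasticsearch host as above) and add the filter blocks back one at a time until the error comes back:

input {
  tcp {
    type => "syslog"
    port => 5140
  }
}
output {
  elasticsearch {
    hosts => ["https://192.168.1.111:9200"]
    index => "logstash-%{+YYYY.MM.dd}"
  }
}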
