Forwarding Rsyslog to Logstash

Hello,

I am forwarding rsyslog output to Logstash, and I can confirm traffic is arriving on port 5001 on the Logstash server using the tcpdump command:

tcpdump -Xni ens192 port 5001 and src <linux_server_IP>

However, I am not seeing this server's syslog events in Kibana, even though I can see the syslogs from all other servers there. They are all using the same input, filter, and output Logstash configuration files, which tells me there is nothing wrong with the Logstash conf files themselves.

I have also allowed port 5001 in SELinux, using the command:

semanage port -m -t syslogd_port_t -p tcp 5001

My /etc/rsyslog.conf is:

# Emit each syslog event as a one-line JSON object whose "rawmsg" value is an
# RFC 5424-formatted message, so Logstash's json codec decodes the envelope and
# the SYSLOG5424LINE grok pattern can parse rawmsg.
# FIX: RFC 5424 header order is PRI VERSION TIMESTAMP HOSTNAME APP-NAME PROCID
# MSGID STRUCTURED-DATA MSG. The original template inserted
# %SYSLOGFACILITY-TEXT% between PROCID and MSGID, which breaks the
# SYSLOG5424LINE grok match downstream — that token has been removed.
$template jsonRfc5424Template,"{\"type\":\"syslog\",\"host\":\"%HOSTNAME%\",\"logsource\":\"rsyslog\",\"rawmsg\":\"<%PRI%>1 %TIMESTAMP:::date-rfc3339% %HOSTNAME% %APP-NAME% %PROCID% %MSGID% %STRUCTURED-DATA% %msg:::json%\"}\n"
# "@@" forwards over TCP, matching the Logstash tcp { port => 5001 } input.
*.* @@logstash_server:5001;jsonRfc5424Template

/etc/rsyslog.d/20-logstash-rsyslog.conf

# Forward everything except auth/authpriv to Logstash over UDP.
# FIX: the original selector "*.*;syslog;auth,authpriv.none" is invalid —
# each ';'-separated selector must be facility.priority, and bare "syslog"
# has no priority part. "*.*;auth,authpriv.none" selects everything while
# excluding the auth and authpriv facilities.
# FIX: without an explicit Template, omfwd sends the default syslog wire
# format, which the Logstash udp input's json codec cannot decode; reuse the
# JSON template defined in /etc/rsyslog.conf.
*.*;auth,authpriv.none action(
  type="omfwd"
  Target="logstash_server"
  Port="5001"
  Protocol="udp"
  Template="jsonRfc5424Template"
)

My logstash input conf file is:

# Accept JSON-encoded events from rsyslog on port 5001 over both TCP and UDP.
# Each input tags the event and stamps a "service" field so later pipeline
# stages can route it.
input {
  tcp {
    port      => 5001
    codec     => json
    tags      => ["linux"]
    add_field => { "service" => "linux" }
  }
  udp {
    port      => 5001
    codec     => json
    tags      => ["linux"]
    add_field => { "service" => "linux" }
  }
}

logstash filter conf is:

filter {
  # Only process events that came from the linux tcp/udp inputs.
  if "linux" in [service] {
    # Parse the RFC 5424 line carried in rawmsg; (?m) lets MSG span newlines.
    grok {
      match => { "rawmsg" => "(?m)%{SYSLOG5424LINE}" }
    }
    # Decode the numeric PRI into syslog_facility/syslog_severity fields.
    syslog_pri { }
    if !("_grokparsefailure" in [tags]) {
      # Promote the parsed syslog5424_* fields to canonical names.
      mutate {
        replace => [ "message", "%{syslog5424_msg}" ]
        replace => [ "timestamp", "%{syslog5424_ts}" ]
        replace => [ "priority", "%{syslog5424_pri}" ]
        replace => [ "program", "%{syslog5424_app}" ]
        replace => [ "facility", "%{syslog_facility}" ]
        replace => [ "severity", "%{syslog_severity}" ]
        replace => [ "received_at", "%{@timestamp}" ]
      }
      # Rename host (avoids clashing with downstream conventions) and drop
      # the intermediate fields now that their values have been copied.
      # FIX: the original block contained the same host -> host_name rename
      # twice inside this mutate; the duplicate has been removed.
      mutate {
        rename => { "host" => "host_name" }
        remove_field => [ "syslog5424_host", "syslog5424_msg", "syslog5424_ts", "syslog5424_pri", "syslog5424_app", "syslog5424_proc", "syslog5424_ver", "syslog_facility", "syslog_facility_code", "syslog_severity", "syslog_severity_code" ]
      }
    }
  }
}

logstash output conf is:

# Ship successfully parsed linux events to Elasticsearch, and keep a per-host
# flat-file copy on disk.
output {
  if "linux" in [service] and "_grokparsefailure" not in [tags] {
    elasticsearch {
      hosts                        => ["elasticserver:9200"]
      user                         => "**********"
      password                     => "***********"
      index                        => "logstash-syslog-%{+YYYY.MM}"
      template                     => "/etc/logstash/templates.d/linux.json"
      template_name                => "linux"
      template_overwrite           => true
      ssl                          => true
      cacert                       => "/etc/logstash/certs/ca.crt"
      # NOTE(review): certificate verification is disabled even though a CA
      # cert is supplied — consider enabling it once the chain is trusted.
      ssl_certificate_verification => false
    }

    # One log file per source host, per month, keyed by the RFC 5424 MSGID.
    file {
      path => "/srv/log/rsyslog/%{host_name}/%{+YYYY-MM}-%{syslog5424_msgid}.log"
    }
  }
}

Any ideas?
Thanks.

Okay, I think I have just resolved the issue. I updated the tcp and udp input plugins using:

/usr/share/logstash/bin/logstash-plugin update logstash-input-tcp

AND

/usr/share/logstash/bin/logstash-plugin update logstash-input-udp

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.