Filebeat > elasticsearch to filebeat > logstash > elasticsearch (syslog)

Hi there.

I was using the syslog module from Filebeat to send data to Elasticsearch.
In Kibana I could see my data with all the additional fields (e.g. system.syslog.hostname, system.syslog.message, system.syslog.pid, etc.).

Now I have to monitor another file with the same Filebeat.
I use Logstash to parse that file and redirect the syslog events to the Filebeat index, but I lost all the syslog fields.
How can I keep the syslog fields produced by Filebeat's syslog module in my Logstash/Elasticsearch setup?

Here is my pipeline:

# Receive events shipped by Filebeat over the Beats protocol.
input {
  beats {
    port => "5043"
  }
}

# Only events tagged cpu_log get parsed here; all other events pass
# through the filter stage untouched.
filter {
  if [type] == "cpu_log" {
    grok {
      match => {
        "message" => "%{SYSLOGBASE} %{NUMBER:status:int}"
      }
    }
  }
}

output {
  # cpu_log events go to the default logstash-* index.
  if [type] == "cpu_log" {
    elasticsearch {
      hosts => ["my_elk:9200"]
    }
  } else {
    # Everything else is written to the daily Filebeat index so it lands
    # where the Filebeat dashboards expect it.
    elasticsearch {
      hosts => ["my_elk:9200"]
      index => "filebeat-%{+YYYY.MM.dd}"
    }
  }
}

Thanks in advance.

Did you find a solution?

I think I have found the beginning of a solution.
I found in the documentation the `pipeline` attribute of the Elasticsearch output (in the Logstash configuration). I am trying to use it to apply the right ingest pipeline to my different logs, but I get an error at that step.

WARN logstash.outputs.elasticsearch - Failed action. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"filebeat-2017.05.30", :_type=>"log", :_routing=>nil, :pipeline=>"filebeat-5.4.0-system-auth-pipeline"}, ******************, :response=>{"index"=>{"_index"=>"filebeat-2017.05.30", "_type"=>"log", "_id"=>nil, "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"pipeline with id [filebeat-5.4.0-system-auth-pipeline] does not exist"}}}}

It says the pipeline with id "filebeat-5.4.0-system-auth-pipeline" does not exist. But in the Kibana Dev Tools the request `GET _ingest/pipeline/filebeat-5.4.0-system-auth-pipeline` returns the pipeline.

I'm a little lost...
Here is my new Logstash pipeline:

# Receive events shipped by Filebeat over the Beats protocol.
input {
  beats {
    port => "5043"
  }
}

filter {
  # Parse the custom cpu_log format locally; syslog/auth events are left
  # intact so the Elasticsearch ingest pipelines can parse them instead.
  if [type] == "cpu_log" {
    grok {
      match => {
        "message" => "%{SYSLOGTIMESTAMP} %{HOSTNAME} cpu_status: %{NUMBER:stats_cpu:int} memory_status: %{NUMBER:stats_mem:int} disk_status: %{NUMBER:stats_disk:int} ssh_status: %{NUMBER:stats_ssh:int} io_status: %{NUMBER:stats_io:int}"
      }
    }
  }
}

output {
  if [type] == "cpu_log" {
    elasticsearch {
      # Use the same host alias as every other output in this config
      # (the original had a raw IP here, apparently a paste leftover).
      hosts => ["my_elk:9200"]
    }
  } else if [source] == "/var/log/syslog" {
    elasticsearch {
      hosts => ["my_elk:9200"]
      index => "filebeat-%{+YYYY.MM.dd}"
      # Ingest pipeline installed by the Filebeat system module; it must
      # exist on the cluster these events are indexed into, otherwise
      # indexing fails with a 400 "pipeline ... does not exist" error.
      pipeline => "filebeat-5.4.0-system-syslog-pipeline"
    }
  } else if [source] == "/var/log/auth.log" {
    elasticsearch {
      hosts => ["my_elk:9200"]
      index => "filebeat-%{+YYYY.MM.dd}"
      pipeline => "filebeat-5.4.0-system-auth-pipeline"
    }
  }
  # NOTE(review): events matching none of the branches above are silently
  # dropped; add a catch-all else { } output if that is not intended.
}

I finally reproduced the Elasticsearch ingest node pipelines for syslog inside a grok filter in Logstash, and it works.

If someone is interested, here is my new Logstash pipeline:

# Receive events shipped by Filebeat over the Beats protocol.
input {
      beats {
            port => "5043"
      }
}

# Replicates the Filebeat 5.4 system-module ingest pipelines (syslog and
# auth) as Logstash grok filters, so events can be parsed here instead of
# by an Elasticsearch ingest node.
# NOTE(review): the dotted names below (e.g. system.syslog.timestamp)
# create flat event fields whose names contain literal dots, not nested
# [system][syslog] fields; Elasticsearch maps them to the same object
# paths the Filebeat ingest pipeline produces — confirm in your mapping.
filter {
       # Custom application log shipped with type == "cpu_log".
       if [type] == "cpu_log" {
          grok {
               match => {
                     "message" => "%{SYSLOGTIMESTAMP} %{HOSTNAME} cpu_status: %{NUMBER:stats_cpu:int} memory_status: %{NUMBER:stats_mem:int} disk_status: %{NUMBER:stats_disk:int} ssh_status: %{NUMBER:stats_ssh:int} io_status: %{NUMBER:stats_io:int}"
               }
          }
        } else if [source] == "/var/log/syslog" {
                # Equivalent of the filebeat-5.4.0-system-syslog-pipeline:
                # first pattern matches lines with a program[pid] prefix,
                # the second is a fallback for timestamp-only lines.
                grok {
                    match => {
                          "message" => ["%{SYSLOGTIMESTAMP:system.syslog.timestamp} %{SYSLOGHOST:system.syslog.hostname} %{DATA:system.syslog.program}(?:\[%{POSINT:system.syslog.pid}\])?: %{GREEDYMULTILINE:system.syslog.message}",
                                       "%{SYSLOGTIMESTAMP:system.syslog.timestamp} %{GREEDYMULTILINE:system.syslog.message}"]
                    }
                    # GREEDYDATA does not cross newlines; this custom pattern
                    # also captures multiline message bodies.
                    pattern_definitions => { "GREEDYMULTILINE" => "(.|\n)*" }
                    # Drop the raw line once it has been parsed.
                    remove_field => ["message"]
                }
        } else if [source] == "/var/log/auth.log" {
               # Equivalent of the filebeat-5.4.0-system-auth-pipeline:
               # patterns are tried in order — sshd accept/fail, sshd user
               # events, dropped connections, sudo, groupadd, useradd, and
               # finally a generic program[pid] fallback.
               grok {
                    match => {
                        "message" => ["%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} sshd(?:\[%{POSINT:system.auth.pid}\])?: %{DATA:system.auth.ssh.event} %{DATA:system.auth.ssh.method} for (invalid user )?%{DATA:system.auth.user} from %{IPORHOST:system.auth.ssh.ip} port %{NUMBER:system.auth.ssh.port} ssh2(: %{GREEDYDATA:system.auth.ssh.signature})?",
                                       "%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} sshd(?:\[%{POSINT:system.auth.pid}\])?: %{DATA:system.auth.ssh.event} user %{DATA:system.auth.user} from %{IPORHOST:system.auth.ssh.ip}",
                                       "%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} sshd(?:\[%{POSINT:system.auth.pid}\])?: Did not receive identification string from %{IPORHOST:system.auth.ssh.dropped_ip}",
                                       "%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} sudo(?:\[%{POSINT:system.auth.pid}\])?: \s*%{DATA:system.auth.user} :( %{DATA:system.auth.sudo.error} ;)? TTY=%{DATA:system.auth.sudo.tty} ; PWD=%{DATA:system.auth.sudo.pwd} ; USER=%{DATA:system.auth.sudo.user} ; COMMAND=%{GREEDYDATA:system.auth.sudo.command}",
                                       "%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} groupadd(?:\[%{POSINT:system.auth.pid}\])?: new group: name=%{DATA:system.auth.groupadd.name}, GID=%{NUMBER:system.auth.groupadd.gid}",
                                       "%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} useradd(?:\[%{POSINT:system.auth.pid}\])?: new user: name=%{DATA:system.auth.useradd.name}, UID=%{NUMBER:system.auth.useradd.uid}, GID=%{NUMBER:system.auth.useradd.gid}, home=%{DATA:system.auth.useradd.home}, shell=%{DATA:system.auth.useradd.shell}$",
                                       "%{SYSLOGTIMESTAMP:system.auth.timestamp} %{SYSLOGHOST:system.auth.hostname} %{DATA:system.auth.program}(?:\[%{POSINT:system.auth.pid}\])?: %{GREEDYMULTILINE:system.auth.message}"]
                    }
                    pattern_definitions => { "GREEDYMULTILINE" => "(.|\n)*" }
                    remove_field => ["message"]
               }
               # Enrich SSH source addresses with GeoIP data, mirroring the
               # geoip processor of the auth ingest pipeline.
               geoip {
                    source => "system.auth.ssh.ip"
                    target => "system.auth.ssh.geoip"
               }
        }
}

output {
        # cpu_log events go to the default logstash-* index; everything
        # else is written to the daily Filebeat index.
        if [type] == "cpu_log" {
               elasticsearch {
                        hosts => ["my_elk:9200"]
               }
        } else {
               elasticsearch {
                        hosts => ["my_elk:9200"]
                        index => "filebeat-%{+YYYY.MM.dd}"
                 }
        }
}

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.