Parsing MongoDB logs with Logstash

Hi,

I am running Logstash 6.8 and trying to parse MongoDB logs, whose format is well specified here.

Is there a more elegant or efficient way to parse MongoDB logs? Here is my current filter:

    filter {
        if [fields][application] == 'mongodb' {
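            # Parse the outer JSON envelope (log / stream / time fields) into json_msg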
            json {
                source => "message"
                target => "json_msg"
            }
            if "_jsonparsefailure" not in [tags] {
                grok { match => { "[json_msg][log]" => "^([^,]*),(?<s>[^,]*),(?<c>[^,]*),(?<i>[^,]*),(?<ctx>[^,]*),(?<m>[^,]*),(?<attr>[^,]*),(?<ci>[^,]*),%{GREEDYDATA:cc}"} }
                grok { match => { "s" => "^[^:]*[^\"]*\"(?<level>[^\"]*)" } }
                grok { match => { "c" => "^[^:]*[^\"]*\"(?<component>[^\"]*)" } }
                grok { match => { "ctx" => "^[^:]*[^\"]*\"(?<context>[^\"]*)" } }
                grok { match => { "i" => "^[^:]*:(?<id>[\d]*)" } }
                grok { match => { "m" => "^[^:]*[^\"]*\"(?<msg>[^\"]*)" } }
                grok { match => { "attr" => "^[^:]*[^\"]*\"(?<attribute>[^\"]*)" } }
                grok { match => { "ci" => "^[^:]*:(?<connection_id>[\d]*)" } }
                grok { match => { "cc" => "^[^:]*:(?<connection_count>[\d]*)" } }
                mutate {
                    replace  => { "[@metadata][timestamp]" => "%{[json_msg][time]}"}
                    add_field => {"stream" => "%{[json_msg][stream]}"}
                    remove_field => ["s", "c", "ctx", "i", "m", "attr", "ci", "cc"]
                }
            }
        }
    }
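
For context, one alternative I am considering is to run a second json filter over [json_msg][log] instead of all the grok patterns, since the inner log line already looks like structured JSON (the s, c, id, ctx, msg, and attr keys my groks pull out, as in MongoDB 4.4+ structured logging). This is a rough, untested sketch of that idea; the mongo target and the renamed field names are only illustrative:

    filter {
        if [fields][application] == 'mongodb' {
            # Outer JSON envelope (log / stream / time fields)
            json {
                source => "message"
                target => "json_msg"
            }
            if "_jsonparsefailure" not in [tags] {
                # Parse the inner line as JSON in one step instead of one grok per field
                # (assumes the log line is MongoDB 4.4+ structured JSON)
                json {
                    source => "[json_msg][log]"
                    target => "mongo"
                }
                mutate {
                    replace => { "[@metadata][timestamp]" => "%{[json_msg][time]}" }
                    add_field => { "stream" => "%{[json_msg][stream]}" }
                    # Illustrative renames; adjust to the fields you actually need
                    rename => {
                        "[mongo][s]" => "level"
                        "[mongo][c]" => "component"
                        "[mongo][ctx]" => "context"
                        "[mongo][id]" => "id"
                        "[mongo][msg]" => "msg"
                    }
                }
            }
        }
    }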

Thanks!
