Hi,
I am trying to create a vritualization for adit.log. I have the config file as follows.
input {
file {
path => [ "/var/log/audit/audit.log" ]
start_position => beginning
sincedb_path => "/dev/null"
ignore_older => 0
}
}
filter {
grok {
match => { "pattern" => "type=%{WORD:audit_type} msg=audit\(%{NUMBER:audit_epoch}:%{NUMBER:audit_counter}\): pid=%{NUMBER:audit_pid} uid=%{NUMBER:audit_uid} auid=%{NUMBER:audit_audid} ses=%{NUMBER:ses} subj=%{GREEDYDATA:subj} msg=\'op=%{WORD:operation}:%{WORD:detail_operation} grantors=%{WORD:grnt}.%{WORD:detail_grnt} acct=\"%{WORD:acct_user}\" exe=\"%{GREEDYDATA:exec}\" hostname=%{GREEDYDATA:hostname} addr=%{GREEDYDATA:ipaddr} terminal=%{WORD:terminal} res=%{WORD:result}\'" }
add_tag => "selinux_audit"
}
}
output {
elasticsearch {
hosts => ["localhost:9200"]
index => "logs"
}
}
I was seeing_grokparsefailure
tags all over Kibana dashboard.
When I am trying to create an index. I am not able to pull the fields.
When I checked the logstash logs, this is what I can see.
[2017-04-05T08:56:47,063][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2017-04-05T08:56:47,098][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword"}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2017-04-05T08:56:47,104][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>[#<URI::Generic:0x773fbda1 URL://40.71.198.172:9200>]}
[2017-04-05T08:56:47,172][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>250}
[2017-04-05T08:56:47,470][INFO ][logstash.pipeline ] Pipeline main started
[2017-04-05T08:56:47,545][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9604}
Kindly help
TIA.