Trying to figure out an issue with the file input
Logstash just doesn't seem to be reading the file. It's a Suricata eve.json file:
- Permissions look fine (the file is world readable).
- The verbose output says it registers the file.
- No events reach Elasticsearch.
- No output to standard out either.
Verbose startup output:

{:timestamp=>"2016-08-15T17:15:16.313000-0700", :message=>"starting agent", :level=>:info}
{:timestamp=>"2016-08-15T17:15:16.318000-0700", :message=>"starting pipeline", :id=>"main", :level=>:info}
{:timestamp=>"2016-08-15T17:15:16.663000-0700", :message=>"Registering file input", :path=>["/var/log/suricata/eve.json"], :level=>:info}
{:timestamp=>"2016-08-15T17:15:16.776000-0700", :message=>"** WARNING ** Detected UNSAFE options in elasticsearch output configuration!\n** WARNING ** You have enabled encryption but DISABLED certificate verification.\n** WARNING ** To make sure your data is secure change :ssl_certificate_verification to true", :level=>:warn}
{:timestamp=>"2016-08-15T17:15:17.239000-0700", :message=>"Using mapping template from", :path=>nil, :level=>:info}
{:timestamp=>"2016-08-15T17:15:17.453000-0700", :message=>"Attempting to install template", :manage_template=>{"template"=>"logstash-", "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "omit_norms"=>true}, "dynamic_templates"=>[{"message_field"=>{"match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"string", "index"=>"analyzed", "omit_norms"=>true, "fielddata"=>{"format"=>"disabled"}}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"string", "index"=>"analyzed", "omit_norms"=>true, "fielddata"=>{"format"=>"disabled"}, "fields"=>{"raw"=>{"type"=>"string", "index"=>"not_analyzed", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"string", "index"=>"not_analyzed"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"float"}, "longitude"=>{"type"=>"float"}}}}}}}, :level=>:info}
{:timestamp=>"2016-08-15T17:15:17.832000-0700", :message=>"New Elasticsearch output", :class=>"LogStash::Outputs::ElasticSearch", :hosts=>["10.136.255.160:9200"], :level=>:info}
{:timestamp=>"2016-08-15T17:15:17.943000-0700", :message=>"Starting pipeline", :id=>"main", :pipeline_workers=>2, :batch_size=>125, :batch_delay=>5, :max_inflight=>250, :level=>:info}
{:timestamp=>"2016-08-15T17:15:17.983000-0700", :message=>"Pipeline main started"}
# ls -l /var/log/suricata/eve.json
-rw-r--r--. 1 suricata suricata 498738317 Aug 15 17:18 /var/log/suricata/eve.json
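A few more checks I'm planning to run while I wait for ideas. Assumptions on my part: the Logstash service runs as the "logstash" user, the box is SELinux-enabled (the trailing "." on the permissions string above means the file carries an SELinux context), and the sincedb path is the one from my input config below.

# is the file still growing, and can the logstash user actually read it?
tail -n 1 /var/log/suricata/eve.json
sudo -u logstash head -c 200 /var/log/suricata/eve.json

# what has Logstash recorded about this file so far?
cat /var/lib/logstash/suricata.sincedb
stat -c '%i %s' /var/log/suricata/eve.json    # inode and size to compare against the sincedb entry

# rule out SELinux denials
getenforce
ausearch -m avc -ts recent    # needs the audit tools installed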
# cat /etc/logstash/conf.d/10-ids-suricata-input.conf
input {
  file {
    path => ["/var/log/suricata/eve.json"]
    sincedb_path => ["/var/lib/logstash/suricata.sincedb"]
    start_position => "beginning"
    codec => "json"
    type => "Suricata"
    tags => ["Suricata", "IDS"]
  }
}
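One thing I'm not sure about: as far as I understand, start_position => "beginning" only applies to files that aren't already in the sincedb, so if /var/lib/logstash/suricata.sincedb already has an entry pointing at the end of eve.json, Logstash would just sit there waiting for new lines. To rule that out I'm thinking of running a throwaway test like the sketch below, with stdout only and the sincedb pointed at a fresh path (the /tmp filename is just something I made up for the test):

input {
  file {
    path => ["/var/log/suricata/eve.json"]
    # fresh sincedb so start_position => "beginning" actually takes effect
    sincedb_path => "/tmp/suricata-test.sincedb"
    start_position => "beginning"
    codec => "json"
    type => "Suricata"
  }
}

output {
  stdout { codec => rubydebug }
}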
# cat /etc/logstash/conf.d/50-ids-suricata-output.conf
output {
  if [type] == "Suricata" {
    elasticsearch {
      hosts => ["10.10.10.12:9200"]
      ssl => true
      ssl_certificate_verification => false
      keystore => "/var/certs/elkstack.jks"
      keystore_password => "password"
      user => "user"
      password => "password"
      index => "unv_suricata-%{+YYYY.MM.dd}"
    }
    stdout {
      codec => rubydebug
    }
  }
}
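For completeness, this is roughly how I'm running it while debugging. The binary path assumes the standard 2.x package install under /opt/logstash, so adjust if yours lives elsewhere; --configtest, -f and --debug are the stock flags as far as I know:

# syntax-check everything in conf.d first
/opt/logstash/bin/logstash --configtest -f /etc/logstash/conf.d/

# then run in the foreground with extra logging to watch the file input
/opt/logstash/bin/logstash --debug -f /etc/logstash/conf.d/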
I have other TCP/UDP inputs and the elasticsearch output working fine, but I've stripped things down to just this for the moment while trying to find the issue.
Any ideas would be appreciated.