I am using Logstash to ingest Nmap scan data into Elasticsearch. Here is my config:
input {
  http {
    host  => "127.0.0.1"
    port  => 8000
    codec => nmap
  }
}

filter {
  date {
    match  => [ "start_time", "ISO8601" ]
    target => "@timestamp"
  }
}

output {
  stdout { codec => rubydebug }
}
With the above config, @timestamp is never updated to match start_time.
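For reference, this is the variant of the filter I would try next: the same date filter with tag_on_failure spelled out explicitly (the tag below is just the filter's default), so that a parse failure would at least show up as a tag in the rubydebug output. This is only a sketch of the same filter, not a fix:

filter {
  date {
    match          => [ "start_time", "ISO8601" ]
    target         => "@timestamp"
    # default failure tag, made explicit so a failed parse is easy to spot in stdout
    tag_on_failure => [ "_dateparsefailure" ]
  }
}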
{ "start_time" => "2016-02-12T19:40:33.000Z", "end_time" => "2016-02-12T21:19:44.000Z", "addresses" => [ [0] { :type => "ipv4", :addr => "192.168.30.224" } ], "address" => "192.168.30.224", "ip" => "192.168.30.224", "ipv4" => "192.168.30.224", "ipv6" => nil, "mac" => nil, "status" => { "state" => "up", "reason" => "timestamp-reply" }, "hostname" => nil, "uptime" => nil, "os" => nil, "arguments" => "/usr/bin/nmap -T5 -oX /home/NMAP/20160212_1200__INT_DEV.xml -oG /home/NMAP/20160212_1200__INT_DEV.gnmap -iL /home/NMAP/ip-list.txt", "version" => "6.47", "scan_id" => "bc9e8106-4536-412e-b44a-1de67451318d", "type" => "nmap_port", "port" => { "number" => 445, "reason" => "syn-ack", "protocol" => "tcp", "service" => { "name" => "microsoft-ds", "ssl" => false, "protocol" => nil, "product" => nil, "hostname" => nil, "device_type" => nil, "fingerprint_method" => "table", "fingerprint" => nil, "confidence" => 3 }, "state" => "open" }, "scan_host_id" => "bc9e8106-4536-412e-b44a-1de67451318d-h663", "id" => "bc9e8106-4536-412e-b44a-1de67451318d-h663-p2", "@version" => "1", "@timestamp" => "2016-02-15T22:46:50.986Z",
Any ideas? I'm sure I'm missing something. The one example I can find of using the nmap codec is on Elastic's site, but it does not do the timestamp match.
Thanks