As the title says, when I use Logstash to parse NetFlow and ingest the flow data into Elasticsearch, I found that the documents in ES are missing the "OUT_BYTES" field.
This is a document in ES:
{
  "_index": "netflow-%{year}-%{month}-%{day}",
  "_type": "doc",
  "_id": "QnjmS2EBwY8ORgd4n7yl",
  "_version": 1,
  "_score": null,
  "_source": {
    "netflow": {
      "in_pkts": 13,
      "version": 5,
      "src_tos": 0,
      "engine_type": 1,
      "ipv4_next_hop": "120.127.163.193",
      "dst_as": 0,
      "dst_mask": 0,
      "sampling_algorithm": 0,
      "src_mask": 0,
      "flow_records": 24,
      "output_snmp": 0,
      "last_switched": "2018-01-31T10:58:40.971Z",
      "first_switched": "2018-01-31T10:58:40.971Z",
      "ipv4_dst_addr": "120.127.163.193",
      "tcp_flags": 0,
      "ipv4_src_addr": "216.58.200.234",
      "protocol": 6,
      "src_as": 0,
      "input_snmp": 191,
      "l4_dst_port": 10029,
      "sampling_interval": 0,
      "l4_src_port": 443,
      "in_bytes": 5548,
      "flow_seq_num": 1259678413,
      "engine_id": 2
    },
    "@timestamp": "2018-01-31T10:58:44.972Z",
    "host": "120.127.163.4",
    "@version": "1"
  },
  "fields": {
    "netflow.first_switched": [
      "2018-01-31T10:58:40.971Z"
    ],
    "@timestamp": [
      "2018-01-31T10:58:44.972Z"
    ],
    "netflow.last_switched": [
      "2018-01-31T10:58:40.971Z"
    ]
  },
  "sort": [
    1517396324972
  ]
}
Logstash config:
input {
  udp {
    host => "120.127.XXX.XX"
    port => 5556
    codec => netflow
  }
}

filter {
}

output {
  elasticsearch {
    hosts => ["120.127.XXX.XX:9200"]
    index => "netflow-%{year}-%{month}-%{day}"
  }
  stdout { codec => rubydebug }
}
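Side note, separate from the OUT_BYTES question: the _index in the document above is the literal string netflow-%{year}-%{month}-%{day}, because the empty filter block never creates year, month, or day fields for the index sprintf references to resolve. A minimal sketch of one way to get daily indices, assuming the index name should be derived from each event's @timestamp:

output {
  elasticsearch {
    hosts => ["120.127.XXX.XX:9200"]
    # %{+YYYY}, %{+MM} and %{+dd} are Logstash time-format references
    # that format the event's @timestamp, so no extra fields are needed
    index => "netflow-%{+YYYY}-%{+MM}-%{+dd}"
  }
  stdout { codec => rubydebug }
}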
Thank you in advance.