Added a pipeline, all fields disappear

Hi,

I had Kibana working great! I added an index to pull in data from another source and lost all my visualizations. When I click on the tabs in the overview dashboard they all say "No results displayed because all values equal 0." I have tried searching on specific indices, as well as the ones that were displaying previously, and get nothing.

My visualizations are set to query the elastiflow* index pattern.
My index names are elastiflow-asi-asa%date% and elastiflow-cvh-asa%date%

I am getting data from Logstash, as my indices are growing.
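To make sure both sources are actually landing in indices that the elastiflow* pattern matches, something like this in Kibana Dev Tools (or curl against Elasticsearch) should list them with their doc counts; just a quick sanity check:

# List every index matched by the visualization's index pattern,
# with health, document count and size.
GET _cat/indices/elastiflow*?v&s=index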

Here is an example of the data being pulled in, to show that I am getting something.

{
"_index": "elastiflow-cvh-asa2018.05.08",
"_type": "doc",
"_id": "jzvQP2MBEabB04r_a4Wr",
"_version": 1,
"_score": null,
"_source": {
"@version": "1",
"netflow": {
"event_time_msec": 1525784080181,
"icmp_code": 0,
"rev_flow_delta_bytes": 204,
"xlate_dst_addr_ipv4": "8.8.8.8",
"responderPackets": 1,
"l4_src_port": 9688,
"icmp_type": 0,
"xlate_src_addr_ipv4": "96.10.27.74",
"input_snmp": 3,
"xlate_src_port": 9688,
"flow_seq_num": 93920,
"l4_dst_port": 53,
"conn_id": 51531889,
"ipv4_dst_addr": "8.8.8.8",
"xlate_dst_port": 53,
"fw_event": 2,
"initiatorPackets": 1,
"protocol": 17,
"fw_ext_event": 2020,
"version": 9,
"output_snmp": 2,
"flow_start_msec": 1525784080141,
"fwd_flow_delta_bytes": 30,
"flowset_id": 263,
"ipv4_src_addr": "192.168.1.106"
},
"tags": [
"port_9597"
],
"node": {
"ipaddr": "10.10.1.254",
"hostname": "10.10.1.254"
},
"event": {
"host": "10.10.1.254",
"type": "netflow v9"
},
"flow": {
"geoip": {
"autonomous_system": "private"
}
},
"@timestamp": "2018-05-08T12:54:41.000Z"
},
"fields": {
"netflow.flow_start_msec": [
"2018-05-08T12:54:40.141Z"
],
"netflow.event_time_msec": [
"2018-05-08T12:54:40.181Z"
],
"@timestamp": [
"2018-05-08T12:54:41.000Z"
]
},
"sort": [
1525784081000
]
}

and

{
"_index": "elastiflow-asi-asa-2018.05.08",
"_type": "doc",
"_id": "3z3VP2MBEabB04r_X2m6",
"_version": 1,
"_score": null,
"_source": {
"@version": "1",
"netflow": {
"event_time_msec": 1525784115490,
"icmp_code": 0,
"rev_flow_delta_bytes": 436,
"xlate_dst_addr_ipv4": "40.97.147.194",
"l4_src_port": 55679,
"icmp_type": 0,
"xlate_src_addr_ipv4": "64.246.214.189",
"input_snmp": 4,
"xlate_src_port": 31633,
"flow_seq_num": 7358466,
"l4_dst_port": 443,
"conn_id": 388635648,
"ipv4_dst_addr": "40.97.147.194",
"xlate_dst_port": 443,
"fw_event": 5,
"protocol": 6,
"fw_ext_event": 2031,
"version": 9,
"output_snmp": 3,
"flow_start_msec": 1525784096470,
"fwd_flow_delta_bytes": 0,
"flowset_id": 263,
"ipv4_src_addr": "10.15.41.25"
},
"tags": [
"port_9596"
],
"node": {
"ipaddr": "192.168.200.1",
"hostname": "192.168.200.1"
},
"event": {
"host": "192.168.200.1",
"type": "netflow v9"
},
"flow": {
"geoip": {
"autonomous_system": "private"
}
},
"@timestamp": "2018-05-08T12:55:15.000Z"
},
"fields": {
"netflow.flow_start_msec": [
"2018-05-08T12:54:56.470Z"
],
"netflow.event_time_msec": [
"2018-05-08T12:55:15.490Z"
],
"@timestamp": [
"2018-05-08T12:55:15.000Z"
]
},
"sort": [
1525784115000
]
}

Any ideas????

Hmm, strange, the fields look the same.

How do the fields compare between the two indexes in management? Maybe in one index they are slightly different?
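For example, the field capabilities API will show whether fields like flow.bytes and flow.client_hostname are present (and mapped the same way) across both sets of indices; a quick sketch using the index names from your sample documents:

GET elastiflow*/_field_caps?fields=flow.bytes,flow.client_hostname,flow.geoip.city_name

# Or compare the flow.* mappings of one daily index from each source directly:
GET elastiflow-asi-asa-2018.05.08/_mapping/field/flow.*
GET elastiflow-cvh-asa2018.05.08/_mapping/field/flow.*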

Can you try re-creating the same visualization using each index specifically? Maybe one works and one doesn't and it might give you some ideas as to why they aren't being combined into a single index pattern properly.

When I try to make a visualization (I tried a graph chart), choosing the last 24 hours shows data, but when I change that to 4 hours there is nothing.

Are you sure data actually exists within the last 4 hours? The Discover app may help you see when your data is getting ingested and indexed.
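A quick way to check, independent of any visualization, is a count with a relative time range; a minimal sketch against the same elastiflow* pattern and @timestamp field your documents use:

GET elastiflow*/_count
{
  "query": {
    "range": {
      "@timestamp": { "gte": "now-4h" }
    }
  }
}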

Stacey, my indices are growing and I do have data in the Discover tab.

Can you show data for the last 4 hours but include the columns flow.geoip.city_name and flow.bytes?
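Equivalently, a search that requires flow.bytes to exist should return recent hits if that field is still being populated; a minimal sketch:

GET elastiflow*/_search
{
  "size": 5,
  "_source": [ "@timestamp", "flow.bytes", "flow.geoip.city_name" ],
  "query": {
    "bool": {
      "filter": [
        { "exists": { "field": "flow.bytes" } },
        { "range": { "@timestamp": { "gte": "now-4h" } } }
      ]
    }
  }
}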

You could also check the request and response in the spy panel to see if data is coming back from Elasticsearch.

Stacey,

Where is the spy panel? I do not have that.

This little arrow opens up the spy panel:

I have found some traffic, but only by using the netflow fwd and rev fields. I also went back a couple of days and did not see a flow.bytes field there, even though it was working then.
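Something like these two counts should confirm it: they compare how many recent documents carry the raw netflow delta fields versus the normalized flow.bytes field the visualizations sum (just a sketch, adjust the time range as needed):

# Documents that have the raw ASA byte counters
GET elastiflow*/_count
{
  "query": {
    "bool": {
      "filter": [
        { "exists": { "field": "netflow.fwd_flow_delta_bytes" } },
        { "range": { "@timestamp": { "gte": "now-4h" } } }
      ]
    }
  }
}

# Documents that have the normalized flow.bytes field
GET elastiflow*/_count
{
  "query": {
    "bool": {
      "filter": [
        { "exists": { "field": "flow.bytes" } },
        { "range": { "@timestamp": { "gte": "now-4h" } } }
      ]
    }
  }
}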

Stacey,

Here is the request

{
"size": 0,
"_source": {
"excludes": []
},
"aggs": {
"2": {
"terms": {
"field": "flow.client_hostname",
"size": 50,
"order": {
"1": "desc"
}
},
"aggs": {
"1": {
"sum": {
"field": "flow.bytes"
}
}
}
}
},
"version": true,
"stored_fields": [
""
],
"script_fields": {},
"docvalue_fields": [
"@timestamp",
"netflow.event_time_msec",
"netflow.flow_start_msec"
],
"query": {
"bool": {
"must": [
{
"query_string": {
"query": "
",
"analyze_wildcard": true,
"default_field": ""
}
},
{
"query_string": {
"analyze_wildcard": true,
"query": "
",
"default_field": ""
}
},
{
"range": {
"@timestamp": {
"gte": 1525768914449,
"lte": 1525812114449,
"format": "epoch_millis"
}
}
}
],
"filter": [],
"should": [],
"must_not": []
}
},
"highlight": {
"pre_tags": [
"@kibana-highlighted-field@"
],
"post_tags": [
"@/kibana-highlighted-field@"
],
"fields": {
"
": {}
},
"fragment_size": 2147483647
}
}

And the reply

{
"took": 19378,
"timed_out": false,
"_shards": {
"total": 18,
"successful": 18,
"skipped": 0,
"failed": 0
},
"hits": {
"total": 17900325,
"max_score": 0,
"hits": []
},
"aggregations": {
"2": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": []
}
},
"status": 200
}
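If I read that right, hits.total is about 17.9 million but the terms/sum aggregation produces no buckets, which would mean flow.client_hostname and flow.bytes simply are not present on the documents in that time range. A per-index breakdown of where flow.bytes does exist might narrow it down; a sketch:

GET elastiflow*/_search
{
  "size": 0,
  "query": {
    "exists": { "field": "flow.bytes" }
  },
  "aggs": {
    "per_index": {
      "terms": { "field": "_index", "size": 20 }
    }
  }
}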

Stacey,

I have also tried a pie graph with the slices being source and destination IP address.

Here is the relevant code from my .conf file. It should be converting fwd_flow_delta_bytes and rev_flow_delta_bytes into flow.bytes.

  # Populate normalized ElastiFlow fields with bytes transferred in the flow.

  if [netflow][in_bytes] {
    mutate {
      id => "netflow_9_in_bytes"
      add_field => { "[flow][bytes]" => "%{[netflow][in_bytes]}" }
    }
  } else if [netflow][out_bytes] {
    mutate {
      id => "netflow_9_out_bytes"
      add_field => { "[flow][bytes]" => "%{[netflow][out_bytes]}" }
    }
  } else if [netflow][in_permanent_bytes] {
    mutate {
      id => "netflow_9_in_permanent_bytes"
      add_field => { "[flow][bytes]" => "%{[netflow][in_permanent_bytes]}" }
    }
  } else if [netflow][fwd_flow_delta_bytes] or [netflow][rev_flow_delta_bytes] {
    ruby {
      id => "netflow_9_normalize_bytes_from_fwd_rev_bytes"
      code => "
        event.set( '[flow][bytes]', event.get('[netflow][fwd_flow_delta_bytes]').to_i + event.get('[netflow][rev_flow_delta_bytes]').to_i )
      "
    }
  }
  if [flow][bytes] {
    mutate {
      id => "netflow_9_convert_bytes"
      convert => { "[flow][bytes]" => "integer" }
    }
  }
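Both sample documents above have netflow.fwd_flow_delta_bytes and netflow.rev_flow_delta_bytes in _source but no flow.bytes, which suggests this conditional is not being applied to those events, for example if the new port_9597 input goes through a pipeline or conf path that does not include this filter. A temporary tag would make the unprocessed events easy to spot in Discover; a sketch (the tag name is made up for this test only), placed in the same filter block after the normalization above:

  # Temporary debug: tag events where normalization did not produce flow.bytes.
  if ![flow][bytes] {
    mutate {
      id => "debug_flow_bytes_missing"
      add_tag => [ "flow_bytes_missing" ]
    }
  }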

Stacey,

Here is a request and response from yesterday; I can pull up that data in a graph.

{
"size": 0,
"_source": {
"excludes": []
},
"aggs": {
"2": {
"terms": {
"field": "flow.client_hostname",
"size": 50,
"order": {
"1": "desc"
}
},
"aggs": {
"1": {
"sum": {
"field": "flow.bytes"
}
}
}
}
},
"version": true,
"stored_fields": [
""
],
"script_fields": {},
"docvalue_fields": [
"@timestamp",
"netflow.event_time_msec",
"netflow.flow_start_msec"
],
"query": {
"bool": {
"must": [
{
"query_string": {
"query": "
",
"analyze_wildcard": true,
"default_field": ""
}
},
{
"query_string": {
"analyze_wildcard": true,
"default_field": "
",
"query": ""
}
},
{
"range": {
"@timestamp": {
"gte": 1525665600000,
"lte": 1525812912210,
"format": "epoch_millis"
}
}
}
],
"filter": [],
"should": [],
"must_not": []
}
},
"highlight": {
"pre_tags": [
"@kibana-highlighted-field@"
],
"post_tags": [
"@/kibana-highlighted-field@"
],
"fields": {
"
": {}
},
"fragment_size": 2147483647
}
}

Response

{
"took": 15000,
"timed_out": false,
"_shards": {
"total": 18,
"successful": 18,
"skipped": 6,
"failed": 0
},
"hits": {
"total": 52419268,
"max_score": 0,
"hits": []
},
"aggregations": {
"2": {
"doc_count_error_upper_bound": -1,
"sum_other_doc_count": 3957447,
"buckets": [
{
"1": {
"value": 117663411070
},
"key": "10.15.20.44",
"doc_count": 22838
},
{
"1": {
"value": 16970322328
},
"key": "10.15.43.2",
"doc_count": 398011
},
{
"1": {
"value": 6137660341
},
"key": "71.71.102.134",
"doc_count": 649
},
{
"1": {
"value": 3922909619
},
"key": "52.114.188.27",
"doc_count": 86
},
{
"1": {
"value": 3178533278
},
"key": "10.15.43.93",
"doc_count": 25069
},
{
"1": {
"value": 2842703973
},
"key": "66.76.190.205",
"doc_count": 36824
},
{
"1": {
"value": 2262909881
},
"key": "10.15.100.97",
"doc_count": 15740
},
{
"1": {
"value": 2051331626
},
"key": "10.15.41.10",
"doc_count": 2359618
},
{
"1": {
"value": 2008112993
},
"key": "10.1.1.12",
"doc_count": 58502
},
{
"1": {
"value": 2002242041
},
"key": "24.143.223.90",
"doc_count": 82817
},
{
"1": {
"value": 1617100964
},
"key": "74.202.227.226",
"doc_count": 77556
},
{
"1": {
"value": 1523385499
},
"key": "172.17.200.55",
"doc_count": 9744
},
{
"1": {
"value": 1479248292
},
"key": "10.15.41.9",
"doc_count": 1179
},
{
"1": {
"value": 1471095687
},
"key": "10.15.10.53",
"doc_count": 16883
},
{
"1": {
"value": 1438655810
},
"key": "10.15.41.2",
"doc_count": 23904
},
{
"1": {
"value": 1376762321
},
"key": "204.111.84.66",
"doc_count": 122360
},
{
"1": {
"value": 1326359430
},
"key": "216.201.215.154",
"doc_count": 75463
},
{
"1": {
"value": 1212821578
},
"key": "10.15.20.55",
"doc_count": 2715
},
{
"1": {
"value": 1199431785
},
"key": "10.15.10.52",
"doc_count": 13582
},
{
"1": {
"value": 1170899214
},
"key": "108.169.151.106",
"doc_count": 157234
},
{
"1": {
"value": 1154793312
},
"key": "10.15.41.23",
"doc_count": 11104
},
{
"1": {
"value": 1138719373
},
"key": "10.15.41.37",
"doc_count": 3016
},
{
"1": {
"value": 1029625910
},
"key": "10.15.41.6",
"doc_count": 11431
},
{
"1": {
"value": 1010658626
},
"key": "64.246.214.189",
"doc_count": 16366
},
{
"1": {
"value": 966816900
},
"key": "10.15.10.46",
"doc_count": 16745
},
{
"1": {
"value": 853682208
},
"key": "50.58.80.182",
"doc_count": 33767
},
{
"1": {
"value": 789523579
},
"key": "10.15.10.38",
"doc_count": 20064
},
{
"1": {
"value": 788419290
},
"key": "10.15.41.49",
"doc_count": 13932
},
{
"1": {
"value": 773352856
},
"key": "10.15.43.90",
"doc_count": 30405
},
{
"1": {
"value": 754616693
},
"key": "208.45.245.194",
"doc_count": 71347
},
{
"1": {
"value": 747842952
},
"key": "10.15.43.92",
"doc_count": 25674
}
]
}
},
"status": 200
}
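Since yesterday's aggregation does return buckets, flow.bytes clearly existed at that point; a date histogram over documents where the field exists would show exactly when it stopped being populated. A minimal sketch:

GET elastiflow*/_search
{
  "size": 0,
  "query": {
    "exists": { "field": "flow.bytes" }
  },
  "aggs": {
    "flow_bytes_over_time": {
      "date_histogram": {
        "field": "@timestamp",
        "interval": "1h"
      }
    }
  }
}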

Here is a sample flow from yesterday
{
"_index": "elastiflow-asi-asa-2018.05.07",
"_type": "doc",
"_id": "akhqPGMBxenACWdiGiog",
"_version": 1,
"_score": null,
"_source": {
"event": {
"host": "192.168.200.1",
"type": "netflow v9"
},
"tags": [
"port_9596"
],
"netflow": {
"version": 9,
"protocol": 6,
"flowset_id": 263,
"fw_event": 5,
"xlate_dst_port": 88,
"fw_ext_event": 2031,
"xlate_src_addr_ipv4": "10.15.43.1",
"ipv4_src_addr": "10.15.43.1",
"input_snmp": 4,
"l4_src_port": 64996,
"output_snmp": 3,
"l4_dst_port": 88,
"conn_id": 384495818,
"flow_seq_num": 6520957,
"icmp_type": 0,
"xlate_dst_addr_ipv4": "10.77.25.10",
"event_time_msec": 1525726848977,
"rev_flow_delta_bytes": 152,
"fwd_flow_delta_bytes": 301,
"flow_start_msec": 1525726848577,
"ipv4_dst_addr": "10.77.25.10",
"icmp_code": 0,
"xlate_src_port": 64996
},
"node": {
"ipaddr": "192.168.200.1",
"hostname": "192.168.200.1"
},
"flow": {
"geoip": {
"autonomous_system": "private"
}
},
"@version": "1",
"@timestamp": "2018-05-07T21:00:49.000Z"
},
"fields": {
"netflow.flow_start_msec": [
"2018-05-07T21:00:48.577Z"
],
"netflow.event_time_msec": [
"2018-05-07T21:00:48.977Z"
],
"@timestamp": [
"2018-05-07T21:00:49.000Z"
]
},
"sort": [
1525726849000
]
}

Stacey,

Any ideas?

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.