The [string] field is deprecated

Hi,

I've spent most of the day trying to nail down this problem. I have a conf file and a template set up for our firewall. It works well enough: logs are entering ES and I can use Kibana to query them and display graphs. However, I also wanted to make use of Grafana, but it's struggling to pull the same data that Kibana manages to pull. This led me to look into the logs again, and I've found that the Elasticsearch log is kicking out repeated entries stating:

[2017-05-24T19:40:49,786][WARN ][o.e.d.i.m.StringFieldMapper$TypeParser] The [string] field is deprecated, please use [text] or [keyword] instead on [column56]
[2017-05-24T19:40:49,786][WARN ][o.e.d.i.m.StringFieldMapper$TypeParser] The [string] field is deprecated, please use [text] or [keyword] instead on [keyword]

I understand it's referencing two of the column headers from the GREEDYDATA, but what I don't understand is why it's failing. I've amended the template file as much as I can to be compatible with the latest version of ES (the version I'm currently running), but I've clearly missed something. I have no reference to "string" in my JSON template other than for match_mapping_type, which I believe is correct; I've replaced them all with the newer keyword and text entries.
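
For reference, this is the shape of mapping the warning is complaining about, next to its 5.x replacements (a minimal sketch; "SomeField" is just a placeholder, not a field from my template):

"SomeField" : { "type" : "string" }      <- pre-5.x syntax, now deprecated
"SomeField" : { "type" : "text" }        <- 5.x replacement for analysed (full-text) strings
"SomeField" : { "type" : "keyword" }     <- 5.x replacement for not_analyzed (exact-match) strings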

Would anyone mind giving my template a look over and pointing out where this error is being generated?

{
  "template" : "palo-firewall-*",
  "settings" : {
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
       "_all" : {"enabled" : true},
       "dynamic_templates" : [ {
         "message_field" : {
           "match" : "message",
           "match_mapping_type" : "string",
           "mapping" : {
             "type" "text", "index" : true, "omit_norms" : true
           }
         }
       }, {
         "strings" : {
           "match" : "*",
           "match_mapping_type" : "string",
           "mapping" : {
             "type" "text", "index" : true, "omit_norms" : true,
               "fields" : {
                 "text" : {"type" "keyword", "index" : true, "ignore_above" : 256}
               }
           }
         }
       } ],
       "properties" : {
         "@version": { "type": "keyword", "index": true},
         "geoip"  : {
           "type" : "object",
             "dynamic": true,
             "properties" : {
               "location" : { "type" : "geo_point" }
             }
         },
         "SourceGeo"  : {
           "type" : "object",
             "dynamic": true,
             "properties" : {
               "location" : { "type" : "geo_point" }
             }
         },
         "DestinationGeo"  : {
           "type" : "object",
             "dynamic": true,
             "properties" : {
               "location" : { "type" : "geo_point" }
             }
         }
       }
    }
  }
}
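
(In case it's relevant: I understand templates are only applied when a new index is created, so existing indices keep their old mappings. I re-install the template after edits with something like the following, where "palo-template.json" is the file above and "palo-firewall" is just the name I chose for it:)

curl -XPUT 'http://localhost:9200/_template/palo-firewall?pretty' -H 'Content-Type: application/json' -d @palo-template.json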

Thanks for your help.

It's not a failure, it's a warning.

Look at the actual mapping that has been applied; can you share that?
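
For example, something like this (assuming ES is on localhost:9200; adjust the index pattern and template name to whatever you actually used):

curl -XGET 'http://localhost:9200/palo-firewall-*/_mapping?pretty'
curl -XGET 'http://localhost:9200/_template/palo-firewall?pretty'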

Hi Mark,

Thanks for your reply. Granted, it's not a failure. However, I'd like to fix it for two reasons: 1) I'm writing the ES logs out to a text file, and with this warning appearing on every input into ES that file is getting huge! 2) If I can resolve all the warnings, it should hopefully make it easier to work out why Grafana can't graph anything.
Happy to share whatever is required to find a fix. I don't want to fill the post with config files, so I'll send what I believe is relevant; ask for more if you need it or if I've misunderstood what you're looking for.
Here's my filter (excluding the geo-source references):

filter {
  if [type] == "paloalto" {
    grok {
      #strips timestamp and host off of the front of the syslog message leaving the raw message generated by the syslog client and saves it as "raw_message"
      #patterns_dir => "C:\ELK\logstash\config\patterns"
      match => [ "message", "%{SYSLOGTIMESTAMP:@timestamp} %{HOSTNAME} %{POSINT},%{GREEDYDATA:raw_message}" ]
    }
  }

    csv {
      source => "raw_message"
      columns => [ "ReceiveTime","SerialNum","Type","Threat-ContentType","ConfigVersion","GenerateTime","SourceAddress","DestinationAddress","NATSourceIP","NATDestinationIP","Rule","SourceUser","DestinationUser","Application","VirtualSystem","SourceZone","DestinationZone","InboundInterface","OutboundInterface","LogAction","TimeLogged","SessionID","RepeatCount","SourcePort","DestinationPort","NATSourcePort","NATDestinationPort","Flags","IPProtocol","Action","Bytes","BytesSent","BytesReceived","Packets","StartTime","ElapsedTimeInSec","Category","Padding","seqno","actionflags","SourceCountry","DestinationCountry","cpadding","pkts_sent","pkts_received" ]
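      # Note: if a row contains more values than there are names in "columns" above
      # (45 names here), the csv filter auto-names the extras "columnN"
      # (column46, column47, ...), which is where the columnNN fields come from.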

    }
    date {
      timezone => "Europe/London"
      match => [ "GenerateTime", "YYYY/MM/dd HH:mm:ss" ]
    }
    #convert fields to proper format
    mutate {
      convert => [ "Bytes", "integer" ]
      convert => [ "BytesReceived", "integer" ]
      convert => [ "BytesSent", "integer" ]
      convert => [ "ElapsedTimeInSec", "integer" ]
      convert => [ "geoip.area_code", "integer" ]
      convert => [ "geoip.dma_code", "integer" ]
      convert => [ "geoip.latitude", "float" ]
      convert => [ "geoip.longitude", "float" ]
      convert => [ "NATDestinationPort", "integer" ]
      convert => [ "NATSourcePort", "integer" ]
      convert => [ "Packets", "integer" ]
      convert => [ "pkts_received", "integer" ]
      convert => [ "pkts_sent", "integer" ]
      convert => [ "seqno", "integer" ]
      gsub => [ "Rule", " ", "_",
                "Application", "( |-)", "_" ]
                #"SourceUser", "globalservs\\", "" ]
      remove_field => [ "message", "raw_message" ]
    }

And looking at the actual index created:

{
  "_index": "palo-firewall-2017.05.25",
  "_type": "paloalto",
  "_id": "AVxAz-dT16iezoLvHpg6",
  "_score": 1,
  "_source": {
    "RepeatCount": "1",
    "DestinationCountry": "UK",
    "SourceCountry": "10.0.0.0-10.255.255.255",
    "type": "paloalto",
    "column49": "0",
    "DestinationPort": "53",
    "column48": "43",
    "column47": "15",
    "column46": "aged-out",
    "SerialNum": "002201003352",
    "Bytes": 317,
    "SourceZone": "Main-Zone",
    "TimeLogged": "2017/05/25 19:13:05",
    "host": "192.168.0.1",
    "fingerprint": "a96b0230aaad1761472be67c1fb5c9b1dae4e5f4",
    "NATSourceIP": "1.2.3.4",
    "NATDestinationPort": 53,
    "column53": "from-policy",
    "pkts_received": 1,
    "column52": "FW1",
    "column51": null,
    "column50": "0",
    "ConfigVersion": "0",
    "StartTime": "2017/05/25 19:12:31",
    "DestinationGeo": {
      "timezone": "Europe/Paris",
      "ip": "1.2.3.4",
      "latitude": 24.5432,
      "country_code2": "FR",
      "country_name": "France",
      "continent_code": "EU",
      "country_code3": "FR",
"location": [
        1.234,
        48.6576
      ],
      "longitude": 1.234
    },
    "SourceUser": null,
    "ReceiveTime": "2017/05/25 19:13:05",
    "BytesSent": 90,
    "GenerateTime": "2017/05/25 19:13:05",
    "Rule": "Allow out",
    "DestinationZone": "untrust",
    "Application": "dns",
    "SessionID": "228758",
    "NATSourcePort": 43404,
    "VirtualSystem": "vsys1",
    "Action": "allow",
    "Category": "any",
    "InboundInterface": "ae1.701",
    "cpadding": "0",
    "SourcePort": "60960",
    "OutboundInterface": "ethernet1/1",
    "LogAction": "Logstash",
    "actionflags": "0x0",
    "Packets": 2,
    "@version": "1",
    "Padding": "0",
    "DestinationUser": null,
    "Threat-ContentType": "end",
    "SourceAddress": "192.168.0.1",
    "seqno": 2737249635,
    "NATDestinationIP": "1.2.3.4",
    "IPProtocol": "udp",
    "Flags": "0x400019",
    "DestinationAddress": "192.168.0.56",
    "BytesReceived": 227,
    "Type": "TRAFFIC",
    "pkts_sent": 1,
    "@timestamp": "2017-05-25T18:13:05.000Z",
    "ElapsedTimeInSec": 31
  },
  "fields": {
    "StartTime": [
      1495739551000
    ],
    "ReceiveTime": [
      1495739585000
    ],
    "TimeLogged": [
      1495739585000
    ],
    "GenerateTime": [
      1495739585000
    ],
    "@timestamp": [
      1495735985000
    ]
  }
}

The weirdest thing is that when I examine the log being sent from the FW, there is no column 56; there are only 53. Equally, where is it pulling this mystery "keyword" column from? I'm happy to bin them regardless. Maybe a mutate/remove_field?
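
Something like this, presumably (just a sketch based on the column names visible in the document above; I'd need to add column54-column56 and "keyword" if they ever actually appear):

    mutate {
      remove_field => [ "column46", "column47", "column48", "column49",
                        "column50", "column51", "column52", "column53" ]
    }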

Thanks for your help.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.