Elasticsearch filter plugin for Logstash: SearchParseException[No mapping found for [m_name] in order to sort on]

I have two data sets; one is meta data on end points e.g. mac-address, O/S, computer type etc. The other is network authentication events, which include pass/fail, ip address, mac address etc.

Both sets of data are loaded, via logstash into a single elastic index.

What I am trying to do is, using the logstash-filter-elasticsearch plugin, query the end-point meta data documents based on the authenticating machine's mac-address, and copy the relevant fields into the authentication event document (e.g. copy the O/S type, manufacturer, software image etc.), so I can visualise authentication failures based on end-point meta data.

However I receive the following error (from elasticsearch.log)

    Caused by: SearchParseException[failed to parse search source [{"size":1,"query":{"query_string":{"query":"type:apple_mac AND ep_mac:3C-07-54-3B-0C-91","lowercase_expanded_terms":true,"analyze_wildcard":false}},"sort":[{"m_name":{"order":"desc"}}]}]]; nested: 
SearchParseException[No mapping found for [m_name] in order to sort on];

Now obviously there is an issue with the mapping for the m_name field used to sort the query response. However, I have the following mapping template applied in the output section of the logstash configuration, which includes ignore_unmapped : true and which I thought would fix the error:

{
  "template" : "test-*",
  "settings" : {
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
      "_all" : {"enabled" : true, "omit_norms" : true},
      "dynamic_templates" : [ {
        "message_field" : {
          "match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "string", "index" : "not_analyzed", "omit_norms" : true, "ignore_unmapped" : true,
            "fielddata" : { "format" : "enabled" }
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "string", "index" : "not_analyzed", "omit_norms" : true, "ignore_unmapped" : true,
            "fielddata" : { "format" : "enabled" },
            "fields" : {
              "raw" : {"type": "string", "index" : "not_analyzed", "ignore_above" : 256}
            }
          }
        }
      } ],
      "<content...omitted>
          }
        }
      }
    }
  }
}

Any help with resolving this would be greatly appreciated. I also realise that parent/child documents may be another avenue, but for simplicity I am keen to try and get this approach working.

Below is the logstash configuration file

input {
 # Tail the Cisco ISE syslog export files. Each file is read from the top
 # the first time it is seen, and read progress is persisted in a dedicated
 # sincedb file so restarts do not re-ingest old events.
 file {
        type => "ise_syslog"
        path => "/opt/log/ise/ise-mobility.xxx.xxx/*"
        sincedb_path => "/opt/logstash/sincedb-access"
        start_position => "beginning"
 }
}

filter {

if [type] == "ise_syslog" {

 if "CISE_Failed_Attempts" in [message] {

        # Extract the interesting key=value pairs from a failed-authentication
        # syslog line. break_on_match => false lets every pattern run even
        # after an earlier one has matched.
        grok {
                break_on_match => false
                match => { "message" => "FailureReason=%{INT:f_reason}" }
                match => { "message" => "AD-User-Candidate-Identities=%{NOTSPACE:user_id}" }
                match => { "message" => "NAS-IP-Address=%{IP:src_ip}" }
                match => { "message" => "NAS-Port-Id=%{NOTSPACE:ep_intf}" }
                match => { "message" => "Calling-Station-ID=%{MAC:ep_mac}" }
                match => { "message" => "Framed-IP-Address=%{IP:ep_ip}" }
                match => { "message" => "UserName=%{NOTSPACE:username}" }
                match => { "message" => "RequestLatency=%{INT:r_lat}" }
        }

        # Pull the failure detail out of whichever StepData slot carries it.
        # BUG FIX: the original crossed the slot numbers (the 75=STATUS
        # condition grokked StepData=74= and the 74=ERROR condition grokked
        # StepData=75=) and repeated the 75=STATUS branch twice, which made
        # f_detail an array. Each slot now groks its own number, for both
        # STATUS and ERROR payloads.
        if "StepData=74=STATUS" in [message] or "StepData=74=ERROR" in [message] {
                grok { match => { "message" => "StepData=74=%{NOTSPACE:f_detail}" } }
        }

        if "StepData=75=STATUS" in [message] or "StepData=75=ERROR" in [message] {
                grok { match => { "message" => "StepData=75=%{NOTSPACE:f_detail}" } }
        }

        mutate {
                add_tag => "fail"
                # Seed the FQDN/hostname fields with the raw IPs; the dns
                # filter below reverse-resolves them in place.
                add_field => { "switch_FQDN" => "%{src_ip}" }
                add_field => { "ep_hostname" => "%{ep_ip}" }
                # Default so m_name always exists, even when the enrichment
                # lookup below finds no matching end-point document.
                add_field => { "m_name" => "not set" }
        }

        # Enrich the failure event with end-point meta data looked up by MAC.
        elasticsearch {
                hosts => ["localhost:9200"]
                query => "type:apple_mac AND ep_mac:%{[ep_mac]}"
                # BUG FIX: fields is a flat list of [es_field, event_field]
                # pairs; the original odd-length ["m_name","m_name","os"]
                # left "os" without a destination field.
                fields => [ "m_name", "m_name", "os", "os" ]
                # NOTE(review): the SearchParseException occurs because some
                # documents matched by the query have no m_name mapping.
                # "ignore_unmapped"/"unmapped_type" is a sort-clause
                # (query-time) option — putting it in the index mapping
                # template, as attempted, cannot suppress this error.
                sort => "m_name:desc"
        }
 }

 else if "CISE_Passed_Authentications" in [message] {

        # Extract the interesting key=value pairs from a successful
        # authentication syslog line.
        grok {
                break_on_match => false
                match => { "message" => "NAS-IP-Address=%{IP:src_ip}" }
                match => { "message" => "NAS-Port-Id=%{NOTSPACE:ep_intf}" }
                match => { "message" => "Calling-Station-ID=%{MAC:ep_mac}" }
                match => { "message" => "Framed-IP-Address=%{IP:ep_ip}" }
                match => { "message" => "EndPointMatchedProfile=%{WORD:ep_profile}" }
                match => { "message" => "User-Name=%{NOTSPACE:username}" }
                match => { "message" => "UseCase=%{WORD:usecase}" }
                match => { "message" => "RequestLatency=%{INT:r_lat}" }
        }
        mutate {
                add_field => { "switch_FQDN" => "%{src_ip}" }
                add_field => { "ep_hostname" => "%{ep_ip}" }
                add_field => { "m_name" => "not set" }
                add_tag => "pass"
        }

 }

 else {
        # Neither a failed nor a passed authentication record: tag it and
        # drop the raw payload to keep the index small.
        mutate {
                add_tag => "no_result"
                remove_field => ["message"]
        }
  }

  # Reverse-resolve the seeded IP fields, replacing the value in place.
  # NOTE(review): ep_hostname is seeded from ep_ip above, yet the reverse
  # lookup is applied to ep_ip itself (clobbering the IP); confirm whether
  # "ep_hostname" was intended here instead.
  dns {
                hit_cache_size => "100"
                hit_cache_ttl => "300"
                max_retries => "1"
                action => "replace"
                nameserver => "128.250.66.5"
                reverse => [ "switch_FQDN", "ep_ip" ]
  }
 }
}

output {
 # Index every event into the daily test-* index, (re)installing the custom
 # mapping template on startup.
 elasticsearch {
  hosts => ["localhost:9200"]
  template_overwrite => true
  template => "/opt/elasticsearch/elasticsearch-template.json"
  # BUG FIX: the original pattern "test-%{+YYY.MM.dd}" used a three-letter
  # year token; the conventional Joda-time pattern for a four-digit year
  # (and the one the "test-*" template expects daily indices to follow)
  # is YYYY.
  index => "test-%{+YYYY.MM.dd}"
 }

 # Echo each event to stdout for debugging.
 stdout { codec => rubydebug }
}