Elasticsearch ignoring custom template file defined in Logstash

Hi,

I use ELK GA 5.0.0. I push logs to Kafka using Filebeat, then consume them with Logstash. Everything works fine. I pulled the mapping from Elasticsearch using curl, like this:

curl -XGET 'localhost:9200/my-logs-2016-11-11/_mapping?pretty' --user elastic:elasticpassword

which gave me:

{
  "my-logs-2016-11-11" : {
    "mappings" : {
      "logfile" : {
        "properties" : {
          "@timestamp" : {
            "type" : "date"
          },
          "@version" : {
            "type" : "text",
            "fields" : {
              "keyword" : {
                "type" : "keyword",
                "ignore_above" : 256
              }
            }
          },
          "beat" : {
            "properties" : {
              "hostname" : {
                "type" : "text",
                "fields" : {
                  "keyword" : {
                    "type" : "keyword",
                    "ignore_above" : 256
                  }
                }
              },
              "name" : {
                "type" : "text",
                "fields" : {
                  "keyword" : {
                    "type" : "keyword",
                    "ignore_above" : 256
                  }
                }
              },
              "version" : {
                "type" : "text",
                "fields" : {
                  "keyword" : {
                    "type" : "keyword",
                    "ignore_above" : 256
                  }
                }
              }
            }
          },
          "fields" : {
            "properties" : {
              "logtype" : {
                "type" : "text",
                "fields" : {
                  "keyword" : {
                    "type" : "keyword",
                    "ignore_above" : 256
                  }
                }
              }
            }
          },
          "reference" : {
            "type" : "text",
            "fields" : {
              "keyword" : {
                "type" : "keyword",
                "ignore_above" : 256
              }
            }
          },
          "message" : {
            "type" : "text",
            "fields" : {
              "keyword" : {
                "type" : "keyword",
                "ignore_above" : 256
              }
            }
          },
          "offset" : {
            "type" : "long"
          }
        }
      }
    }
  }
}

I wanted to give it a custom mapping, so I created a mapping JSON file like this:

{
  "template": "my-logs-*",
  "mappings" : {
    "logfile" : {
      "properties" : {
        "@timestamp" : {
          "type" : "date"
        },
        "@version" : {
          "type": "integer"
        },
        "beat" : {
          "properties" : {
            "hostname": {
              "index": "not_analyzed",
              "type": "string"
            },
            "name": {
              "index": "not_analyzed",
              "type": "string"
            },
            "version": {
              "index": "not_analyzed",
              "type": "string"
            }
          }
        },
        "fields" : {
          "properties" : {
            "logtype": {
              "index": "not_analyzed",
              "type": "string"
            }
          }
        },
        "reference" : {
          "type" : "long"
        },
        "message" : {
          "index": "not_analyzed",
          "type": "string"
        },
        "offset" : {
          "type" : "long"
        }
      }
    }
  }
}
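As a sanity check, the file can first be validated as plain JSON before pointing Logstash at it, for example (assuming Python is available on the machine):

python -m json.tool /ELK/logstash-5.0.0/mylogsmap.json

If the file does not parse, the template can never be installed.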

In Logstash, I use:

output{
	stdout{ codec => json }
	elasticsearch {
		template => "/ELK/logstash-5.0.0/mylogsmap.json"
		template_overwrite => true
		hosts => ["192.168.0.1"]
		index => "my-logs-%{+YYYY-MM-dd}"
		user => elastic
		password => elasticpassword
	}
}
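To check whether Logstash actually installed the template into the cluster, it can be fetched back with the template API (assuming the default template_name of logstash, since I do not override it in the output):

curl -XGET 'localhost:9200/_template/logstash?pretty' --user elastic:elasticpassword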

But I am still getting the previous mapping. I have deleted all the indices and re-indexed, but the new indices still end up with the old mapping. How can I fix this?
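In case it matters, my understanding is that the template is stored in the cluster and only applied at index creation time, so deleting the indices alone may not be enough if a stale template is still registered. Something like this (again assuming the default template name) should clear both before restarting Logstash:

curl -XDELETE 'localhost:9200/_template/logstash' --user elastic:elasticpassword
curl -XDELETE 'localhost:9200/my-logs-*' --user elastic:elasticpassword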

Thanks in advance.

Can somebody tell me why this is happening? :disappointed: