Mutate convert not working

I am trying to convert a field named "sf_query_time" from string to integer using mutate convert in the filter section of logstash.conf, but it's not getting converted.

Here is the code (the input and filter sections of logstash.conf):

input {
  beats {
    port => 5045
    type => "logs"
    ssl => false
    ssl_certificate => "/etc/pki/tls/certs/connectandsell-logging.crt"
    ssl_key => "/etc/pki/tls/private/connectandsell-logging.key"
  }
}

filter {
  mutate {
    convert => { "sf_query_time" => "integer" }
  }
  metrics {
    meter => "events"
    add_tag => "metric"
  }

  if [input_type] == "log" {
    grok {
      # [/\\] matches "/" or "\"; the backslash must be escaped inside the
      # character class, otherwise the class is unterminated and the pattern fails
      match => [ "source", "%{GREEDYDATA}[/\\]%{WORD:log_server}-%{GREEDYDATA}" ]
    }
  } else {
    grok {
      match => [ "path", "%{GREEDYDATA}[/\\]%{WORD:log_server}-%{DATA:type}-%{GREEDYDATA}" ]
    }
  }

if "_grokparsefailure" in [tags] {
mutate {
replace => [ "message" , "Incorrect filename format" ]
add_tag => [ "incorrectFilenameFormat" ]
}
} else if [type] == "lightning" {
grok {
patterns_dir => "/etc/logstash/conf.d/pattern"
match => [ "message", "(?m)%{LIGLOG}" ] }
if "_grokparsefailure" in [tags] {
grok {
match => { "message" => "(?m)%{GREEDYDATA}" }
}
}else{
mutate {
replace => [ "message" , "%{temp_message}" ]
replace => [ "timestamp" , "%{source_timestamp}" ]
remove_field => [ "temp_message" ]
remove_field => [ "source_timestamp" ]
}
date {
match => [ "timestamp" , "yyyy-MM-dd HH:mm:ss,SSS" ]
timezone => "US/Pacific"
}
}
  } else if [type] == "engineactivity" {
    grok {
      patterns_dir => "/etc/logstash/conf.d/pattern"
      match => [ "message", "(?m)%{ENGLOG}" ]
    }
    if "_grokparsefailure" in [tags] {
      grok {
        match => { "message" => "(?m)%{GREEDYDATA}" }
      }
    } else {
      mutate {
        # For some reason the type is not set automatically for this index.
        # It throws the following error: InvalidIndexNameException[Invalid index name [engineactivity,4-2016.01.12], must not contain the following characters [, /, *, ?, ", <, >, |, , ,]]
        # Therefore, we specify the correct type explicitly.
        replace => [ "type", "engineactivity" ]
        replace => [ "message", "%{temp_message}" ]
        replace => [ "timestamp", "%{source_timestamp}" ]
        remove_field => [ "temp_message", "source_timestamp" ]
      }
      date {
        match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSS" ]
        timezone => "UTC"
      }
    }
  } else if [type] == "engineasterisk" {
    grok {
      patterns_dir => "/etc/logstash/conf.d/pattern"
      match => [ "message", "(?m)%{ENGAST_LOG}" ]
    }
    if "_grokparsefailure" in [tags] {
      grok {
        match => { "message" => "(?m)%{GREEDYDATA}" }
      }
    } else {
      mutate {
        gsub => [ "kvpairs", ": \n", ": dummy
" ] # please do not edit this line. For details: https://github.com/elastic/logstash/issues/1645
      }

      kv {
        field_split => "\n"
        value_split => ":"
        source => "kvpairs"
        remove_field => [ "kvpairs" ]
      }

      if [source_timestamp] {
        mutate {
          replace => [ "timestamp", "%{source_timestamp}" ]
          remove_field => [ "source_timestamp" ]
        }
      }

      date {
        match => [ "timestamp", "MM.dd.yy HH:mm:ss.SSS" ]
        timezone => "US/Pacific"
      }
    }
  } else if [type] == "asterisk" or [type] == "proxy" {
    grok {
      patterns_dir => "/etc/logstash/conf.d/pattern"
      match => [ "message", "(?m)%{ASTLOG}" ]
    }
    if "_grokparsefailure" in [tags] {
      grok {
        match => { "message" => "(?m)%{GREEDYDATA}" }
      }
    } else {
      mutate {
        replace => [ "timestamp", "%{source_timestamp}" ]
        remove_field => [ "source_timestamp" ]
      }
      date {
        match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss" ]
        timezone => "US/Pacific"
      }
    }
  } else {
    grok {
      match => { "message" => "(?m)%{GREEDYDATA}" }
    }
    mutate {
      replace => [ "type", "junk-data" ]
    }
  }
}
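
As a side note, the convert itself can be checked in isolation with a minimal pipeline like the one below. The stdin input with a json codec and the rubydebug stdout are stand-ins for the real beats input and output, and the sample event is made up:

input {
  stdin { codec => json }   # feed events like {"sf_query_time": "42"} on stdin
}
filter {
  mutate {
    convert => { "sf_query_time" => "integer" }
  }
}
output {
  stdout { codec => rubydebug }   # sf_query_time should print as 42, without quotes
}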

Are there any errors in your logs? I've also had problems with Elasticsearch dynamic mapping not working for integer fields: they were still indexed as strings. My fix was to add the field to the mapping with the correct data type. Note that only newly created indexes pick up the corrected mapping; existing indexes keep the old one unless you reindex them.
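
For example, something along these lines would declare the field as an integer for future indexes. This is a rough sketch: the template name and index pattern are placeholders, and the syntax shown is the 1.x/2.x-era index template API, so adjust it for your Elasticsearch version:

curl -XPUT 'http://localhost:9200/_template/sf_query_time_as_integer' -d '
{
  "template": "lightning-*",
  "mappings": {
    "_default_": {
      "properties": {
        "sf_query_time": { "type": "integer" }
      }
    }
  }
}'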

No, there are no errors in my logs.