I am trying to convert a field named " sf_query_time" from string to integer field using mutate convert in filters in logstash.conf. But its not getting converted
Here is the code (filter in logstash)
input {
beats {
port => 5045
type => "logs"
ssl => false
ssl_certificate => "/etc/pki/tls/certs/connectandsell-logging.crt"
ssl_key => "/etc/pki/tls/private/connectandsell-logging.key"
}
}
filter {
mutate {
convert => { "sf_query_time" => "integer" }
}
metrics {
meter => "events"
add_tag => "metric"
}
if [input_type] == "log"{
grok{
match => [ "source", "%{GREEDYDATA}[/\]%{WORD:log_server}-%{GREEDYDATA}" ]
}
}else{
grok{
match => [ "path", "%{GREEDYDATA}[/\]%{WORD:log_server}-%{DATA:type}-%{GREEDYDATA}" ]
}
}
if "_grokparsefailure" in [tags] {
mutate {
replace => [ "message" , "Incorrect filename format" ]
add_tag => [ "incorrectFilenameFormat" ]
}
} else if [type] == "lightning" {
grok {
patterns_dir => "/etc/logstash/conf.d/pattern"
match => [ "message", "(?m)%{LIGLOG}" ] }
if "_grokparsefailure" in [tags] {
grok {
match => { "message" => "(?m)%{GREEDYDATA}" }
}
}else{
mutate {
replace => [ "message" , "%{temp_message}" ]
replace => [ "timestamp" , "%{source_timestamp}" ]
remove_field => [ "temp_message" ]
remove_field => [ "source_timestamp" ]
}
date {
match => [ "timestamp" , "yyyy-MM-dd HH:mm:ss,SSS" ]
timezone => "US/Pacific"
}
}
} else if [type] == "engineactivity" {
grok {
patterns_dir => "/etc/logstash/conf.d/pattern"
match => [ "message", "(?m)%{ENGLOG}" ] }
if "_grokparsefailure" in [tags] {
grok {
match => { "message" => "(?m)%{GREEDYDATA}" }
}
}else{
mutate {
# For some reason for this index type is not getting set automatically.
# It throws following error: InvalidIndexNameException[Invalid index name [engineactivity,4-2016.01.12], must not contain the following characters [, /, *, ?, ", <, >, |, , ,]]
# Therefore, we are specifying the correct type.
replace => [ "type" , "engineactivity" ]
replace => [ "message" , "%{temp_message}" ]
replace => [ "timestamp" , "%{source_timestamp}" ]
remove_field => [ "temp_message" ]
remove_field => [ "source_timestamp" ]
}
date {
match => [ "timestamp" , "yyyy-MM-dd HH:mm:ss.SSS" ]
timezone => "UTC"
}
}
} else if [type] == "engineasterisk" {
grok {
patterns_dir => "/etc/logstash/conf.d/pattern"
match => [ "message", "(?m)%{ENGAST_LOG}" ]
}
if "_grokparsefailure" in [tags] {
grok {
match => { "message" => "(?m)%{GREEDYDATA}" }
}
}else{
mutate {
gsub => [ "kvpairs", ": \n", ": dummy
" ] # please do not edit this line. For details: https://github.com/elastic/logstash/issues/1645
}
kv {
field_split => "\n"
value_split => ":"
source => "kvpairs"
remove_field => [ "kvpairs" ]
}
if [source_timestamp] {
mutate {
replace => [ "timestamp" , "%{source_timestamp}" ]
remove_field => [ "source_timestamp" ]
}
}
date {
match => [ "timestamp" , "MM.dd.yy HH:mm:ss.SSS" ]
timezone => "US/Pacific"
}
}
}else if [type] == "asterisk" or [type] == "proxy" {
grok {
patterns_dir => "/etc/logstash/conf.d/pattern"
match => [ "message", "(?m)%{ASTLOG}" ]
}
if "_grokparsefailure" in [tags] {
grok {
match => { "message" => "(?m)%{GREEDYDATA}" }
}
}else{
mutate {
replace => [ "timestamp" , "%{source_timestamp}" ]
remove_field => [ "source_timestamp" ]
}
date {
match => [ "timestamp" , "yyyy-MM-dd HH:mm:ss.SSS" , "yyyy-MM-dd HH:mm:ss" ]
timezone => "US/Pacific"
}
}
}else{
grok {
match => { "message" => "(?m)%{GREEDYDATA}" }
}
mutate {
replace => [ "type" , "junk-data" ]
}
}
}