Hi
I am using following configuration file to process multi-line logs. It seems logstash is always skipping first message from log file and not saving in elasticsearch
input {
file {
type => "logs"
#path => ["C:/Users/x7358/Documents/logs/Test4/**/*performance*soap*"]
path => ["C:/Users/Perf/*performance*soap*"]
start_position => "beginning"
ignore_older => 0
codec => multiline {
pattern => "^%{TIMESTAMP_ISO8601}"
negate => true
what => "previous"
}
}
}
filter {
grok {
match => { "message" => "%{TIMESTAMP_ISO8601:timestamp}%{SPACE}\{(?<thread>[^)]+)\}%{SPACE}%{WORD:payload}:\s%{GREEDYDATA:msg}" }
}
date {
match => [ "timestamp", "yyyy-MM-dd HH:mm:ss,SSS" ]
}
mutate {
gsub => [
"msg", "<", "<",
"msg", ">", ">"
]
}
xml {
source => "msg"
target => "message_parsed"
remove_namespaces => true
store_xml => false
xpath => [
"/Envelope/Body/Execute_SHResponse/Execute_SHResult/*/@RequestID", "RequestID",
"/Envelope/Body/Execute_SHResponse/Execute_SHResult/CalcOut/*/@ReturnCode", "ReturnCode",
"local-name(/Envelope/Body/Execute_SHResponse/Execute_SHResult/*)", "RequestType",
"local-name(/Envelope/Body/Execute_SH/Request/*)", "RequestType"
]
remove_field => [ "xml","@timestamp","thread","payload","message","tags" ]
}
}
output {
stdout {
codec => rubydebug
}
elasticsearch {
hosts => ["127.0.0.1:9200"]
action => "create"
index => "test"
}
}
-
can you guide me what could be the wrong in my config file which skipping 1sets of message.
-
Also what could be the write configuration for Elasticsearch as it's not creating index automatically when I start processing my file
Thanks