Hi Team,
Please find my Logstash configuration below, followed by the startup log:
input {
  file {
    # the file input on Windows needs forward slashes in the path glob
    path => "C:/Users/priya_shukla/Desktop/SAPLogsERR*.log"
    start_position => "beginning"
    sincedb_path => "NUL"
  }
}
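# The filter below parses each line with grok, adds the SAP fields, and marks the event
# Successful or Failure based on the source file name.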
filter {
  grok {
    match => { "message" => "%{DATA:core_Id} %{DATA:date} %{DATA:timestamp} %{WORD:guiid} %{WORD:book_refId} %{DATA:lastupdate} %{WORD:type} %{WORD:class} %{WORD:status} %{NUMBER:num} %{WORD:text} %{NUMBER:source_id} %{WORD:carrier} %{DATA:related_booking_ids}" }
  }
if "*_SUC.log" in [path]
{
grok{
#For Other_TransData
add_field => { "correlation_Id" => "%{core_Id}" }
add_field => { "booking_ref" => "%{book_refId}" }
add_field => { "sap_process_status" => "Successful" }
add_field => { "sap_process_time" => "%{date} %{timestamp}" }
add_field => { "sap_message" => "%{message}" }
add_field => { "message_GUI_ID" => "%{gui_Id}" }
add_field => { "lara_last_update_date" => "%{date} %{timestamp}" }
add_field => { "message_type" => "%{type}" }
add_field => { "message_class" => "%{class}" }
add_field => { "message_status" => "%{status}" }
add_field => { "message_number" => "%{num}" }
add_field => { "message_text" => "%{text}" }
add_field => { "source_port_call_id" => "%{source_id}" }
add_field => { "carrier" => "%{carrier}" }
add_field => { "related_booking_ids" => "%{related_id}" }
remove_tag => ["logs","_grokparsefailure"]
}
}
  else {
    # For Other_TransData; trx_id is added for SAP_TransData,
    # and correlation_Id / booking_ref apply to both
    mutate {
      add_field => {
        "sap_process_status" => "Failure"
        "sap_process_time" => "%{date} %{timestamp}"
        "sap_message" => "%{message}"
        "message_GUI_ID" => "%{guiid}"
        "lara_last_update_date" => "%{date} %{timestamp}"
        "message_type" => "%{type}"
        "message_class" => "%{class}"
        "message_status" => "%{status}"
        "message_number" => "%{num}"
        "message_text" => "%{text}"
        "source_port_call_id" => "%{source_id}"
        "trx_id" => "%{core_Id}_SAP_AIF"
        "correlation_Id" => "%{core_Id}"
        "booking_ref" => "%{book_refId}"
      }
      # carrier and related_booking_ids are already captured by the grok pattern above
      remove_tag => ["logs"]
    }
  }
if "Correlation ID" in [correlation_id]{
drop { }
}
if "_grokparsefailure" in [tags]{
drop { }
}
  mutate {
    gsub => ["value", "\'", ""]
    # booking_ref is kept because the elasticsearch output uses it as the document_id
    remove_field => ["timestamp", "path", "host", "procurve", "date", "type", "message", "@version"]
  }
  date {
    match => ["sap_process_time", "dd.MM.yyyy HH:mm:ss"]
    target => "sap_process_time"
  }
  clone {
    # each clone gets its type set to "Other_sap", which the outputs below check
    clones => ["Other_sap"]
    remove_field => ["trx_id", "@version", "@timestamp"]
  }
}
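# Original events are written to a local file for inspection; the "Other_sap" clones
# are upserted into Elasticsearch, keyed by booking_ref.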
output {
  if [type] != "Other_sap" {
    file {
      path => "C:/Users/priya_shukla/Downloads/logstash-6.2.4/logstash-6.2.4/output_data.txt"
    }
  } else {
    elasticsearch {
      hosts => ["localhost:9200"]
      index => "test_demo"
      action => "update"
      document_id => "%{booking_ref}"
      doc_as_upsert => true
    }
  }
}
Output in the logs:
[2018-06-14T13:34:51,554][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.2.4"}
[2018-06-14T13:34:52,559][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2018-06-14T13:34:57,251][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2018-06-14T13:34:58,416][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x4c557ce0 sleep>"}
[2018-06-14T13:34:58,541][INFO ][logstash.agent ] Pipelines running {:count=>1, :pipelines=>["main"]}
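
The pipeline starts up cleanly, but the log shows nothing after that about events being read or sent. As a next step I can cut the config down to just the file input and a stdout output to confirm the files are being picked up at all; a minimal sketch (same input path, no filters) would be:

input {
  file {
    path => "C:/Users/priya_shukla/Desktop/SAPLogsERR*.log"
    start_position => "beginning"
    sincedb_path => "NUL"
  }
}
output {
  # print every event that the file input produces
  stdout { codec => rubydebug }
}

If events appear on stdout with that, the problem is somewhere in the filter or output section rather than in the file input.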
Kindly help me out with this.
Priya.