Below is my config
input {
  file {
    path => "C:/logstash-7.16.2/data/input/test*.json"
    start_position => "beginning"
    # On Windows the sincedb must be discarded via the NUL device.
    # The literal string "null" creates a real file named "null",
    # so read positions are persisted and files are NOT re-read
    # from the beginning on restart.
    sincedb_path => "NUL"
  }
}
filter {
  # Parse the raw JSON line into the [document] namespace.
  json {
    source => "message"
    target => "document"
  }
  mutate {
    add_field => {
      "Company" => "%{[document][company]}"
      "Submit_Date_format" => "%{[document][created_on]}"
      "Status_state" => "%{[document][status]}"
      "Last_Modified_Date_format" => "%{[document][updated_on]}"
    }
  }
  mutate {
    add_field => {
      # BUG FIX: the original used "%{company}", but no top-level
      # lowercase "company" field exists (the json filter nests it
      # under [document], and the copy added above is "Company").
      # The literal text "%{company}" would have been used as the id.
      "document_id" => "%{[document][company]}"
    }
  }
  # SYNTAX FIX: the original had a stray "}" — event.get("Company"}) —
  # which makes the embedded Ruby fail to compile.
  #
  # NOTE(review): the sample value "41681857db9a79d0f7f9d278f49619d3" is a
  # 32-character hex string (the shape of an MD5 digest), NOT Base64 text.
  # Base64.decode64 on such input necessarily produces binary garbage like
  # the "decoded_company" shown below. If the field is a one-way hash it
  # cannot be decoded at all; if it is cipher output encoded as hex, it must
  # be hex-unpacked and decrypted with the correct key/algorithm instead.
  ruby { code => 'event.set("decoded_company", Base64.decode64(event.get("Company")))' }
}
output {
  stdout { }
  elasticsearch {
    index => "client_test_index"
    hosts => ["${ES_HOST}"]
    # doc_as_upsert is only honored with action => "update"; combined
    # with "create" it is silently ignored (and "create" fails on any
    # duplicate document_id). Switched to update-with-upsert, which
    # matches the apparent intent of setting doc_as_upsert.
    action => "update"
    doc_as_upsert => true
    document_id => "%{document_id}"
  }
}
A few of the fields in my JSON input arrive encrypted/hashed, something like this:
{"created_on":"2023-11-11 10:05:01","company":"41681857db9a79d0f7f9d278f49619d3","updated_on":"2023-11-16 10:05:01","status":"open"}
I am trying to decode company using ruby code
ruby { code => 'event.set("decoded_company", Base64.decode64(event.get("Company")))' }
But in the ES index the decoded values come out garbled, as below:
"decoded_company" : "�7���7�^tm�9������ߗ�",
Please help me with this
I tried the cipher plugin, but it does not work with our current setup.
Thanks in advance