Logstash--Using ruby to convert JSON to fields. Data going to elastic as text. Pls let me know how to set to long type?
Below is my json log string:
{
"traceId": "o0uyveRU/8BkjL+lbpDlnQ==",
"spanId": "73rmqIRmo34=",
"operationName": "ProcessWorkItem",
"startTime": "2024-07-22T06:07:12.650881409Z",
"duration": "0.256620153s",
"tags": [
{
"key": "otel.library.name",
"vStr": "com.mdi.core.workmgmt.TypedConsumer"
},
{
"key": "ta.work.type",
"vStr": "INBOUND_ENTITY_INTEGRATION"
},
{
"key": "ta.companyCode",
"vStr": "AMBER_ROAD"
},
{
"key": "ta.work.status",
"vStr": "COMPLETED"
},
{
"key": "ta.work.loadDuration",
"vType": "INT64"
},
{
"key": "service.version",
"vStr": "24.2"
},
{
"key": "work.processorclass",
"vStr": "com.mdi.core.entity.integration.EntityStdIntegrationWorkProcessor"
},
{
"key": "ta.work.receivedTime",
"vType": "INT64",
"vInt64": "1721628429423"
},
{
"key": "ta.work.delegateDuration",
"vType": "INT64",
"vInt64": "257"
},
{
"key": "ta.work.busKeySearch",
"vStr": "AMBER_ROAD;SO_TEST_93;SALES_ORDER"
},
{
"key": "ta.orgCode",
"vStr": "AMBER_ROAD"
},
{
"key": "ta.work.id",
"vStr": "33267221"
},
{
"key": "length",
"vType": "INT64",
"vInt64": "3680"
},
{
"key": "ta.work.reportStatusDuration",
"vType": "INT64",
"vInt64": "4"
},
{
"key": "ta.work.errorCount",
"vType": "INT64"
},
{
"key": "ta.work.delegateConsumerTime",
"vType": "INT64",
"vInt64": "253"
},
{
"key": "span.kind",
"vStr": "internal"
},
{
"key": "otel.status_code",
"vStr": "OK"
},
{
"key": "internal.span.format",
"vStr": "otlp"
}
],
"process": {
"serviceName": "TRADE_SINGLETON",
"tags": [
{
"key": "deployment.environment",
"vStr": "PSR"
},
{
"key": "e2.environment.name",
"vStr": "OTEL_PST"
},
{
"key": "e2.instance.name",
"vStr": "TRADE_SINGLETON_1"
},
{
"key": "e2.product.family",
"vStr": "GTM"
},
{
"key": "e2.product.tenant.id",
"vStr": "WMMERCURYTA"
},
{
"key": "service.instance.id",
"vStr": "URN:PSR:GTM:TA:OTEL_PST:TRADE_SINGLETON:TRADE_SINGLETON_1"
},
{
"key": "service.namespace",
"vStr": "TA"
},
{
"key": "telemetry.sdk.language",
"vStr": "java"
},
{
"key": "telemetry.sdk.name",
"vStr": "opentelemetry"
},
{
"key": "telemetry.sdk.version",
"vStr": "1.34.0"
}
]
},
"tag": "jaeger_spans"
}
I am using filters as below:
filter
{
json
{
source => "message"
}
json
{
source => "message"
target => "tmessage"
}
#ruby {
# code => "event.set('startTimeMillis', ((event.get('startTime').to_f*1000).to_i))"
# }
mutate
{
add_field => {"process.serviceName" => "%{[tmessage][process][serviceName]}"}
}
ruby {
code => '
event.get("[tmessage][tags]").each { |a|
name = a["key"]
name=name.gsub(/\./,"@")
if a["vStr"]
value = a["vStr"]
elsif a["vInt64"]
value = a["vInt64"]
else
value = ""
end
event.set("tag." + name, value)
}
'
}
ruby {
code => '
event.get("[tmessage][process][tags]").each { |a|
name = a["key"]
name=name.gsub(/\./,"@")
if a["vStr"]
value = a["vStr"]
elsif a["vInt64"]
value = a["vInt64"]
else
value = ""
end
event.set("process.tag." + name, value)
}
'
}
mutate {
convert => { "duration" => "float" }
convert => { "tag.length" => "float" }
}
mutate {
remove_field => [ "message", "tmessage","tags"]
}
}
Pls let me know where would be the issue or how to get fields of int type as long in Elastic?