Hi guys!
It has been a long time since I last used ELK. Now that I want to reuse the same configuration, with the same ELK versions, that worked for me back then, I haven't been able to display the data in Kibana this time, because it says: "Lens is unable to create visualizations with this field because it does not contain data. To create a visualization, drag and drop a different field."
But, according to Logstash, the values are being extracted without any problem:
{
       "logLevel" => "INFORMATIVE:",
     "@timestamp" => 2024-05-13T19:26:57.907Z,
 "executorThread" => "http-nio-8910-exec-19",
      "classname" => "(LogNMERule)",
           "logs" => "/home/german/Documentos/Trabajo/Actividades diarias/2024/elk/filebeat/logs/new_log_2.log",
        "newDate" => "05/13/2024 13:26:57.907",
 "session_server" => "new_log_2",
     "newMessage" => "Inside SuccessResponse Response From AMSS system for Add SOC Operation Attributes :: BSide_Intf_rcsInd_String=, BSide_Intf_rcsIneligFtrInd_String=, BSide_Intf_vowifiRoamInd_String=, BSide_Intf_vowifiInd_String=, BSide_Intf_volteInd_String=, BSide_Intf_volteAcctypInd_String=, BSide_Intf_volteSbmktInd_String=, BSide_Intf_volteSocInd_String=, BSide_Intf_voltePrepaidInd_String=, BSide_Intf_volteDataFtrInd_String=, BSide_Intf_lteSimInd_String=, BSide_Intf_lteAcctypInd_String=, BSide_Intf_lteSubmktInd_String=, BSide_Intf_lteProvisioningInd_String=, BSide_Intf_lteSocFtrInd_String=, BSide_Intf_wifiUnauthCorpSubInd_String=, BSide_Intf_lteDeviceInd_String=, BSide_Intf_networkSubType_String=, BSide_InterfaceResponseCodeString=2010300523, BSide_InterfaceMessageIDString=NISA-BULKPR-1715621216417amssInternal_dcade4_2_9690559, BSide_InterfaceServerTSString=2024-05-13T13:26:57.896-04:00"
}
However, the fields newMessage, executorThread, logLevel and others aren't in Kibana:
I have tried with different elk versions, but I got the same results.
My logstash conf is:
# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.
input {
# Receive log lines shipped by Filebeat (Beats protocol) on port 5046.
beats {
port => 5046
}
}
filter {
mutate {
copy => { "[log][file][path]" => "logs" }
}
mutate {
split => { "[log][file][path]" => "/" }
}
ruby {
code => '
m = event.get("[log][file][path]")
event.set("ss", m.to_a.last)
'
}
mutate {
split => { "ss" => "." }
}
ruby {
code => '
m = event.get("ss")
event.set("session_server", m.to_a.first)
'
}
mutate {
split => { "message" => " " }
}
ruby {
code => '
m = event.get("message")
if m.is_a? Array
event.set("newMessage", m.last(m.length - 6).join(" "))
event.set("newDate", m.take(2).join(" "))
event.set("executorThread", m.at(3))
event.set("classname", m.at(4))
event.set("logLevel", m.at(5))
end
'
}
if [newMessage] =~ /\(\)/ {
mutate {
copy => { "newMessage" => "newClass" }
}
mutate {
split => { "newClass" => " " }
}
mutate {
copy => { "[newClass][0]" => "classname" }
}
}
if [classname] =~ /\(07\.003\.19\)/ {
mutate {
replace => { "classname" => "FlushingClassGeneric" }
}
}
if [classname] =~ /\(06\.024\.02\)/ {
mutate {
replace => { "classname" => "DatabaseClassGeneric" }
}
}
if [classname] =~ /\(02\..*\..*\)/ {
mutate {
replace => { "classname" => "ClassesLoadedStartingIUM" }
}
}
if [classname] =~ /\(39\.009\.03\)/ {
mutate {
replace => { "classname" => "PluginsLoadedStartingIUM" }
}
}
mutate {
gsub => [
# replace all forward slashes with underscore
"@timestamp", "Z", ""
]
}
date {
match => [ "newDate", "MM/dd/yyyy HH:mm:ss.SSSSSS" ]
timezone => "-06:00"
target => "@timestamp"
}
mutate {
copy => { "[newDate][0]" => "@timestamp" }
}
mutate {
remove_field => [ "offset", "@version", "agent", "ecs", "[host][mac]", "[host][os]", "[host][ip]", "[host][id]", "[host][architecture]", "[host][containerized]", "[host][name]", input, event, host, tags, container, instance, region, cloud, offset, _id, _index, _score, _type, data_stream, log, ss, message, newClass]
}
}
output {
# Echo every processed event to the console (rubydebug) for debugging.
stdout { codec => rubydebug { metadata => false } }
# Index events into Elasticsearch, one index per day.
elasticsearch {
hosts => ["192.168.1.20:9200"]
index => "logstash-ium-%{+YYYY.MM.dd}"
}
}
Do you have any ideas you could give me? I don't know if it's related to my Linux distribution (I use Manjaro Linux).
UPDATE:
Another thing I noticed is that the fields I expected to find in Kibana do exist in Elasticsearch, but they are inside the _source field:
{
"_index" : "logstash-ium-2024.05.22",
"_id" : "GjFupo8BIM3eBKrHeNol",
"_score" : 1.0,
"_source" : {
"logLevel" : "DEBUG3:",
"@timestamp" : "2024-05-22T02:37:00.000Z",
"executorThread" : "SAML",
"classname" : "PatternDirectoryPolicy.getNextFileName()",
"logs" : "/home/german/Documentos/Trabajo/Actividades diarias/2024/elk/filebeat/logs/sso_cbl_txt5.log",
"newDate" : "05/21/2024 20:37:00.000805",
"session_server" : "sso_cbl_txt5",
"newMessage" : "PatternDirectoryPolicy.getNextFileName()"
}
},
But I found a screenshot from last year, when my ELK stack worked, and I noticed that the fields were outside of the _source field:
According to the Kibana documentation, the _source field is ignored by Kibana, but I don't know why the fields aren't outside of the _source field this time.
Thanks a lot for your time.