Initially, I wrote a Logstash configuration file to pull Balena information as JSON and then parse it with the Logstash ruby filter plugin, which made the data look like this:
{
"@version" => "1",
"values" => [
[ 0] {
"machineId" => 1415733,
"release" => "031d61fb13fb362afefb59143e5ae5d2",
"timestamp" => "2019-03-22T22:12:50.761Z"
},
[ 1] {
"machineId" => 1415733,
"release" => "0a67dbf3644916bdbb4e36710131720a",
"timestamp" => "2019-05-09T01:06:25.489Z"
},
[ 2] {
"machineId" => 1415733,
"release" => "0e3641dffad6fb901227270c60a9639d",
"timestamp" => "2019-03-29T00:10:59.828Z"
},
[ 3] {
"machineId" => 1415733,
"release" => "178dddaa4a3dd66bd9844f11b9016949",
"timestamp" => "2019-04-04T16:28:55.552Z"
},
[ 4] {
"machineId" => 1415733,
"release" => "2200c40d0972f4fe25bba661e7945112",
"timestamp" => "2019-06-12T18:22:53.281Z"
},
[ 5] {
"machineId" => 1415733,
"release" => "228245f20d67ac801f5b4f4f111caa10",
"timestamp" => "2019-04-09T22:52:52.100Z"
},
[ 6] {
"machineId" => 1415733,
"release" => "29bdf4d57ea97a9f1b4f6b7357beb1bc",
"timestamp" => "2019-06-20T18:13:27.244Z"
},
[ 7] {
"machineId" => 1415733,
"release" => "2be83890252b2599f5c16bbc773a89d8",
"timestamp" => "2019-04-29T22:58:58.475Z"
},
[ 8] {
"machineId" => 1415733,
"release" => "40905efa747878c5ab5d7f238fd8d048",
"timestamp" => "2019-06-20T21:05:55.330Z"
},
[ 9] {
"machineId" => 1415733,
"release" => "47bccb6aa9f65bd044d14c213894ea10",
"timestamp" => "2019-05-08T22:42:02.767Z"
},
[10] {
"machineId" => 1415733,
"release" => "488dceed662b85dc604161ac834e97dc",
"timestamp" => "2019-05-03T18:18:44.272Z"
},
[11] {
"machineId" => 1415733,
"release" => "4fae8f7aa5f01425f1c8c882350ee488",
"timestamp" => "2019-04-22T23:34:26.274Z"
},
[12] {
"machineId" => 1415733,
"release" => "57441b6fbb6382b980f704bb27bdcca0",
"timestamp" => "2019-03-25T19:01:28.316Z"
},
[13] {
"machineId" => 1415733,
"release" => "65cafefe43a8faa082388e3d6a6c76c0",
"timestamp" => "2019-04-05T22:09:08.675Z"
},
[14] {
"machineId" => 1415733,
"release" => "6cb260fe85023253659a9955ba56bf3b",
"timestamp" => "2019-05-13T23:24:50.640Z"
},
[15] {
"machineId" => 1415733,
"release" => "798bd260cc74601363cb4653774a1003",
"timestamp" => "2019-03-29T16:48:59.544Z"
}
*** deleted some of them ***
],
"@timestamp" => 2019-06-21T15:21:28.145Z
}
Then I sent the data to Elasticsearch for indexing purposes, and I wanted to show a table in Kibana with the timestamp, machine id, and release values. However, as you can see in the picture below, the same release number is repeated for every timestamp, which is wrong. Every timestamp should have its own release value (each one is unique in the JSON data above). Is there any way to fix this in either Logstash, Elasticsearch, or Kibana?
Here is my logstash configuration for reference:
# Poll the Balena Cloud v4 REST API on a fixed schedule and decode each
# JSON response into a single Logstash event.
input{
http_poller {
urls => {
authentication => {
method => get
# NOTE(review): both HTTP basic-auth credentials and a Bearer token header
# are supplied below; the Balena API uses token auth, so the user/password
# pair is presumably redundant — confirm and drop one of the two.
user => "myEmailAddress"
password => "myPassword"
# $filter narrows the releases to a single application by its numeric ID.
url => "https://api.balena-cloud.com/v4/release?$filter=belongs_to__application%20eq%20<APP ID>"
headers => {
"Content-Type" => "application/json"
"Authorization" => "Bearer <AUTH_TOKEN>"
}
}
}
# Abort a poll that takes longer than 60 seconds.
request_timeout => 60
# Re-poll every 5 seconds; each poll produces one event.
schedule => { every => "5s"}
# Parse the response body as JSON into event fields (the release list
# arrives under the top-level "d" key).
codec => "json"
}
}
filter{
# Guard on the "event" field being non-empty. Logstash field references in
# conditionals use bare bracket syntax: [event]. The original ["event"]
# was an array literal containing the string "event", which compares as
# always unequal to "" — the guard never actually inspected the event.
if [event] != "" {
# Reshape the Balena API payload: the raw response stores the release
# list under "d"; build a flat array of {release, timestamp, machineId}
# hashes and expose it as the "values" field.
ruby {
code => '
releases = event.get("d").map { |entry|
{
"release" => entry["commit"],
"timestamp" => entry["created_at"],
"machineId" => entry["belongs_to__application"]["__id"]
}
}
event.set("values", releases)
'
remove_field => ["d"]
}
# To index one Elasticsearch document per release (so Kibana shows each
# release/timestamp pair on its own row instead of repeating a single
# value), split the array into separate events here:
# split { field => "values" }
# NOTE(review): the date filters below match "log-datestamp" and
# "log-epoch", fields that never appear in events produced by this
# pipeline, so they are currently no-ops and @timestamp stays at the
# poll time. Kept for compatibility; remove once confirmed unused.
date {
match => ["log-datestamp", "YYYY-MM-dd HH:mm:ss,SSS"]
target => "@timestamp"
timezone => "UTC"
}
date {
match => ["log-datestamp", "YY-MM-dd HH:mm:ss,SSS"]
target => "@timestamp"
timezone => "UTC"
}
date {
match => ["log-datestamp", "ISO8601"]
target => "@timestamp"
timezone => "UTC"
}
date {
match => ["log-epoch", "UNIX"]
target => "@timestamp"
timezone => "UTC"
}
date {
match => ["log-epoch", "UNIX_MS"]
target => "@timestamp"
timezone => "UTC"
}
}
}
# Print every event to stdout in rubydebug (pretty-printed Ruby hash)
# format — the representation shown at the top of this post.
# NOTE(review): the text says the data is sent to Elasticsearch, but no
# elasticsearch output appears here — presumably trimmed for the post.
output{
stdout {
codec => rubydebug
}
}
I appreciate any help/suggestion.