JSON file input to and viewing in Logstash console

Logstash version: 8.17.2, installed on a Windows machine.
Requirement: ingest a JSON file into Logstash.

JSON file (the real file is bigger, but for testing purposes it has been trimmed to a single entry):

{{"id":"ocid1.clem.oc1.iad.","compartmentId":"ocid1.compartment.oc1..","detectorRuleId":"BUCKET_IS_PUBLIC","riskLevel":"CRITICAL","riskScore":null,"resourceId":"ciaas/speech_test","resourceName":"speech_test","resourceType":"Bucket","labels":["C.1_OBJECTSTORAGE","ObjectStorage","CIS_.0"],"timeDue":null,"timeFirstDetected":"2025-11-20T11:06:18.367Z","timeLastDetected":"2025-11-24T03:06:32.689Z","lifecycleState":"ACTIVE","lifecycleDetail":"OPEN","detectorId":"ICTOR","region":"a-1","regions":["a1"],"targetId":"ocid1.cet.oc1.iad.","manageType":"GOVERNANCE","locks":null}

input.conf

input {
  file {
    path => ["<Path>/file001.json"]
    start_position => "beginning"
    sincedb_path => "NUL"
    codec => "json"
  }
}

filter {
  json {
    source => messages
  }
}

output {
  stdout {
    codec => rubydebug
  }

  elasticsearch {
    hosts => ["http://localhost:9200"]
    data_stream => "false"
    index => "demo-index001"
  }
}

Issues:

  1. The JSON file is not getting picked up and nothing appears in the Logstash console. Below is the console output:
    \logstash-8.17.2\bin>logstash.bat -f \logstash-8.17.2\input.conf
    "Using bundled JDK: \logstash-8.17.2\jdk\bin\java.exe"
    Sending Logstash logs to /logstash-8.17.2/logs which is now configured via log4j2.properties
    [2025-11-24T13:41:44,072][WARN ][logstash.runner ] NOTICE: Running Logstash as a superuser is strongly discouraged as it poses a security risk. Set 'allow_superuser' to false for better security.
    [2025-11-24T13:41:44,083][INFO ][logstash.runner ] Log4j configuration path used is: \logstash-8.17.2\config\log4j2.properties
    [2025-11-24T13:41:44,087][WARN ][logstash.runner ] The use of JAVA_HOME has been deprecated. Logstash 8.0 and later ignores JAVA_HOME and uses the bundled JDK. Running Logstash with the bundled JDK is recommended. The bundled JDK has been verified to work with each specific version of Logstash, and generally provides best performance and reliability. If you have compelling reasons for using your own JDK (organizational-specific compliance requirements, for example), you can configure LS_JAVA_HOME to use that version instead.
    [2025-11-24T13:41:44,089][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"8.17.2", "jruby.version"=>"jruby 9.4.9.0 (3.1.4) 2024-11-04 547c6b150e OpenJDK 64-Bit Server VM 21.0.6+7-LTS on 21.0.6+7-LTS +indy +jit [x86_64-mswin32]"}
    [2025-11-24T13:41:44,094][INFO ][logstash.runner ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, -Dlogstash.jackson.stream-read-constraints.max-string-length=200000000, -Dlogstash.jackson.stream-read-constraints.max-number-length=10000, -Djruby.regexp.interruptible=true, -Djdk.io.File.enableADS=true, --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED, -Dio.netty.allocator.maxOrder=11]
    [2025-11-24T13:41:44,168][INFO ][org.logstash.jackson.StreamReadConstraintsUtil] Jackson default value override logstash.jackson.stream-read-constraints.max-string-length configured to 200000000
    [2025-11-24T13:41:44,169][INFO ][org.logstash.jackson.StreamReadConstraintsUtil] Jackson default value override logstash.jackson.stream-read-constraints.max-number-length configured to 10000
    [2025-11-24T13:41:44,236][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
    [2025-11-24T13:41:46,648][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
    [2025-11-24T13:41:46,983][INFO ][org.reflections.Reflections] Reflections took 116 ms to scan 1 urls, producing 152 keys and 530 values
    [2025-11-24T13:41:47,477][INFO ][logstash.codecs.json ] ECS compatibility is enabled but target option was not specified. This may cause fields to be set at the top-level of the event where they are likely to clash with the Elastic Common Schema. It is recommended to set the target option to avoid potential schema conflicts (if your data is ECS compliant or non-conflicting, feel free to ignore this message)
    [2025-11-24T13:41:47,954][INFO ][logstash.javapipeline ] Pipeline main is configured with pipeline.ecs_compatibility: v8 setting. All plugins in this pipeline will default to ecs_compatibility => v8 unless explicitly configured otherwise.
    [2025-11-24T13:41:47,968][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::Elasticsearch", :hosts=>["http://localhost:9200"]}
    [2025-11-24T13:41:48,180][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
    [2025-11-24T13:41:48,301][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"http://localhost:9200/"}
    [2025-11-24T13:41:48,303][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (8.17.2) {:es_version=>8}
    [2025-11-24T13:41:48,313][INFO ][logstash.filters.json ][main] ECS compatibility is enabled but target option was not specified. This may cause fields to be set at the top-level of the event where they are likely to clash with the Elastic Common Schema. It is recommended to set the target option to avoid potential schema conflicts (if your data is ECS compliant or non-conflicting, feel free to ignore this message)
    [2025-11-24T13:41:48,320][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>8, :ecs_compatibility=>:v8}
    [2025-11-24T13:41:48,336][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1000, "pipeline.sources"=>["/logstash-8.17.2/input.conf"], :thread=>"#<Thread:0x6381902d /logstash-8.17.2/logstash-core/lib/logstash/java_pipeline.rb:138 run>"}
    [2025-11-24T13:41:49,244][INFO ][logstash.javapipeline ][main] Pipeline Java execution initialization time {"seconds"=>0.91}
    [2025-11-24T13:41:49,270][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
    [2025-11-24T13:41:49,275][INFO ][filewatch.observingtail ][main][68ac931e1786c2696b752503a1676572958c90062ac0ffd25b57534347010225] START, creating Discoverer, Watch with file and sincedb collections
    [2025-11-24T13:41:49,286][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}

  2. No index is getting created (curl localhost:9200/demo-index001/_search?pretty=true)

{"error" : {"root_cause" : [{"type" : "index_not_found_exception","reason" : "no such index [demo-index001]","resource.type" : "index_or_alias","resource.id" : "demo-index001","index_uuid" : "na","index" : "demo-index001"}],"type" : "index_not_found_exception","reason" : "no such index [demo-index001]","resource.type" : "index_or_alias","resource.id" : "demo-index001","index_uuid" : "na","index" : "demo-index001"},"status" : 404}

If you don't specify a username/password in the connection string, you will access ES as the built-in logstash_system user (the Elastic Jedi Order will correct me), which is visible in Kibana/ES.

"index_not_found_exception","reason" : "no such index [demo-index001]"

If you haven't created that index, you should either:

  • manually create the index and its template, or
  • use a power user, such as elastic or one you created yourself in Kibana with create_index privileges (I wouldn't assign that role to logstash_system). A minimal output sketch with explicit credentials follows.

Thanks for the inputs @Rios. Tweaking input.conf and using a different JSON file helped, but I am now facing a _jsonparsefailure error.

input.conf

input {
  file {
    path => ["<path>/savedqueries001.json"]
    start_position => "beginning"
    sincedb_path => "NUL"
  }
}

filter {
  json { source => "message" }

  mutate {
    remove_field => ["@timestamp", "path", "host", "@version", "[event][original]"]
  }
}

output {
  stdout {
    codec => rubydebug
  }

  elasticsearch {
    hosts => ["http://localhost:9200"]
    data_stream => "false"
    index => "demo-index001"
  }
}
JSON file

{
  "items": [
    {
      "id": "4kjba",
      "description": "osquery run",
      "displayName": "osquery",
      "compartmentId": "owsda",
      "timeCreated": "2025-10-23T12:48:31.397Z",
      "timeUpdated": "2025-10-23T12:48:31.397Z",
      "lifecycleState": "ACTIVE",
      "locks": null,
      "lifecycleDetails": null,
      "freeformTags": {},
      "definedTags": {
        "Oracle-Tags": {
          "CreatedBy": "mancom",
          "CreatedOn": "2025-10-23T12:48:31.252Z"
        }
      },
      "systemTags": {}
    },
    {
      "id": "ibyvq",
      "description": null,
      "displayName": "Memory Usage",
      "query": "SELECT * FROM table;",
      "compartmentId": "piwca",
      "timeCreated": "2024-08-03T10:08:54.555Z",
      "timeUpdated": "2024-08-03T10:08:54.555Z",
      "lifecycleState": "ACTIVE",
      "locks": null,
      "lifecycleDetails": null,
      "freeformTags": {},
      "definedTags": {
        "Oracle-Tags": {
          "CreatedBy": "gun",
          "CreatedOn": "2024-08-03T10:08:54.467Z"
        }
      },
      "systemTags": {}
    },
    {
      "id": "tmjaqma",
      "description": null,
      "displayName": "Process Open Sockets",
      "compartmentId": "iwca",
      "timeCreated": "2024-08-03T09:59:35.843Z",
      "timeUpdated": "2024-08-03T09:59:35.843Z",
      "lifecycleState": "ACTIVE",
      "locks": null,
      "lifecycleDetails": null,
      "freeformTags": {},
      "definedTags": {
        "Oracle-Tags": {
          "CreatedBy": "pra-com",
          "CreatedOn": "2024-08-03T09:59:35.568Z"
        }
      },
      "systemTags": {}
    }
  ],
  "locks": null
}

Errors from the console:

[2025-11-24T19:26:11,738][WARN ][logstash.filters.json ][main][a68c1c4c2d95cb646a9dada960cc34bb73fe5ff2933a274b4e1f8ba616a82aa2] Error parsing json {:source=>"message", :raw=>" "timeCreated": "2025-10-23T12:48:31.397Z",\r", :exception=>#<LogStash::Json::ParserError: Unexpected character (':' (code 58)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false')
at [Source: REDACTED (StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION disabled); line: 1, column: 27]>}
[2025-11-24T19:26:11,738][WARN ][logstash.filters.json ][main][a68c1c4c2d95cb646a9dada960cc34bb73fe5ff2933a274b4e1f8ba616a82aa2] Error parsing json {:source=>"message", :raw=>" "compartmentId": "owsda",\r", :exception=>#<LogStash::Json::ParserError: Unexpected character (':' (code 58)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false')
at [Source: REDACTED (StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION disabled); line: 1, column: 29]>}
[2025-11-24T19:26:11,743][WARN ][logstash.filters.json ][main][a68c1c4c2d95cb646a9dada960cc34bb73fe5ff2933a274b4e1f8ba616a82aa2] Error parsing json {:source=>"message", :raw=>" "description": "osquery run",\r", :exception=>#<LogStash::Json::ParserError: Unexpected character (':' (code 58)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false')
at [Source: REDACTED (StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION disabled); line: 1, column: 27]>}

..

..

..

{
    "event" => {},
    "log" => {
        "file" => {
            "path" => "/savedqueries001.json"
        }
    },
    "message" => " {\r",
    "tags" => [
        [0] "_jsonparsefailure"
    ]
}
{
    "event" => {},
    "log" => {
        "file" => {
            "path" => "/savedqueries001.json"
        }
    },
    "message" => " "locks": null,\r",
    "tags" => [
        [0] "_jsonparsefailure"
    ]
}
{
    "event" => {},
    "log" => {
        "file" => {
            "path" => "/savedqueries001.json"
        }
    },
    "message" => " },\r",
    "tags" => [
        [0] "_jsonparsefailure"
    ]
}
{
    "event" => {},
    "log" => {
        "file" => {
            "path" => "/savedqueries001.json"
        }
    },
    "message" => " "compartmentId": "piwca",\r",
    "tags" => [
        [0] "_jsonparsefailure"
    ]
}
{
    "event" => {},
    "log" => {
        "file" => {
            "path" => "../savedqueries001.json"
        }
    },
    "message" => " "Oracle-Tags": {\r",
    "tags" => [
        [0] "_jsonparsefailure"
    ]
}
{
    "event" => {},
    "log" => {
        "file" => {
            "path" => "/savedqueries001.json"
        }
    },
    "message" => " "id": "tmjaqma",\r",
    "tags" => [
        [0] "_jsonparsefailure"
    ]
}

Desired output

{
  id: 4kjba,
  description: osquery run,
  displayName: osquery,
  compartmentId: owsda,
  timeCreated: 2025-10-23T12:48:31.397Z,
  timeUpdated: 2025-10-23T12:48:31.397Z,
  lifecycleState: ACTIVE,
  locks: null,
  lifecycleDetails: null,
  freeformTags: {},
  definedTags: {
    Oracle-Tags: {
      CreatedBy: mancom,
      CreatedOn: 2025-10-23T12:48:31.252Z
    }
  },
  systemTags: {}
},
{
  id: ibyvq,
  description: null,
  displayName: Memory Usage,
  query: SELECT * FROM table;,
  compartmentId: piwca,
  timeCreated: 2024-08-03T10:08:54.555Z,
  timeUpdated: 2024-08-03T10:08:54.555Z,
  lifecycleState: ACTIVE,
  locks: null,
  lifecycleDetails: null,
  freeformTags: {},
  definedTags: {
    Oracle-Tags: {
      CreatedBy: gun,
      CreatedOn: 2024-08-03T10:08:54.467Z
    }
  },
  systemTags: {}
},
{
  id: tmjaqma,
  description: null,
  displayName: Process Open Sockets,
  compartmentId: iwca,
  timeCreated: 2024-08-03T09:59:35.843Z,
  timeUpdated: 2024-08-03T09:59:35.843Z,
  lifecycleState: ACTIVE,
  locks: null,
  lifecycleDetails: null,
  freeformTags: {},
  definedTags: {
    Oracle-Tags: {
      CreatedBy: pra-com,
      CreatedOn: 2024-08-03T09:59:35.568Z
    }
  },
  systemTags: {}
}

If your JSON object is split across multiple lines you will need to use a multiline codec to recombine the lines so that a single event contains the entire JSON object. There is an example of that here, and a minimal sketch is shown below.
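
A minimal sketch of that approach, assuming the whole file should become a single event: the pattern is a string that never occurs in the data, so every line is folded into the previous event, and auto_flush_interval emits the accumulated event once the file goes quiet:

input {
  file {
    path => ["<path>/savedqueries001.json"]
    start_position => "beginning"
    sincedb_path => "NUL"
    codec => multiline {
      pattern => "^Spalanzani"      # deliberately never matches
      negate => true
      what => "previous"            # so each line is appended to the previous event
      auto_flush_interval => 2      # flush the pending event after 2 s of no new lines
      max_lines => 10000            # raise the default 500-line cap for larger files
    }
  }
}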

1 Like

Now, with the below configuration in input.conf, I get these errors:

"tags" => [ [0] "_jsonparsefailure", [1] "_split_type_failure" ]….
[logstash.filters.split ][main][488f998136b23c7ca8597dc5b5359e271d93ae07ad15217c7514cafaf6ceccf4] Only String and Array types are splittable. field:[@metadata][label] is of type = NilClass

I introduced an if ["message"] drop {} conditional in the filter, but this also didn't help; positioned at the beginning, it published nothing to the Logstash console, as it would drop the whole JSON. (A tag-based variant is sketched below.)
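
For reference, a common way to express that kind of conditional drop is to key on the failure tag rather than on [message] itself; a minimal sketch (my phrasing, not from this thread):

filter {
  # discard only the events the json filter failed to parse,
  # rather than dropping everything
  if "_jsonparsefailure" in [tags] {
    drop {}
  }
}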

The JSON input file remains the same as shared in earlier posts.

input.conf

input {
  file {
    path => ["<path>/savedqueries001.json"]
    codec => multiline {
      pattern => "^Spalanzani"
      negate => true
      what => "previous"
      auto_flush_interval => 1
      multiline_tag => ""
    }
    start_position => "beginning"
    sincedb_path => "NUL"
  }
}

filter {
  json { source => "message" target => "[@metadata][json]" remove_field => [ "message" ] }

  ruby {
    code => '
      json = event.remove("[@metadata][json]")
      if json.is_a? Hash
        newJson = []
        json.each { |k, v|
          newJson << v.merge({ "label" => k })
        }
        event.set("[@metadata][label]", newJson)
      end
    '
  }

  split { field => "[@metadata][label]" }

  ruby {
    code => '
      d = event.remove("[@metadata][label]")
      if d.is_a? Hash
        d.each { |k, v|
          event.set(k, v)
        }
      end
    '
  }
}

output {
  stdout {}

  elasticsearch {
    hosts => ["http://localhost:9200"]
    data_stream => "false"
    index => "demo-index001"
  }
}
Logstash console output:

[2025-11-25T10:36:31,064][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (8.17.2) {:es_version=>8}
[2025-11-25T10:36:31,083][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>8, :ecs_compatibility=>:v8}
[2025-11-25T10:36:31,099][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1000, "pipeline.sources"=>["/logstash-8.17.2/input.conf"], :thread=>"#<Thread:0x72ce294c /logstash-8.17.2/logstash-core/lib/logstash/java_pipeline.rb:138 run>"}
[2025-11-25T10:36:32,364][INFO ][logstash.javapipeline ][main] Pipeline Java execution initialization time {"seconds"=>1.26}
[2025-11-25T10:36:32,389][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
[2025-11-25T10:36:32,391][INFO ][filewatch.observingtail ][main][71a930d51b7b11ba0aaf7a74cea367194e20a9ae217ce78ab759286d06c738e4] START, creating Discoverer, Watch with file and sincedb collections
[2025-11-25T10:36:32,465][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2025-11-25T10:36:34,161][WARN ][logstash.filters.json ][main][21fb094d5f4848b0e04049621f00b2b0ccb0418890ad953f7c54e4b61f1cf1d3] Error parsing json {:source=>"message", :raw=>"{\r\n "items": [\r\n {\r\n "id": "4kjba",\r\n "description": "osquery run",\r\n "displayName": "osquery",\r\n "compartmentId": "owsda",\r\n "timeCreated": "2025-10-23T12:48:31.397Z",\r\n "timeUpdated": "2025-10-23T12:48:31.397Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "mancom",\r\n "CreatedOn": "2025-10-23T12:48:31.252Z"\r\n }\r\n },\r\n "systemTags": {}\r\n },\r\n {\r\n "id": "ibyvq",\r\n "description": "memory",\r\n "displayName": "Memory Usage",\r\n "query": "SELECT * FROM table;",\r\n "compartmentId": "piwca",\r\n "timeCreated": "2024-08-03T10:08:54.555Z",\r\n "timeUpdated": "2024-08-03T10:08:54.555Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "gun",\r\n "CreatedOn": "2024-08-03T10:08:54.467Z"\r\n }\r\n },\r\n "systemTags": {}\r\n },\r\n {\r\n "id": "tmjaqma",\r\n "description": "sockers",\r\n "displayName": "Process Open Sockets",\r\n "compartmentId": "iwca",\r\n "timeCreated": "2024-08-03T09:59:35.843Z",\r\n "timeUpdated": "2024-08-03T09:59:35.843Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "pra-com",\r\n "CreatedOn": "2024-08-03T09:59:35.568Z"\r\n }\r\n },\r\n "systemTags": {}\r\n }\r\n ],\r\n "locks": "NA"\r", :exception=>#<LogStash::Json::ParserError: Unexpected end-of-input: expected close marker for Object (start marker at [Source: REDACTED (StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION disabled); line: 1, column: 1])
at [Source: REDACTED (StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION disabled); line: 63, column: 1]>}
[2025-11-25T10:36:34,163][WARN ][logstash.filters.split ][main][488f998136b23c7ca8597dc5b5359e271d93ae07ad15217c7514cafaf6ceccf4] Only String and Array types are splittable. field:[@metadata][label] is of type = NilClass
{
"@version" => "1",
"host" => {
"name" => "KJ"
},
"tags" => [
[0] "_jsonparsefailure",
[1] "_split_type_failure"
],
"@timestamp" => 2025-11-25T05:06:34.040339Z,
"message" => "{\r\n "items": [\r\n {\r\n "id": "4kjba",\r\n "description": "osquery run",\r\n "displayName": "osquery",\r\n "compartmentId": "owsda",\r\n "timeCreated": "2025-10-23T12:48:31.397Z",\r\n "timeUpdated": "2025-10-23T12:48:31.397Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "mancom",\r\n "CreatedOn": "2025-10-23T12:48:31.252Z"\r\n }\r\n },\r\n "systemTags": {}\r\n },\r\n {\r\n "id": "ibyvq",\r\n "description": "memory",\r\n "displayName": "Memory Usage",\r\n "query": "SELECT * FROM table;",\r\n "compartmentId": "piwca",\r\n "timeCreated": "2024-08-03T10:08:54.555Z",\r\n "timeUpdated": "2024-08-03T10:08:54.555Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "gun",\r\n "CreatedOn": "2024-08-03T10:08:54.467Z"\r\n }\r\n },\r\n "systemTags": {}\r\n },\r\n {\r\n "id": "tmjaqma",\r\n "description": "sockers",\r\n "displayName": "Process Open Sockets",\r\n "compartmentId": "iwca",\r\n "timeCreated": "2024-08-03T09:59:35.843Z",\r\n "timeUpdated": "2024-08-03T09:59:35.843Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "pra-com",\r\n "CreatedOn": "2024-08-03T09:59:35.568Z"\r\n }\r\n },\r\n "systemTags": {}\r\n }\r\n ],\r\n "locks": "NA"\r",
"log" => {
"file" => {
"path" => "/jsonFiles/savedqueries001.json"
}
},
"event" => {
"original" => "{\r\n "items": [\r\n {\r\n "id": "4kjba",\r\n "description": "osquery run",\r\n "displayName": "osquery",\r\n "compartmentId": "owsda",\r\n "timeCreated": "2025-10-23T12:48:31.397Z",\r\n "timeUpdated": "2025-10-23T12:48:31.397Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "mancom",\r\n "CreatedOn": "2025-10-23T12:48:31.252Z"\r\n }\r\n },\r\n "systemTags": {}\r\n },\r\n {\r\n "id": "ibyvq",\r\n "description": "memory",\r\n "displayName": "Memory Usage",\r\n "query": "SELECT * FROM table;",\r\n "compartmentId": "piwca",\r\n "timeCreated": "2024-08-03T10:08:54.555Z",\r\n "timeUpdated": "2024-08-03T10:08:54.555Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "gun",\r\n "CreatedOn": "2024-08-03T10:08:54.467Z"\r\n }\r\n },\r\n "systemTags": {}\r\n },\r\n {\r\n "id": "tmjaqma",\r\n "description": "sockers",\r\n "displayName": "Process Open Sockets",\r\n "compartmentId": "iwca",\r\n "timeCreated": "2024-08-03T09:59:35.843Z",\r\n "timeUpdated": "2024-08-03T09:59:35.843Z",\r\n "lifecycleState": "ACTIVE",\r\n "locks": "NA",\r\n "lifecycleDetails": "NA",\r\n "freeformTags": {},\r\n "definedTags": {\r\n "Oracle-Tags": {\r\n "CreatedBy": "pra-com",\r\n "CreatedOn": "2024-08-03T09:59:35.568Z"\r\n }\r\n },\r\n "systemTags": {}\r\n }\r\n ],\r\n "locks": "NA"\r"
}
}
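
For comparison, given the file's actual shape ({ "items": [...] }), a simpler filter chain that would produce the desired one-event-per-item output is to parse the whole document and then split on the items array. A hedged sketch, assuming the multiline input above delivers the complete JSON as one event:

filter {
  # parse the whole multiline event into top-level fields
  json {
    source => "message"
    remove_field => [ "message" ]
  }

  # emit one event per element of the items array
  split { field => "items" }

  # hoist each item's keys to the top level of the event
  ruby {
    code => '
      item = event.remove("items")
      item.each { |k, v| event.set(k, v) } if item.is_a? Hash
    '
  }
}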