Ingest REST API response into Elasticsearch as separate documents through Logstash

I am working with http_poller and the http filter plugin in Logstash to ingest a REST API array response into Elasticsearch. I need help splitting the output so that each array entry is stored as a separate document in Elasticsearch.

Below is my configuration. I tried using split inside the filter block but couldn't get it to work.

input {
    exec {
        id => "change_id"
        # Log in to the REST API; the token printed to stdout becomes the event's [message]
        command => "curl --location 'http://10.2.2.20:8080/api/sys/login' --header 'authString: a' --header 'Content-Type: application/x-www-form-urlencoded' --header 'Cookie: u4kFLJTYA4X0gjrh4SQaMUKUq8TeB3wpmXYyQmRDmV6Xe5Wh6AFXMnKJbqteWoMOaT7zJbZ3KyUklI5U7ImM=&AAAAAAU4vLkpdx3qGPqZAvuWeJUERgJUaa9fgyzMGd_00ObS9sT7wtP51jDMcZCZKUOkR1KKuIwx20EMB0iTf115ayK5&' --data-urlencode 'username=admin' --data-urlencode 'password=test123'"
        interval => 6000
        #codec => "json"
        type => "restapi"
        add_field => {
            "[service][environment]" => "QA"
            "[service][type]" => "testApi"
            "[service][name]" => "api Service"
            "[event][dataset]" => "restapi"
            "[agent][name]" => "logstash"
            "[metricset][type]" => "restapi"
            "[metricset][name]" => "change management"
        }
    }
}

filter {
    mutate {
        # Stash the token under @metadata so it is not indexed
        rename => {
            "message" => "[@metadata][request][token]"
        }
        capitalize => [ "[host][name]" ]
        #copy => {
        #    "@timestamp" => "[event][start]"
        #}
        add_field => {
            "[event][start]" => "%{+yyyy-MM-dd}T%{+HH:mm:ss.SSS}Z"
        }
    }

    # Call the REST API with the token captured by the input
    http {
        verb => "GET"
        url => "http://10.2.2.20:8080/api/neow/v1.0/entry/infrachange%20Change?fields=values(Request%20ID,Submit%20Date,Change%20Request%20Status)"
        headers => {
            "Authorization" => "%{[@metadata][request][token]}"
        }
        request_timeout => 9
        body_format => "json"
        target_body => "response.body"
        target_headers => "response.headers"
    }

    mutate {
        rename => { "[response.body][_links][self][0]" => "[response.body][values][url]" }
    }

    mutate { remove_field => [ "process", "response.headers", "@metadata", "[response.body][_links]" ] }

    mutate {
        add_field => {
            "[event][end]" => "%{+yyyy-MM-dd}T%{+HH:mm:ss.SSS}Z"
        }
    }
}

output {
    elasticsearch {
        id => "restapi-main"
        hosts => ["https://elastic.local.com:9200"]
        #ssl => true
        #cacert => "/usr/share/logstash/certs/http_ca.crt"
        cacert => "/etc/logstash/config/certs/ca.crt"
        user => "elastic"
        password => "xyz123"
        index => "restapi-data-%{+YYYY-MM-dd}"
        document_id => "%{[@metadata][document_id]}"
        action => "index"
    }
    stdout {
        codec => rubydebug {
            metadata => true
        }
    }
}

Below is the Logstash output:

{
            "agent" => {
        "name" => "logstash"
    },
       "@timestamp" => 2023-03-20T22:38:46.174870412Z,
          "service" => {
        "environment" => "QA",
               "name" => "test",
               "type" => "restApi System"
    },
         "@version" => "1",
             "host" => {
        "name" => "elastic.local.com"
    },
    "response.body" => {
        "entries" => [
            [   0] {
                "_links" => {
                    "self" => [
                        [0] {
                            "href" => "http://10.2.2.20:8080/api/neow/v1.0/entry/infrachange%20Change/CRQ00000000000"
                        }
                    ]
                },
                "values" => {
                              "Submit Date" => "2013-09-19T14:57:27.000+0000",
                               "Request ID" => "CRQ0000000000000",
                    "Change Request Status" => "Cancelled"
                }
            },
            [   1] {
                "_links" => {
                    "self" => [
                        [0] {
                            "href" => "http://10.2.2.20:8080/api/neow/v1.0/entry/infrachange%20Change/CRQ00000000001"
                        }
                    ]
                },
                "values" => {
                              "Submit Date" => "2012-12-13T20:07:31.000+0000",
                               "Request ID" => "CRQ0000000000001",
                    "Change Request Status" => "Cancelled"
                }
            },
            ... (output truncated)

Please help me. Thanks

You need to use the split filter on the response field.

But you have another issue: using field.nested in Logstash creates a field with a literal dot in its name, not a JSON object with a nested field. This can lead to confusion, since in Elasticsearch and Kibana they will look the same in most places.

I would suggest that you change all references to fields with a dot in the name to the supported Logstash format, which is [field][nested].

For example, change response.body to [response][body].
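To make the difference concrete, here is a minimal sketch (the field names are just for illustration):

mutate {
    # Creates one top-level field literally named "response.body"
    add_field => { "response.body" => "some value" }
}

mutate {
    # Creates a nested object: { "response": { "body": "some value" } }
    add_field => { "[response][body]" => "some value" }
}

Both will usually display as response.body in Kibana, but only the second is a real nested object that filters like split can reference as [response][body].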

Then after that you should use the split filter on the field [response][body][entries].

split {
    field => "[response][body][entries]"
}

This will create one document for each item in the [response][body][entries] array.
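One more thing: your elasticsearch output sets document_id => "%{[@metadata][document_id]}", but nothing in the filter block creates that field, and the mutate that removes @metadata would drop it anyway. After the split, each event holds exactly one entry, so you could derive a stable id from it. A sketch, assuming the Request ID field from your sample output:

split {
    field => "[response][body][entries]"
}

mutate {
    # Each event now contains a single entry; reuse its Request ID as the document id
    add_field => { "[@metadata][document_id]" => "%{[response][body][entries][values][Request ID]}" }
}

Fields under @metadata are not sent to Elasticsearch, which makes them a good place for routing values like this.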

Awesome, it really works. Thank you. Here is the updated filter section:

        body_format => "json"
        target_body => "[response][body]"
        target_headers => "[response][headers]"
    }

    split {
        field => "[response][body][entries]"
    }

    mutate {
        rename => { "[response][body][_links][self][0]" => "[response][body][values][url]" }
    }
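The other dotted references from the original config, such as the remove_field list, would need the same [field][nested] treatment. A sketch of how that line could look:

mutate { remove_field => [ "process", "[response][headers]", "[response][body][_links]" ] }

Note that @metadata is left out here on purpose: removing it before the output would break the %{[@metadata][document_id]} reference in the elasticsearch plugin.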
