Load JSON text in Oracle table to Elasticsearch via Logstash

Dear Team,
I have data in an Oracle table Tbl_user in the format shown below. The JSON column holds its value in JSON format. I want to insert this data into an Elasticsearch index, with userid as the document ID, via Logstash. Requesting your support on how to achieve this.

userid   json
1001     {user: Adam, age: 30, city: New York}
1002     {user: Eve, age: 25, city: Paris}
1003     {user: John, age: 20, city: Sydney}
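
For reference, a rough sketch of the table DDL (the column types here are my simplification; the actual table may differ):

CREATE TABLE Tbl_user (
  userid  NUMBER PRIMARY KEY,   -- should become the Elasticsearch document ID
  json    CLOB                  -- JSON document stored as text
);

INSERT INTO Tbl_user VALUES (1001, '{"user": "Adam", "age": 30, "city": "New York"}');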

Thanks in advance
Karthik

Hi Karthik,

Here is a quick guide on how you can achieve this -

  1. Use the Logstash JDBC input plugin to fetch the data from the Oracle table.
  2. Use the Elasticsearch output plugin to push the data into Elasticsearch.
  3. For data processing, such as assigning userid to _id, use a mutate in the filter block; keeping the value under [@metadata] makes it available to the output without indexing it as part of the document.

Below is a sample config file which you can try after adjusting the values for your environment -

oracle_to_es.conf

input {
  jdbc {
    # Oracle connection details
    jdbc_connection_string => "jdbc:oracle:thin:@//your_oracle_host:1521/your_oracle_db"
    jdbc_user => "your_oracle_username"
    jdbc_password => "your_oracle_password"
    # Path to the Oracle JDBC driver jar
    jdbc_driver_library => "/path/to/ojdbc8.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    # Query that feeds the pipeline; column names become event fields
    statement => "SELECT userid, json FROM Tbl_user"
  }
}

filter {
  # Parse the JSON string from the "json" column.
  # Because "target" is set, the parsed keys end up nested under "parsed_json".
  json {
    source => "json"
    target => "parsed_json"
  }
  # Move userid into @metadata so it can drive document_id
  # without being indexed as part of the document itself.
  mutate {
    rename => { "userid" => "[@metadata][_id]" }
  }
}

output {
  elasticsearch {
    hosts => ["http://your_elasticsearch_host:9200"]
    index => "your_index_name"
    # Use the userid captured in the filter as the document ID
    document_id => "%{[@metadata][_id]}"
    user => "elastic"
    password => "your_elastic_password"
  }
  # Print each event to the console for debugging
  stdout {
    codec => rubydebug
  }
}

Run

logstash -f oracle_to_es.conf

Thanks Ashish, I will try it and let you know. I appreciate your support.

Dear Ashish,
Thank you so much for your support. Your solution works for importing the data. However, I didn't state my requirement clearly. I already have an index with the fields mentioned below and want to map the input JSON onto those existing fields. Please find the existing structure/data and a newly inserted record from Logstash below.
Is there any way to map the JSON fields to the index fields directly, rather than getting them as "parsed_json" or "json_content"? Also, I would like to know whether this is possible with nested tables as well.

-- structure
{
  "my_index": {
    "aliases": {},
    "mappings": {
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "@version": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "age": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "city": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "user1": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "userid": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        }
      }
    }
  }
}

-- data
"hits": [
{
"_index": "my_index",
"_id": "6iXxNJIBnBMzydZlUOYX",
"_score": 1.0,
"_source": {
"userid": "1001",
"user1": "sam",
"city": "tornto",
"@timestamp": "2024-09-27T19:24:42.590670979Z",
"@version": "1",
"age": "32"
}
},

-- newly inserted data via logstash
{
  "_index": "my_index",
  "_id": "1005",
  "_score": 1.0,
  "_source": {
    "parsed_json": {
      "user": "Eve",
      "city": "Paris",
      "age": "25"
    },
    "json_content": "{\"user\": \"Eve\", \"age\": \"25\", \"city\": \"Paris\"}",
    "@version": "1",
    "@timestamp": "2024-09-27T19:36:13.840690324Z"
  }
}

Thanks in advance once again.

@ashishtiwari1993: Can you please help with the above scenario?
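
One possible approach, sketched from the config shared earlier in the thread (untested): when the json filter's target option is omitted, Logstash expands the parsed keys at the root of the event, so user, age, and city should be indexed as the existing top-level fields instead of under parsed_json. Removing the raw column afterwards keeps it out of _source. A minimal sketch of the filter block, assuming the column still arrives in the event as "json"; the input and output sections stay the same, including document_id => "%{[@metadata][_id]}":

filter {
  json {
    # No "target" here: the parsed keys (user, age, city) are placed
    # at the root of the event, matching the existing index fields.
    source => "json"
  }
  mutate {
    # "copy" (rather than "rename") keeps userid in the document,
    # since the existing index has a userid field, while also making
    # it available to document_id via @metadata.
    copy => { "userid" => "[@metadata][_id]" }
    # Drop the raw JSON string so only the parsed fields are indexed.
    remove_field => ["json"]
  }
}

If the JSON value itself contains nested objects, the same filter parses them into object fields; whether the index then needs an explicit nested mapping depends on how those fields will be queried.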