Need to populate geo location based on a US state name column using Logstash, ES, and Kibi

I have a field containing the names of US states, and I need to add another field holding the geo location for that state, so that I can use the Tile map in Kibi based on this newly generated geo location.
My Logstash config file looks like this:

input {
  jdbc {
    jdbc_driver_library => "My_path\ojdbc6.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "My_string"
    jdbc_user => "myuser"
    jdbc_password => "pwd"
    statement => "SELECT state_name from table1"
    type => "string"
    schedule => "* * * * *"
  }
}

filter {
  mutate {
    convert => {
      "state_name" => "string"
    }
  }
}

output {
  elasticsearch {
    hosts => "http://localhost:9220"
    action => "index"
    index => "logstash-testxx"
    workers => 1
    template => "my_path\elasticsearch-template-es2x.json"
    manage_template => true
    template_overwrite => true
  }
  stdout {}
}

Construct a text file (e.g. a CSV) that maps state names to lat/lon values, then use the translate filter to look up the state name field.
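As a rough sketch of that approach (the dictionary file path, its contents, and the fallback value are assumptions for illustration, not taken from this thread), the filter could look something like this with the logstash-filter-translate plugin installed:

filter {
  # Look up the state name and return a "lat,lon" string.
  # states.yml is a hypothetical dictionary file with entries like:
  #   "CA": "36.7783,-119.4179"
  translate {
    field           => "state_name"
    destination     => "location_tmp"
    dictionary_path => "C:/logstash/states.yml"
    fallback        => "0,0"
  }
  # Split the looked-up "lat,lon" string into nested lat/lon fields.
  mutate {
    split => { "location_tmp" => "," }
    add_field => {
      "[location][lat]" => "%{[location_tmp][0]}"
      "[location][lon]" => "%{[location_tmp][1]}"
    }
  }
  mutate {
    convert => {
      "[location][lat]" => "float"
      "[location][lon]" => "float"
    }
    remove_field => [ "location_tmp" ]
  }
}

This only becomes a usable geo_point if the index template actually maps the location field as geo_point, which is what the next point is about.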

The issue still persists. However, I tried using an 'if else' condition to manually hardcode the lat/lon position for each state in the config file. It adds a new column, but it isn't recognized as a geo_point field.

ES needs to be configured via an index template to map a particular field as geo_point. Logstash's default index template contains one geo_point field, so either use that field name or hack the index template to suit your needs.
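For illustration only (a minimal sketch, not the template used in this thread, and the field name location is an assumption), the relevant part of an ES 2.x index template that maps a top-level location field as geo_point could look roughly like this:

{
  "template": "logstash-*",
  "mappings": {
    "_default_": {
      "properties": {
        "location": { "type": "geo_point" }
      }
    }
  }
}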

I tried using the field location but am still not able to find a solution.
I just need to add a lat/lon position in a newly added field, based on the existing "clnt_nm" field, so that Kibi can recognize those newly added fields as geo_point fields.

Here is a copy of my Logstash config file:

input {
  jdbc {
    jdbc_driver_library => "my_pathforojdbc6.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "jdbc:oracle:thin:@mystring"
    jdbc_user => "xxxx"
    jdbc_password => "xxx"
    statement => "SELECT clnt_nm from table1"
    type => "string"
    schedule => "* * * * *"
  }
}

filter {
  if [clnt_nm] == "CA" {
    mutate { add_field => { "location" => "california" } }
  } else {
    mutate { add_field => { "location" => " " } }
  }

  if [clnt_nm] == "CA" {
    mutate { add_field => { "lat" => "36.7783" } }
  } else {
    mutate { add_field => { "lat" => "0" } }
  }

  mutate {
    convert => {
      "lat" => "float"
      "clnt_nm" => "string"
    }
  }

  mutate { rename => { "lat" => "[location][lat]" } }
}

output {
  elasticsearch {
    hosts => "http://localhost:9220"
    action => "index"
    index => "logstash-testing_4"
    workers => 1
    template => "my_templatefile"
    manage_template => true
    template_overwrite => true
  }
  stdout {}
}

What does the resulting document look like in ES? Copy/paste from the JSON tab in Kibana's Discover view (or equivalent; I want to see the raw JSON document). Also, what do the mappings for your index look like? Use ES's get mapping API.
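For reference, with the host and index name taken from the config above, the mappings can be fetched with ES's get mapping API, for example:

curl -XGET "http://localhost:9220/logstash-testing_4/_mapping?pretty"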

Are you talking about this?

{
  "index": "logstash-testing_4*",
  "query": {
    "query_string": {
      "analyze_wildcard": true,
      "query": "*"
    }
  },
  "filter": [],
  "highlight": {
    "pre_tags": [
      "@kibana-highlighted-field@"
    ],
    "post_tags": [
      "@/kibana-highlighted-field@"
    ],
    "fields": {
      "*": {}
    },
    "require_field_match": false,
    "fragment_size": 2147483647
  }
}

No, that's the query. I'm talking about the document itself. What are you sending to ES?
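One quick way to pull a raw document straight out of ES (again using the host and index from the config above) would be a small search, for example:

curl -XGET "http://localhost:9220/logstash-testing_4/_search?size=1&pretty"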

Here is the .json template file

{
  "template" : "logstash-*",
  "settings" : {
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
      "_all" : {"enabled" : true, "omit_norms" : true},
      "dynamic_templates" : [ {
        "message_field" : {
          "path_match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "string", "index" : "not_analyzed", "omit_norms" : true,
            "fielddata" : { "format" : "disabled" }
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "string", "index" : "not_analyzed", "omit_norms" : true,
            "fielddata" : { "format" : "disabled" },
            "fields" : {
              "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 256}
            }
          }
        }
      }, {
        "float_fields" : {
          "match" : "*",
          "match_mapping_type" : "float",
          "mapping" : { "type" : "float", "doc_values" : true }
        }
      }, {
        "double_fields" : {
          "match" : "*",
          "match_mapping_type" : "double",
          "mapping" : { "type" : "double", "doc_values" : true }
        }
      }, {
        "byte_fields" : {
          "match" : "*",
          "match_mapping_type" : "byte",
          "mapping" : { "type" : "byte", "doc_values" : true }
        }
      }, {
        "short_fields" : {
          "match" : "*",
          "match_mapping_type" : "short",
          "mapping" : { "type" : "short", "doc_values" : true }
        }
      }, {
        "integer_fields" : {
          "match" : "*",
          "match_mapping_type" : "integer",
          "mapping" : { "type" : "integer", "doc_values" : true }
        }
      }, {
        "long_fields" : {
          "match" : "*",
          "match_mapping_type" : "long",
          "mapping" : { "type" : "long", "doc_values" : true }
        }
      }, {
        "date_fields" : {
          "match" : "*",
          "match_mapping_type" : "date",
          "mapping" : { "type" : "date", "doc_values" : true }
        }
      }, {
        "geo_point_fields" : {
          "match" : "*",
          "match_mapping_type" : "geo_point",
          "mapping" : { "type" : "geo_point", "doc_values" : true }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date", "doc_values" : true },
        "@version": { "type": "string", "index": "not_analyzed", "doc_values" : true },
        "geoip" : {
          "type" : "object",
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip", "doc_values" : true },
            "location" : { "type" : "geo_point", "doc_values" : true },
            "latitude" : { "type" : "float", "doc_values" : true },
            "longitude" : { "type" : "float", "doc_values" : true }
          }
        }
      }
    }
  }
}

No, that's not what I'm asking for! I want to see the log events that Logstash is sending, based on what it's fetching from the database.
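One way to capture exactly what Logstash sends (a suggestion based on standard Logstash usage, not something stated in the thread) is to have the stdout output print full events:

output {
  # Prints each event as a readable hash so the fields being sent to ES can be inspected
  stdout { codec => rubydebug }
}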

input {
  jdbc {
    jdbc_driver_library => "C:\oracle\product\11.2.0\client_64\ojdbc6.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "jdbc:oracle:thin:@xxxxxx:0000/xxxxxx"
    jdbc_user => "ssss"
    jdbc_password => "xxxxxxxx"
    statement => "SELECT col_1,col_2,col_3 from table_1"
    schedule => "* * * * *"
  }
}

filter {
  mutate {
    convert => {
      "col_1" => "integer"
    }
  }

  date {
    match => [ "CREATE_TS", "YYYY-MM-dd HH:mm:ss.SSS-SS ", "YYYY-MM-dd HH:mm:ss.SS-SS ", "YYYY-MM-dd HH:mm:ss.S-SS " ]
    target => "ttimestamp"
  }
}

output {
  elasticsearch {
    hosts => "http://localhost:9220"
    action => "index"
    index => "logstash-testing"
    workers => 1
    template => "C:\logstash-5.2.0\vendor\bundle\jruby\1.9\gems\logstash-output-elasticsearch-6.2.4-java\lib\logstash\outputs\elasticsearch\elasticsearch-template-es2x.json"
    manage_template => true
    template_overwrite => true
  }
  stdout {}
}

I am not getting what you are requesting. Can you please specify the file format you are looking for, like a .json file or a .conf file?
