Create a geo_point from lat/lon values in a nested JSON file to be visible in Kibana Maps

Hi all,
I have the following JSON file, which contains several locations, each with its own coordinates.

{
    "A001": {
        "X": 503744.7,
        "Y": 4726339.0,
        "Z": 458.84,
        "LON": -2.954286956913572,
        "LAT": 42.68952475979137,
        "dates": [
            "2015-01-01",
            "2015-01-02",
            "2015-01-03",
            "2015-01-04",
            "2015-01-05",
            "2015-01-06"            
        ],
        "values": [
            "56.9",
            "49.7",
            "48.1",
            "37.1",
            "34.4",
            "35.9"         
        ]
    },
    "A002": {
        "X": 607870.5,
        "Y": 4670754.0,
        "Z": 264.83,
        "LON": -1.69378623727067,
        "LAT": 42.18149989583031,
        "dates": [
            "2015-01-01",
            "2015-01-02",
            "2015-01-03",
            "2015-01-04"          
        ],
        "values": [
            "287",
            "231",
            "207",
            "191"
		]	
    },
    "A403": {
        "X": 868708.0,
        "Y": 4709148.0,
        "Z": 849.0,
        "LON": 1.483146867002623,
        "LAT": 42.44694604132231,
        "dates": [
            "2015-01-01",
            "2015-01-02",
            "2015-01-03",
            "2015-01-04",
            "2015-01-05",
            "2015-01-06",
            "2015-01-07",
            "2015-01-08",
            "2015-01-09"            
        ],
        "values": [
            "2.296",
            "7.033",
            "2.298",
            "2.275",
            "7.207",
            "5.456",
            "4.794",
            "4.24",
            "4.748"
        ]
    }
}

I'm able to parse it into Elasticsearch, and what I would like to do is create the corresponding geo_point so that I can show these locations and their values on a Kibana dashboard.

I created the index using Dev Tools in Kibana:

PUT geo_data_json
{
  "mappings": {
      "properties": {
        "location": {
          "type": "geo_point"
        }
      }
  }
}
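
As far as I understand, a geo_point mapped like this should accept a "lat,lon" string, so a test document indexed by hand (a hypothetical example reusing the coordinates of station A001) should fill the field:

POST geo_data_json/_doc
{
  "location": "42.68952475979137,-2.954286956913572"
}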

and this is my Logstash config file:

input {
    file {
        path => "/etc/logstash/json-data.json"
        sincedb_path => "/dev/null"
        start_position => "beginning"
        codec => multiline {
            pattern => "^Notexisting"
            negate => "true"
            what => "previous"
            auto_flush_interval => 1
            multiline_tag => ""
        }
    }
}
filter {
    json {
        source => "message"
        remove_field => ["message", "original"]
    }
    mutate {
        add_field => { "location" => "%{LAT},%{LON}" }
    }
}

output {
    elasticsearch {
        hosts => ["https://IP:9200"]
        user => "elastic"
        password => "mypaswd"
        ssl_certificate_verification => false
        index => "geo_data_json"
        document_type => "json"
    }
    stdout { codec => rubydebug }
}

When I try to create a map in Kibana, it says that the geo_point field is empty.
I think I'm doing something wrong when reading the LON and LAT values from the JSON data.

Could you please explain where my error is?

What does an Elasticsearch document look like from your index? You can view the documents in Discover.

@Nathan_Reese below you can find the document:

{
  "_index": "geo_data_json",
  "_id": "eb8DK38B4Jx7Cgfbr3ng",
  "_version": 1,
  "_score": 1,
  "_ignored": [
    "event.original.keyword"
  ],
  "_source": {
    "@timestamp": "2022-02-24T09:15:36.063137Z",
    "@version": "1",
    "event": {
      "original": "{\n    \"A001\": {\n        \"X\": 503744.7,\n        \"Y\": 4726339.0,\n        \"Z\": 458.84,\n        \"LON\": -2.954286956913572,\n        \"LAT\": 42.68952475979137,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\",\n            \"2015-01-05\",\n            \"2015-01-06\"            \n        ],\n        \"values\": [\n            \"56.9\",\n            \"49.7\",\n            \"48.1\",\n            \"37.1\",\n            \"34.4\",\n            \"35.9\"         \n        ]\n    },\n    \"A002\": {\n        \"X\": 607870.5,\n        \"Y\": 4670754.0,\n        \"Z\": 264.83,\n        \"LON\": -1.69378623727067,\n        \"LAT\": 42.18149989583031,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\"          \n        ],\n        \"values\": [\n            \"287\",\n            \"231\",\n            \"207\",\n            \"191\"\n\t\t]\t\n    },\n    \"A403\": {\n        \"X\": 868708.0,\n        \"Y\": 4709148.0,\n        \"Z\": 849.0,\n        \"LON\": 1.483146867002623,\n        \"LAT\": 42.44694604132231,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\",\n            \"2015-01-05\",\n            \"2015-01-06\",\n            \"2015-01-07\",\n            \"2015-01-08\",\n            \"2015-01-09\"            \n        ],\n        \"values\": [\n            \"2.296\",\n            \"7.033\",\n            \"2.298\",\n            \"2.275\",\n            \"7.207\",\n            \"5.456\",\n            \"4.794\",\n            \"4.24\",\n            \"4.748\"\n        ]\n    }\n}"
    },
    "log": {
      "file": {
        "path": "/etc/logstash/json-data.json"
      }
    },
    "host": {
      "name": "eosao68"
    },
    "A001": {
      "X": 503744.7,
      "values": [
        "56.9",
        "49.7",
        "48.1",
        "37.1",
        "34.4",
        "35.9"
      ],
      "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04",
        "2015-01-05",
        "2015-01-06"
      ],
      "Y": 4726339,
      "Z": 458.84,
      "LAT": 42.68952475979137,
      "LON": -2.954286956913572
    },
    "A002": {
      "X": 607870.5,
      "values": [
        "287",
        "231",
        "207",
        "191"
      ],
      "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04"
      ],
      "Y": 4670754,
      "Z": 264.83,
      "LAT": 42.18149989583031,
      "LON": -1.69378623727067
    },
    "A403": {
      "X": 868708,
      "values": [
        "2.296",
        "7.033",
        "2.298",
        "2.275",
        "7.207",
        "5.456",
        "4.794",
        "4.24",
        "4.748"
      ],
      "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04",
        "2015-01-05",
        "2015-01-06",
        "2015-01-07",
        "2015-01-08",
        "2015-01-09"
      ],
      "Y": 4709148,
      "Z": 849,
      "LAT": 42.44694604132231,
      "LON": 1.483146867002623
    }
  },
  "fields": {
    "A403.values.keyword": [
      "2.296",
      "7.033",
      "2.298",
      "2.275",
      "7.207",
      "5.456",
      "4.794",
      "4.24",
      "4.748"
    ],
    "A002.values.keyword": [
      "287",
      "231",
      "207",
      "191"
    ],
    "A001.LON": [
      -2.954287
    ],
    "host.name.keyword": [
      "eosao68"
    ],
    "A001.values": [
      "56.9",
      "49.7",
      "48.1",
      "37.1",
      "34.4",
      "35.9"
    ],
    "A403.values": [
      "2.296",
      "7.033",
      "2.298",
      "2.275",
      "7.207",
      "5.456",
      "4.794",
      "4.24",
      "4.748"
    ],
    "A001.dates": [
      "2015-01-01T00:00:00.000Z",
      "2015-01-02T00:00:00.000Z",
      "2015-01-03T00:00:00.000Z",
      "2015-01-04T00:00:00.000Z",
      "2015-01-05T00:00:00.000Z",
      "2015-01-06T00:00:00.000Z"
    ],
    "A002.Z": [
      264.83
    ],
    "A002.LON": [
      -1.6937863
    ],
    "@version": [
      "1"
    ],
    "A001.Z": [
      458.84
    ],
    "A002.Y": [
      4670754
    ],
    "A403.LAT": [
      42.446945
    ],
    "host.name": [
      "eosao68"
    ],
    "A403.Z": [
      849
    ],
    "log.file.path.keyword": [
      "/etc/logstash/json-data.json"
    ],
    "A001.Y": [
      4726339
    ],
    "A002.X": [
      607870.5
    ],
    "A403.X": [
      868708
    ],
    "A001.X": [
      503744.7
    ],
    "A403.Y": [
      4709148
    ],
    "event.original": [
      "{\n    \"A001\": {\n        \"X\": 503744.7,\n        \"Y\": 4726339.0,\n        \"Z\": 458.84,\n        \"LON\": -2.954286956913572,\n        \"LAT\": 42.68952475979137,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\",\n            \"2015-01-05\",\n            \"2015-01-06\"            \n        ],\n        \"values\": [\n            \"56.9\",\n            \"49.7\",\n            \"48.1\",\n            \"37.1\",\n            \"34.4\",\n            \"35.9\"         \n        ]\n    },\n    \"A002\": {\n        \"X\": 607870.5,\n        \"Y\": 4670754.0,\n        \"Z\": 264.83,\n        \"LON\": -1.69378623727067,\n        \"LAT\": 42.18149989583031,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\"          \n        ],\n        \"values\": [\n            \"287\",\n            \"231\",\n            \"207\",\n            \"191\"\n\t\t]\t\n    },\n    \"A403\": {\n        \"X\": 868708.0,\n        \"Y\": 4709148.0,\n        \"Z\": 849.0,\n        \"LON\": 1.483146867002623,\n        \"LAT\": 42.44694604132231,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\",\n            \"2015-01-05\",\n            \"2015-01-06\",\n            \"2015-01-07\",\n            \"2015-01-08\",\n            \"2015-01-09\"            \n        ],\n        \"values\": [\n            \"2.296\",\n            \"7.033\",\n            \"2.298\",\n            \"2.275\",\n            \"7.207\",\n            \"5.456\",\n            \"4.794\",\n            \"4.24\",\n            \"4.748\"\n        ]\n    }\n}"
    ],
    "A001.LAT": [
      42.689526
    ],
    "@version.keyword": [
      "1"
    ],
    "A403.LON": [
      1.4831469
    ],
    "A002.dates": [
      "2015-01-01T00:00:00.000Z",
      "2015-01-02T00:00:00.000Z",
      "2015-01-03T00:00:00.000Z",
      "2015-01-04T00:00:00.000Z"
    ],
    "A001.values.keyword": [
      "56.9",
      "49.7",
      "48.1",
      "37.1",
      "34.4",
      "35.9"
    ],
    "@timestamp": [
      "2022-02-24T09:15:36.063Z"
    ],
    "A002.LAT": [
      42.1815
    ],
    "A002.values": [
      "287",
      "231",
      "207",
      "191"
    ],
    "log.file.path": [
      "/etc/logstash/json-data.json"
    ],
    "A403.dates": [
      "2015-01-01T00:00:00.000Z",
      "2015-01-02T00:00:00.000Z",
      "2015-01-03T00:00:00.000Z",
      "2015-01-04T00:00:00.000Z",
      "2015-01-05T00:00:00.000Z",
      "2015-01-06T00:00:00.000Z",
      "2015-01-07T00:00:00.000Z",
      "2015-01-08T00:00:00.000Z",
      "2015-01-09T00:00:00.000Z"
    ]
  },
  "ignored_field_values": {
    "event.original.keyword": [
      "{\n    \"A001\": {\n        \"X\": 503744.7,\n        \"Y\": 4726339.0,\n        \"Z\": 458.84,\n        \"LON\": -2.954286956913572,\n        \"LAT\": 42.68952475979137,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\",\n            \"2015-01-05\",\n            \"2015-01-06\"            \n        ],\n        \"values\": [\n            \"56.9\",\n            \"49.7\",\n            \"48.1\",\n            \"37.1\",\n            \"34.4\",\n            \"35.9\"         \n        ]\n    },\n    \"A002\": {\n        \"X\": 607870.5,\n        \"Y\": 4670754.0,\n        \"Z\": 264.83,\n        \"LON\": -1.69378623727067,\n        \"LAT\": 42.18149989583031,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\"          \n        ],\n        \"values\": [\n            \"287\",\n            \"231\",\n            \"207\",\n            \"191\"\n\t\t]\t\n    },\n    \"A403\": {\n        \"X\": 868708.0,\n        \"Y\": 4709148.0,\n        \"Z\": 849.0,\n        \"LON\": 1.483146867002623,\n        \"LAT\": 42.44694604132231,\n        \"dates\": [\n            \"2015-01-01\",\n            \"2015-01-02\",\n            \"2015-01-03\",\n            \"2015-01-04\",\n            \"2015-01-05\",\n            \"2015-01-06\",\n            \"2015-01-07\",\n            \"2015-01-08\",\n            \"2015-01-09\"            \n        ],\n        \"values\": [\n            \"2.296\",\n            \"7.033\",\n            \"2.298\",\n            \"2.275\",\n            \"7.207\",\n            \"5.456\",\n            \"4.794\",\n            \"4.24\",\n            \"4.748\"\n        ]\n    }\n}"
    ]
  }
}

My main doubt is how to parse, in the Logstash conf file, the lat/lon values that belong to each station name (A001, A002, A403), since those keys change within the JSON, so that I can create the geo_point.

thanks

Can you explain your use case? Why are you structuring your documents this way? Encoding data in key names makes the documents difficult to work with.

I would recommend splitting your document into a separate document per station, so that this:

{
    "A001": {
        "X": 503744.7,
        "Y": 4726339.0,
        "Z": 458.84,
        "LON": -2.954286956913572,
        "LAT": 42.68952475979137,
        "dates": [
            "2015-01-01",
            "2015-01-02",
            "2015-01-03",
            "2015-01-04",
            "2015-01-05",
            "2015-01-06"            
        ],
        "values": [
            "56.9",
            "49.7",
            "48.1",
            "37.1",
            "34.4",
            "35.9"         
        ]
    },
    "A002": {
        "X": 607870.5,
        "Y": 4670754.0,
        "Z": 264.83,
        "LON": -1.69378623727067,
        "LAT": 42.18149989583031,
        "dates": [
            "2015-01-01",
            "2015-01-02",
            "2015-01-03",
            "2015-01-04"          
        ],
        "values": [
            "287",
            "231",
            "207",
            "191"
		]	
    },
    "A403": {
        "X": 868708.0,
        "Y": 4709148.0,
        "Z": 849.0,
        "LON": 1.483146867002623,
        "LAT": 42.44694604132231,
        "dates": [
            "2015-01-01",
            "2015-01-02",
            "2015-01-03",
            "2015-01-04",
            "2015-01-05",
            "2015-01-06",
            "2015-01-07",
            "2015-01-08",
            "2015-01-09"            
        ],
        "values": [
            "2.296",
            "7.033",
            "2.298",
            "2.275",
            "7.207",
            "5.456",
            "4.794",
            "4.24",
            "4.748"
        ]
    }
}

would become

{
    "station": "A001",
    "X": 503744.7,
    "Y": 4726339.0,
    "Z": 458.84,
    "LON": -2.954286956913572,
    "LAT": 42.68952475979137,
    "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04",
        "2015-01-05",
        "2015-01-06"
    ],
    "values": [
        "56.9",
        "49.7",
        "48.1",
        "37.1",
        "34.4",
        "35.9"
    ]
}


{
    "station": "A002",
    "X": 607870.5,
    "Y": 4670754.0,
    "Z": 264.83,
    "LON": -1.69378623727067,
    "LAT": 42.18149989583031,
    "dates": [
        "2015-01-01",
        "2015-01-02",
        "2015-01-03",
        "2015-01-04"
    ],
    "values": [
        "287",
        "231",
        "207",
        "191"
    ]
}
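
With documents shaped like that, the mutate filter you already have would resolve LAT and LON directly, since they become top-level fields, and a geo_point field accepts a "lat,lon" string. For example:

filter {
    mutate {
        add_field => { "location" => "%{LAT},%{LON}" }
    }
}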

@Nathan_Reese if I follow your suggestion, how can I get the different geo_points for the various stations?
Should I use a loop in the conf file?

Should I use a loop in the conf file?

Yes, you can use the ruby filter plugin to write Ruby code in your conf file. Then you can use Ruby's each method to iterate over the keys in your object and emit a separate event for each key.
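
Something along these lines should work (an untested sketch; it assumes you add target => "stations" to your json filter so that the parsed station keys end up nested under a single field instead of spread across the event root, and it uses the ruby filter's mechanism for emitting additional events):

filter {
    json {
        source => "message"
        # assumption: nest the parsed keys under one field so the ruby block can find them
        target => "stations"
        remove_field => ["message"]
    }
    ruby {
        code => '
            stations = event.get("stations")
            unless stations.nil?
                stations.each do |name, fields|
                    # build one new event per station
                    e = LogStash::Event.new
                    e.set("station", name)
                    fields.each { |k, v| e.set(k, v) }
                    # "lat,lon" string for the geo_point field
                    e.set("location", "#{fields["LAT"]},#{fields["LON"]}")
                    new_event_block.call(e)
                end
                # drop the original combined event
                event.cancel
            end
        '
    }
}

Each emitted event then flows through your existing output as its own document, so every station gets its own location geo_point.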
