Not able to parse GeoJSON data in Logstash

This is the config I am using.

input {
  file {
    path => "D:/Softwares/ELK/data/geojson/features.geojson"
    start_position => "beginning"
	sincedb_path => "D:/Softwares/ELK/data/cache/geojsontry.txt"
	codec => multiline { 
		pattern => "^}" 
		negate => true 
		what => previous 
		auto_flush_interval => 1 
		multiline_tag => "" 
	}
  }
}

filter {
	json {
		source => "message"
	}
	json {
		source => "features"
		remove_field => [ "message" , "type", "crs"]
	}
}

output {
	elasticsearch {
		hosts => ["http://localhost:9200"]
		index => "geojson_index"
		manage_template => false
  }
   stdout{ codec => rubydebug }
}

This is the sample GeoJSON file.

{
  "type" : "FeatureCollection",
  "crs" : {
    "type" : "name",
    "properties" : {
      "name" : "urn:ogc:def:crs:EPSG::4326"
    }
  },
  "features" : [ {
    "type" : "Feature",
    "geometry" : {
      "type" : "Polygon",
      "coordinates" : [ [ [ -111.73542843574079, 57.32694412094532 ], [ -111.73542843571491, 57.32694474950013 ], [ -111.7354259456222, 57.326944749470115 ], [ -111.73542594564813, 57.3269441209153 ], [ -111.73542843574079, 57.32694412094532 ] ] ]
    },
    "properties" : {
      "aabb" : "{ \"max\": [ 91.472427, 0.034990, 49.527629 ],\"min\": [ 91.322427, -0.035010, 49.507629 ]}",
      "minZ" : 49.50828122254461,
      "maxZ" : 49.52828122254461
    }
  }, {
    "type" : "Feature",
    "geometry" : {
      "type" : "Polygon",
      "coordinates" : [ [ [ -111.73542885630422, 57.32694308827081 ], [ -111.73542885619332, 57.3269457821758 ], [ -111.73542552514651, 57.32694578213565 ], [ -111.73542552525765, 57.32694308823064 ], [ -111.73542885630422, 57.32694308827081 ] ] ]
    },
    "properties" : {
      "aabb" : "{ \"max\": [ 91.497756, 0.149995, 49.753795 ],\"min\": [ 91.297098, -0.150016, 49.692689 ]}",
      "minZ" : 49.69334086030722,
      "maxZ" : 49.75444686030721
    }
  }, {
    "type" : "Feature",
    "geometry" : {
      "type" : "Polygon",
      "coordinates" : [ [ [ -111.73542885626378, 57.32694308827077 ], [ -111.73542885615288, 57.326945782175855 ], [ -111.73542552510598, 57.326945782135674 ], [ -111.73542552521711, 57.32694308823061 ], [ -111.73542885626378, 57.32694308827077 ] ] ]
    },
    "properties" : {
      "aabb" : "{ \"max\": [ 91.497756, 0.149995, 49.583200 ],\"min\": [ 91.297098, -0.150016, 49.522095 ]}",
      "minZ" : 49.522746860049665,
      "maxZ" : 49.58385186004966
    }
  }, {
    "type" : "Feature",
    "geometry" : {
      "type" : "Polygon",
      "coordinates" : [ [ [ -111.73542911393673, 57.32694272909072 ], [ -111.73542911379629, 57.3269461413621 ], [ -111.73542526743296, 57.32694614131573 ], [ -111.73542526757376, 57.32694272904434 ], [ -111.73542911393673, 57.32694272909072 ] ] ]
    },
    "properties" : {
      "aabb" : "{ \"max\": [ 91.513277, 0.189996, 49.637951 ],\"min\": [ 91.281577, -0.190017, 49.522095 ]}",
      "minZ" : 49.52274664118886,
      "maxZ" : 49.63860264118886
    }
  } ]
}

I want to index only the coordinates, maxZ, and minZ into Elasticsearch.

I am getting the following parsing error.

:exception=>#<LogStash::Json::ParserError: Unexpected close marker '}': expected ']' (for root starting at [Source: (byte[])"}

Any help in this regard is highly appreciated.

Hi @aaryan. Is this a one-time import of the geojson file? If so, I would suggest looking for an alternative to Logstash. Elastic Maps in Kibana has an Upload feature that works well for files under 1GB.

The GDAL library can also upload geojson files into Elasticsearch.

1 Like

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.