Import CSV file into Elasticsearch using Logstash (both version 6.2.2)

My CSV file looks like this:
lon,lat,number,street,unit,city,district,region,postcode,id,hash
-149.362137,61.4558721,26101,OLD GLENN HWY,TOWER,,,,99567,,95223943e9d286f2

My conf file:
input {
  file {
    path => "D:\ak-Copy.csv"
    start_position => "beginning"
  }
}

filter {
  csv {
    separator => ","
    columns => ["lon", "lat", "number", "street", "unit", "city", "district", "region", "postcode", "id", "hash"]
  }

  mutate {
    add_field => { "location" => "geo location" }
    convert => { "location" => "geo_point" }
    convert => { "number" => "integer" }
    convert => { "street" => "text" }
    convert => { "unit" => "text" }
    convert => { "city" => "text" }
    convert => { "district" => "text" }
    convert => { "region" => "text" }
    convert => { "postcode" => "text" }
    convert => { "id" => "text" }
    convert => { "hash" => "text" }
    rename => [ "lon", "[location][lon]", "lat", "[location][lat]" ]
  }
}

output {
  elasticsearch {
    hosts => ["localhost"]
    index => "geostore-ak"
    document_type => "locality-test"
  }
  stdout {}
}

I'm getting an error like this:

[2018-03-02T17:15:22,233][ERROR][logstash.pipeline        ] Error registering plugin {:pipeline_id=>"main", :plugin=>"#<LogStash::FilterDelegator:0x501abfad @metric_events_out=org.jruby.proxy.org.logstash.instrument.metrics.counter.LongCounter$Proxy2 -  name: out value:0, @metric_events_in=org.jruby.proxy.org.logstash.instrument.metrics.counter.LongCounter$Proxy2 -  name: in value:0, @metric_events_time=org.jruby.proxy.org.logstash.instrument.metrics.counter.LongCounter$Proxy2 -  name: duration_in_millis value:0, @id=\"2e6c343e98dfaa49dab3ef4a612cee0c2a8f6a0d208dc3b9111d270b29ffc165\", @klass=LogStash::Filters::Mutate, @metric_events=#<LogStash::Instrument::NamespacedMetric:0x4e470540 @metric=#<LogStash::Instrument::Metric:0x1501a2e1 @collector=#<LogStash::Instrument::Collector:0x794ec7a7 @agent=nil, @metric_store=#<LogStash::Instrument::MetricStore:0x7cacf8f4 @store=#<Concurrent::Map:0x00000000000fb0 entries=3 default_proc=nil>, @structured_lookup_mutex=#<Mutex:0x1dc5bc26>, @fast_lookup=#<Concurrent::Map:0x00000000000fb4 entries=66 default_proc=nil>>>>, @namespace_name=[:stats, :pipelines, :main, :plugins, :filters, :\"2e6c343e98dfaa49dab3ef4a612cee0c2a8f6a0d208dc3b9111d270b29ffc165\", :events]>, @filter=<LogStash::Filters::Mutate add_field=>{\"location\"=>\"geo location\"}, convert=>{\"location\"=>\"geo_point\", \"number\"=>\"integer\", \"street\"=>\"text\", \"unit\"=>\"text\", \"city\"=>\"text\", \"district\"=>\"text\", \"region\"=>\"text\", \"postcode\"=>\"text\", \"id\"=>\"text\", \"hash\"=>\"text\"}, rename=>{\"lon\"=>\"[location][lon]\", \"lat\"=>\"[location][lat]\"}, id=>\"2e6c343e98dfaa49dab3ef4a612cee0c2a8f6a0d208dc3b9111d270b29ffc165\", enable_metric=>true, periodic_flush=>false>>", :error=>"translation missing: en.logstash.agent.configuration.invalid_plugin_register", :thread=>"#<Thread:0x304dfd7f run>"}

[2018-03-02T17:15:22,270][ERROR][logstash.pipeline ] Pipeline aborted due to error {:pipeline_id=>"main", :exception=>#<LogStash::ConfigurationError: translation missing: en.logstash.agent.configuration.invalid_plugin_register>, :backtrace=>["D:/www/logstash-6.2.2/vendor/bundle/jruby/2.3.0/gems/logstash-filter-mutate-3.2.0/lib/logstash/filters/mutate.rb:190:in `block in register'", "org/jruby/RubyHash.java:1343:in `each'", "D:/www/logstash-6.2.2/vendor/bundle/jruby/2.3.0/gems/logstash-filter-mutate-3.2.0/lib/logstash/filters/mutate.rb:188:in `register'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:341:in `register_plugin'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:352:in `block in register_plugins'", "org/jruby/RubyArray.java:1734:in `each'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:352:in `register_plugins'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:736:in `maybe_setup_out_plugins'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:362:in `start_workers'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:289:in `run'", "D:/www/logstash-6.2.2/logstash-core/lib/logstash/pipeline.rb:249:in `block in start'"], :thread=>"#<Thread:0x304dfd7f run>"}
[2018-03-02T17:15:22,323][ERROR][logstash.agent ] Failed to execute action {:id=>:main, :action_type=>LogStash::ConvergeResult::FailedAction, :message=>"Could not execute action: LogStash::PipelineAction::Create/pipeline_id:main, action_result: false", :backtrace=>nil}

Is this the correct way?

Hey Karthik,

Your conf file looks OK (however, I'm not sure the mutate is going to get the business done for the geo location - check out my example of doing exactly the same thing for the Australian Open Street Maps data here).

The error message points to a configuration issue: the mutate filter's convert option only accepts integer, float, string, and boolean as target types, so "text" and "geo_point" will make the plugin fail to register. If my config does not work either, I would check whether you have some invalid plugins installed.
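For reference, a minimal corrected mutate might look like this (the geo_point conversion is dropped entirely, since geo_point is an Elasticsearch mapping type rather than a Logstash conversion target, and the CSV columns are already strings):

filter {
  mutate {
    # valid conversion targets: integer, float, string, boolean
    convert => { "number" => "integer" }
    convert => { "lon" => "float" }
    convert => { "lat" => "float" }
    rename => {
      "lon" => "[location][lon]"
      "lat" => "[location][lat]"
    }
  }
}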

Thanks Johnnycc1, it's working fine now.

Conf file:
input {
  file {
    path => "D:/ak.csv"
    start_position => "beginning"
    sincedb_path => "/dev/null"  # on Windows, "NUL" is the usual equivalent
  }
}

filter {
  # lon,lat,number,street,unit,city,district,region,postcode,id,hash
  csv {
    separator => ","
    columns => ["lon", "lat", "number", "street", "unit", "city", "district", "region", "postcode", "id", "hash"]
  }

  # lon/lat must be numeric before they can back a geo_point
  mutate {
    convert => { "lon" => "float" }
    convert => { "lat" => "float" }
  }

  # nest them under "location" to match the geo_point mapping
  mutate {
    rename => {
      "lon" => "[location][lon]"
      "lat" => "[location][lat]"
    }
  }
}

output {
  elasticsearch {
    index => "geo-aks"
    hosts => "http://localhost:9200"
    document_type => "locality-ak"
    template => "geo-template.json"
    template_name => "ak-template"
    template_overwrite => true
  }

  stdout { codec => rubydebug }
}
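For anyone following along: assuming the file is saved as ak.conf, the pipeline can be started with bin/logstash -f ak.conf, and with the sample row above the rubydebug output should show lon and lat nested under location as floats, roughly like this (other columns omitted):

{
      "number" => "26101",
      "street" => "OLD GLENN HWY",
    "location" => {
        "lon" => -149.362137,
        "lat" => 61.4558721
    }
}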

Template file:

{
  "template": "geo-aks*",
  "settings": {
    "number_of_shards": 4,
    "number_of_replicas": 0,
    "index.codec": "best_compression",
    "index.refresh_interval": "99s"
  },
  "mappings": {
    "locality-ak": {
      "properties": {
        "location": {
          "type": "geo_point"
        }
      }
    }
  }
}
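Note that the "template" value inside the JSON is the index pattern the template applies to, so it has to match the index name (geo-aks), not the template name, and the mapping type has to match the document_type from the output. A quick way to sanity-check both (assuming Elasticsearch on localhost:9200) is:

curl -XGET "http://localhost:9200/_template/ak-template?pretty"
curl -XGET "http://localhost:9200/geo-aks/_mapping?pretty"

If the second call does not show "type": "geo_point" under location, the template did not match, and the index will need to be deleted and re-created after fixing the pattern.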
