I am trying to get Logstash to import my CSV data into an Elasticsearch cluster and view it in Kibana. The Logstash/ES/Kibana instances are all up and running, but Logstash does not create the index.
Below is my Logstash.conf file
# Pipeline: read a CSV of car listings and index each row into Elasticsearch.
input {
  file {
    # Must be an absolute path readable by the user running Logstash.
    path => "/data/cars.csv"
    # Read from the top of the file when it is first discovered.
    start_position => "beginning"
    # Do not persist the read position, so the file is re-read on every restart.
    sincedb_path => "/dev/null"
    # NOTE(review): some file-input plugin releases silently skip files whose
    # mtime is older than ignore_older (default 86400 s) -- if cars.csv is old,
    # touch it or set ignore_older => 0; confirm against the installed plugin.
  }
}
filter {
  csv {
    separator => ","
    columns => ["maker", "model", "mileage", "manufacture_year", "engine_displacement", "engine_power", "body_type", "color_slug", "stk_year", "transmission", "door_count", "seat_count", "fuel_type"]
  }
  # One mutate with hash-form convert replaces the thirteen separate
  # mutate blocks; behavior is identical. The "string" conversions are
  # no-ops (CSV fields are already strings) but are kept for clarity.
  mutate {
    convert => {
      "maker"               => "string"
      "model"               => "string"
      "mileage"             => "float"
      "manufacture_year"    => "integer"
      "engine_displacement" => "integer"
      "engine_power"        => "integer"
      "body_type"           => "string"
      "color_slug"          => "string"
      "stk_year"            => "string"
      "transmission"        => "string"
      "door_count"          => "integer"
      "seat_count"          => "integer"
      "fuel_type"           => "string"
    }
  }
}
output {
  elasticsearch {
    hosts => ["http://10.0.10.226:9200","http://10.0.10.227:9200","http://10.0.10.228:9200","http://10.0.10.229:9200"]
    index => "cars"
    # Mapping types are deprecated as of Elasticsearch 6.x; kept here for
    # backward compatibility with the existing cluster.
    document_type => "sold_cars"
  }
  # Echo every event to the console for debugging.
  stdout { codec => rubydebug }
}
Below is the output in the log file
[2017-11-21T00:19:56,669][INFO ][logstash.pipeline ] Pipeline main started
[2017-11-21T00:19:56,872][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2017-11-21T00:29:30,503][WARN ][logstash.runner ] SIGTERM received. Shutting down the agent.
[2017-11-21T00:29:30,515][WARN ][logstash.agent ] stopping pipeline {:id=>"main"}
[2017-11-21T00:29:45,625][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"fb_apache", :directory=>"/usr/share/logstash/modules/fb_apache/configuration"}
[2017-11-21T00:29:45,628][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"netflow", :directory=>"/usr/share/logstash/modules/netflow/configuration"}
[2017-11-21T00:29:47,083][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://10.0.10.226:9200/, http://10.0.10.227:9200/, http://10.0.10.228:9200/, http://10.0.10.229:9200/]}}
[2017-11-21T00:29:47,084][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://10.0.10.226:9200/, :path=>"/"}
[2017-11-21T00:29:47,170][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.0.10.226:9200/"}
[2017-11-21T00:29:47,208][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://10.0.10.227:9200/, :path=>"/"}
[2017-11-21T00:29:47,217][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.0.10.227:9200/"}
[2017-11-21T00:29:47,225][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://10.0.10.228:9200/, :path=>"/"}
[2017-11-21T00:29:47,237][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.0.10.228:9200/"}
[2017-11-21T00:29:47,243][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://10.0.10.229:9200/, :path=>"/"}
[2017-11-21T00:29:47,251][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.0.10.229:9200/"}
[2017-11-21T00:29:47,259][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2017-11-21T00:29:47,264][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2017-11-21T00:29:47,288][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://10.0.10.226:9200", "http://10.0.10.227:9200", "http://10.0.10.228:9200", "http://10.0.10.229:9200"]}
[2017-11-21T00:29:47,293][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>250}
[2017-11-21T00:29:47,509][INFO ][logstash.pipeline ] Pipeline main started
[2017-11-21T00:29:47,546][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
The file is never read and the system simply hangs indefinitely.