Hi,
I am trying to push CSV data into Elasticsearch from Logstash. I can see the message that Logstash started successfully, but the Elasticsearch index is never created.
Below is my Logstash config file:
input {
  file {
    path => "C:\Users\RAKESH\Desktop\Kibana\Data\JMeterRes.csv"
    start_position => "beginning"
    sincedb_path => "nul"
  }
}
filter {
  if ([timeStamp] == "timeStamp") {
    drop {}
  } else {
    csv {
      columns => ["timeStamp","elapsed","label","responseCode","threadName","success","bytes","sentBytes","grpThreads","allThreads","Latency","SampleCount","ErrorCount","Hostname","Connect"]
      separator => ","
      skip_header => true
      convert => {
        "elapsed"     => "integer"
        "bytes"       => "integer"
        "sentBytes"   => "integer"
        "SampleCount" => "integer"
        "ErrorCount"  => "integer"
        "grpThreads"  => "integer"
        "allThreads"  => "integer"
      }
    }
  }
  mutate {
    rename => {
      "timeStamp"    => "time_stamp"
      "elapsed"      => "response_time"
      "label"        => "transaction_name"
      "responseCode" => "response_code"
      "threadName"   => "thread_name"
      "success"      => "success_status"
      "sentBytes"    => "sent_bytes"
      "grpThreads"   => "grp_threads"
      "allThreads"   => "active_users"
      "Latency"      => "latency"
      "SampleCount"  => "sample_count"
      "ErrorCount"   => "error_count"
      "Hostname"     => "loadagent_name"
      "Connect"      => "connect"
    }
  }
}
output {
  elasticsearch {
    hosts => "localhost:9200"
    index => "Rakesh-%{+YYYY.MM.dd}"
  }
  stdout { codec => rubydebug }
}
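After starting Logstash I check for the index with curl -X GET "http://localhost:9200/_cat/indices?v", and nothing matching Rakesh-* ever appears. One thing I am now wondering about (my own guess, nothing in the log complains about it): as far as I know Elasticsearch only accepts lowercase index names, so the capital "R" in my index name could cause the writes to be rejected. A lowercase variant of the output would look like this:

output {
  elasticsearch {
    hosts => "localhost:9200"
    index => "rakesh-%{+YYYY.MM.dd}"
  }
  stdout { codec => rubydebug }
}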
Below is the debug log from Logstash:
[2018-10-08T22:19:25,906][DEBUG][logstash.outputs.elasticsearch] Normalizing http path {:path=>nil, :normalized=>nil}
[2018-10-08T22:19:28,996][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2018-10-08T22:19:29,011][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[2018-10-08T22:19:30,275][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2018-10-08T22:19:30,275][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[2018-10-08T22:19:31,149][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2018-10-08T22:19:31,913][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2018-10-08T22:19:31,929][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2018-10-08T22:19:31,991][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2018-10-08T22:19:32,038][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-10-08T22:19:32,038][DEBUG][logstash.filters.csv ] CSV parsing options {:col_sep=>",", :quote_char=>"\""}
[2018-10-08T22:19:32,116][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-10-08T22:19:32,225][DEBUG][logstash.outputs.elasticsearch] Found existing Elasticsearch template. Skipping template management {:name=>"logstash"}
[2018-10-08T22:19:33,927][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x4e7d0981 sleep>"}
[2018-10-08T22:19:34,005][INFO ][filewatch.observingtail ] START, creating Discoverer, Watch with file and sincedb collections
[2018-10-08T22:19:34,161][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2018-10-08T22:19:34,208][DEBUG][logstash.agent ] Starting puma
[2018-10-08T22:19:34,224][DEBUG][logstash.agent ] Trying to start WebServer {:port=>9600}
[2018-10-08T22:19:34,286][DEBUG][logstash.api.service ] [api-service] start
[2018-10-08T22:19:35,280][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2018-10-08T22:19:35,280][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[2018-10-08T22:19:35,390][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2018-10-08T22:19:38,932][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>"main", :thread=>"#<Thread:0x4e7d0981 sleep>"}
[2018-10-08T22:19:40,289][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2018-10-08T22:19:40,305][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[2018-10-08T22:19:43,952][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>"main", :thread=>"#<Thread:0x4e7d0981 sleep>"}
[2018-10-08T22:19:45,318][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2018-10-08T22:19:45,318][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[2018-10-08T22:19:48,953][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>"main", :thread=>"#<Thread:0x4e7d0981 sleep>"}
[2018-10-08T22:19:50,334][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2018-10-08T22:19:50,334][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
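After this the pipeline just keeps logging "Pushing flush onto pipeline" and the rubydebug output never prints a single event, so it looks like the file input is not picking up the CSV at all. I have read that the file input on Windows works better with forward slashes in path, so this variant of the input (same file, only the path separators changed; I have not confirmed this is the cause) may be worth trying:

input {
  file {
    path => "C:/Users/RAKESH/Desktop/Kibana/Data/JMeterRes.csv"
    start_position => "beginning"
    sincedb_path => "nul"
  }
}

Am I missing something else in the config? Any pointers would be appreciated.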