Index not getting created in Elasticsearch

When I start Logstash with my conf file, the log below is printed, but no indices are created in Elasticsearch.

Logstash log:
Sending Logstash's logs to E:/ELK/logstash-6.2.3/logs which is now configured via log4j2.properties
[2018-04-27T00:36:49,496][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"fb_apache", :directory=>"E:/ELK/logstash-6.2.3/modules/fb_apache/configuration"}
[2018-04-27T00:36:49,527][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"netflow", :directory=>"E:/ELK/logstash-6.2.3/modules/netflow/configuration"}
[2018-04-27T00:36:49,824][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2018-04-27T00:36:50,652][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.2.3"}
[2018-04-27T00:36:51,558][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2018-04-27T00:37:03,146][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2018-04-27T00:37:03,772][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2018-04-27T00:37:03,787][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[2018-04-27T00:37:04,084][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2018-04-27T00:37:04,178][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2018-04-27T00:37:04,178][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the type event field won't be used to determine the document _type {:es_version=>6}
[2018-04-27T00:37:04,209][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-04-27T00:37:04,240][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-04-27T00:37:04,318][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2018-04-27T00:37:04,350][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2018-04-27T00:37:04,350][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[2018-04-27T00:37:04,350][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2018-04-27T00:37:04,381][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2018-04-27T00:37:04,381][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the type event field won't be used to determine the document _type {:es_version=>6}
[2018-04-27T00:37:04,381][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-04-27T00:37:04,397][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-04-27T00:37:04,428][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2018-04-27T00:37:06,162][INFO ][logstash.pipeline ] Pipeline started succesfully {:pipeline_id=>"main", :thread=>"#<Thread:0x7f2d2c3 run>"}
[2018-04-27T00:37:06,381][INFO ][logstash.agent ] Pipelines running {:count=>1, :pipelines=>["main"]}

My conf file:
input {
  file {
    path => "C:\Users\Windows\Desktop\Prod\web1.log"
    type => "web1_accesslogs"
  }

  file {
    path => "C:\Users\Windows\Desktop\Prod\web2.log"
    type => "web2_accesslogs"
  }
}

filter {

  if [type] == "web1_accesslogs" {
    grok {
      match => {
        "message" => "%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:logTime}\] \"%{WORD:method} %{URIPATHPARAM:request} %{NOTSPACE:httpV}\" %{NUMBER:status} (?:-|%{NUMBER:bytes}) %{QS:referrer} %{QS:agent} %{NUMBER:responseTime} %{NUMBER:responseTime}"
      }
    }
  }

  if [type] == "web2_accesslogs" {
    grok {
      match => {
        "message" => "%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:logTime}\] \"%{WORD:method} %{URIPATHPARAM:request} %{NOTSPACE:httpV}\" %{NUMBER:status} (?:-|%{NUMBER:bytes}) %{QS:referrer} %{QS:agent} %{NUMBER:responseTime} %{NUMBER:responseTime}"
      }
    }
  }

  mutate {
    convert => { "bytes" => "integer" }
    convert => { "status" => "integer" }
    convert => { "responseTime" => "integer" }
  }

  useragent {
    source => "agent"
    target => "useragent"
  }

  date {
    match => [ "logTime", "dd/MMM/YYYY:HH:mm:ss Z" ]
    locale => "en"
  }
}

output {

  if [type] == "web1_accesslogs" {
    elasticsearch {
      hosts => ["localhost:9200"]
      index => "web1_logs"
    }
  }

  if [type] == "web2_accesslogs" {
    elasticsearch {
      hosts => ["localhost:9200"]
      index => "web2_logs"
    }
  }

  stdout { codec => rubydebug }
}
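
For reference, I am checking with the cat indices API, and neither web1_logs nor web2_logs ever appears:

curl http://localhost:9200/_cat/indices?v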

How do you know Logstash is even reading anything from your files? Have you read the file input documentation, paying special attention to the start_position option and everything that's said about sincedb?
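
As a sanity check, a stripped-down pipeline like the one below (your path, a stdout output, and nothing else) will show whether events are being read at all. Note that the path option is treated as a glob, so forward slashes are the safe choice on Windows:

input {
  file {
    path => "C:/Users/Windows/Desktop/Prod/web1.log"
    start_position => "beginning"
  }
}

output {
  stdout { codec => rubydebug }
}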

I added start_position, but it is still not reading from the files.

start_position only makes a difference the first time a file is seen. In your case Logstash has already decided upon a position in the file.
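
If you want every run to read the file from the beginning, point sincedb_path at the null device so the position is never persisted, e.g.:

file {
  path => "C:/Users/Windows/Desktop/Prod/web1.log"
  type => "web1_accesslogs"
  start_position => "beginning"
  sincedb_path => "NUL"    # null device on Windows; use /dev/null on Linux
}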

This is an extremely common problem. Please search past threads for more detailed explanations.
