This is the whole config file Every thing is working fine and data is streaming if the input from the file not HTTP
input {
http_poller {
urls => {
logs => "http://www.almhuette-raith.at/apache-log/access.log"
}
schedule => { cron => "* * * * * UTC"}
codec => "plain"
}
}
filter {
grok {
match => { "message" => '%{IPV4:clientip} %{NOTSPACE:ER} %{NOTSPACE:EO} \[%{HTTPDATE:timestamp}\] \"%{NOTSPACE:Method} %{DATA:request} HTTP/%{NUMBER:httpversion}\" %{NUMBER:HTTPStatus} %{NOTSPACE:ObjectSize} %{QS:referrer} %{QS:User_Agent} %{QS:What}'
}
}
mutate {
gsub => [
"ObjectSize", "-", "0"
]
convert => { "ObjectSize" => "integer" }
convert => { "HTTPStatus" => "integer" }
convert => { "httpversion" => "float" }
add_field => { "domain" => "almhuetteraith.at" }
}
date {
match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ]
}
geoip {
source => "clientip"
}
}
output {
elasticsearch {
hosts => ["localhost:9200"]
index => "real"
user => "elastic"
password => "*********"
}
stdout { codec => rubydebug }
}
And when I run the pipeline, it run without any errors, this is the output
Sending Logstash's logs to /****/logstash-6.2.2/logs which is now configured via log4j2.properties
[2018-03-17T22:26:47,891][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"fb_apache", :directory=>"/***/logstash-6.2.2/modules/fb_apache/configuration"}
[2018-03-17T22:26:47,923][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"netflow", :directory=>"/****/logstash-6.2.2/modules/netflow/configuration"}
[2018-03-17T22:26:49,325][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"arcsight", :directory=>"/*****/logstash-6.2.2/vendor/bundle/jruby/2.3.0/gems/x-pack-6.2.2-java/modules/arcsight/configuration"}
[2018-03-17T22:26:49,880][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2018-03-17T22:26:50,796][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.2.2"}
[2018-03-17T22:26:51,370][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2018-03-17T22:26:59,378][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2018-03-17T22:27:00,377][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@localhost:9200/]}}
[2018-03-17T22:27:00,428][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elastic:xxxxxx@localhost:9200/, :path=>"/"}
[2018-03-17T22:27:01,233][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://elastic:xxxxxx@localhost:9200/"}
[2018-03-17T22:27:01,386][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>nil}
[2018-03-17T22:27:01,392][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2018-03-17T22:27:01,420][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-03-17T22:27:01,452][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-03-17T22:27:01,528][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2018-03-17T22:27:01,856][INFO ][logstash.filters.geoip ] Using geoip database {:path=>"/***/logstash-6.2.2/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"}
[2018-03-17T22:27:06,983][INFO ][logstash.inputs.http_poller] Registering http_poller Input {:type=>nil, :schedule=>{"cron"=>"* * * * * UTC"}, :timeout=>nil}
[2018-03-17T22:27:07,048][INFO ][logstash.pipeline ] Pipeline started succesfully {:pipeline_id=>"main", :thread=>"#<Thread:0x7f55886 run>"}
[2018-03-17T22:27:07,210][INFO ][logstash.agent ] Pipelines running {:count=>1, :pipelines=>["main"]}
maybe the wrong input plugin was used?