Can't create a new index on Elasticsearch with Logstash

Hi, this is the first time I'm using Logstash. I'm trying to index my Apache logs from localhost into a server. I can't see any errors in /var/log/logstash/logstash-plain.log:

[2018-02-28T14:45:29,984][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@127.0.0.1:9200/]}}
[2018-02-28T14:45:29,984][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elastic:xxxxxx@127.0.0.1:9200/, :path=>"/"}
[2018-02-28T14:45:30,016][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>#<URI::HTTP:0x29b767a2 URL:http://elastic:xxxxxx@127.0.0.1:9200/>}
[2018-02-28T14:45:30,016][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-02-28T14:45:30,020][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword"}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-02-28T14:45:30,023][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>[#<URI::Generic:0x7a3b87a4 URL://127.0.0.1>]}
[2018-02-28T14:45:30,061][INFO ][logstash.filters.geoip   ] Using geoip database {:path=>"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.1.1-java/vendor/GeoLite2-City.mmdb"}
[2018-02-28T14:45:30,200][INFO ][logstash.pipeline        ] Starting pipeline {"id"=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>1000}
[2018-02-28T14:45:30,280][INFO ][logstash.pipeline        ] Pipeline main started
[2018-02-28T14:45:30,305][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}

My configuration:

/usr/share/logstash/logstash.conf:

input {
  file {
    path => ["/var/log/apache2/access.log"]
    type => "apache_access"
  }
  file {
    path => ["/var/log/apache2/error.log"]
    type => "apache_error"
  }
}
filter {
   if [type] in [ "apache" , "apache_access" , "apache-access" ]  {
      grok {
         match => [
         "message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:extra_fields}",
         "message" , "%{COMMONAPACHELOG}+%{GREEDYDATA:extra_fields}"
         ]
         overwrite => [ "message" ]
      }
      mutate {
         convert => ["response", "integer"]
         convert => ["bytes", "integer"]
         convert => ["responsetime", "float"]
      }
      geoip {
         source => "clientip"
         target => "geoip"
         add_tag => [ "apache-geoip" ]
      }
      date {
         match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
         remove_field => [ "timestamp" ]
      }
      useragent {
         source => "agent"
      }
   }
   if [type] in ["apache_error","apache-error"] {
      grok {
         match => ["message", "\[%{WORD:dayname} %{WORD:month} %{DATA:day} %{DATA:hour}:%{DATA:minute}:%{DATA:second} %{YEAR:year}\] \[%{NOTSPACE:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:message}"]
         overwrite => [ "message" ]
      }
      mutate {
         add_field => {
            "time_stamp" => "%{day}/%{month}/%{year}:%{hour}:%{minute}:%{second}"
         }
      }
      date {
         match => ["time_stamp", "dd/MMM/YYYY:HH:mm:ss"]
         remove_field => [ "time_stamp","day","dayname","month","hour","minute","second","year"]
      }
   }
}
output {
 elasticsearch {
   hosts => ["http://172.16.251.121:9200/"]
   index => "apache-%{+YYYY.MM.dd}"
   document_type => "system_logs"
 }
 stdout { codec => rubydebug }
} 

/etc/logstash/conf.d/02-apache-input.conf:

input {
 file {
   path => ["/var/log/apache2/access.log"]
   type => "apache_access"
 }
 file {
   path => ["/var/log/apache2/error.log"]
   type => "apache_error"
 }
}

/etc/logstash/conf.d/10-apache-filter.conf:

filter {
   if [type] in [ "apache" , "apache_access" , "apache-access" ]  {
      grok {
         match => [
         "message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:extra_fields}",
         "message" , "%{COMMONAPACHELOG}+%{GREEDYDATA:extra_fields}"
         ]
         overwrite => [ "message" ]
      }
      mutate {
         convert => ["response", "integer"]
         convert => ["bytes", "integer"]
         convert => ["responsetime", "float"]
      }
      geoip {
         source => "clientip"
         target => "geoip"
         add_tag => [ "apache-geoip" ]
      }
      date {
         match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
         remove_field => [ "timestamp" ]
      }
      useragent {
         source => "agent"
      }
   }
   if [type] in ["apache_error","apache-error"] {
      grok {
         match => ["message", "\[%{WORD:dayname} %{WORD:month} %{DATA:day} %{DATA:hour}:%{DATA:minute}:%{DATA:second} %{YEAR:year}\] \[%{NOTSPACE:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:message}"]
         overwrite => [ "message" ]
      }
      mutate {
         add_field => {
            "time_stamp" => "%{day}/%{month}/%{year}:%{hour}:%{minute}:%{second}"
         }
      }
      date {
         match => ["time_stamp", "dd/MMM/YYYY:HH:mm:ss"]
         remove_field => [ "time_stamp","day","dayname","month","hour","minute","second","year"]
      }
   }
}

/etc/logstash/conf.d/30-elasticsearch-output.conf:

output {
 elasticsearch {
   hosts => ["http://172.16.251.121:9200/"]
   index => "apache-%{+YYYY.MM.dd}"
   document_type => "apache_logs"
 }
 stdout { codec => rubydebug }
}

Thanks in advance!

Does the Logstash user have read access to the files? Are they being updated with new data? With the configuration you have, Logstash will tail the files rather than read them from the top.
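
You can check the read access with something along these lines (assuming the service runs as the logstash user, which is the package default):

 sudo -u logstash head -n 1 /var/log/apache2/access.log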

Yes, I ran sudo chmod -R 777 /var/log/apache2/ before starting Logstash. No, the files are not being updated. What should I change in my configuration to index all the existing content of these files? Thanks

This is an extremely frequently asked question. Please read past posts and the file input documentation. Keywords: start_position, sincedb_path.
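
For example, something along these lines for the access log input (an untested sketch using the path from your config; the same options apply to the error log input):

input {
  file {
    path => ["/var/log/apache2/access.log"]
    type => "apache_access"
    # Read files Logstash hasn't seen before from the top instead of tailing them.
    start_position => "beginning"
    # Discard the saved read positions so the files are re-read on every restart.
    # Handy while testing; usually not what you want in production.
    sincedb_path => "/dev/null"
  }
}

Note that start_position only applies to files without an existing sincedb entry, which is why sincedb_path matters here.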

Thanks a lot, Magnus Bäck, I will search for that.
