Grokparsefailure, Geoip lookup failure

Hi! I am using ELK 6.2.4 on CentOS 7. I'm a student doing a project and I am having trouble getting the geoip data :frowning:

image

logstash conf file:

i run /usr/share/logstash/bin$ sudo ./logstash -f /etc/logstash/conf.d/alert.conf

the output:

I'm sending data from Filebeat to Logstash. Can you help me solve it for my school project? The deadline for submission is soon.

Please don't post pictures of text; they are difficult to read and some people may not even be able to see them :slight_smile:

Sorry about that!

logstash conf file:

input {

beats {
port => 5044
}
}

filter {

grok {
match => { "message" => ["%{COMBINEDAPACHELOG}" , "%{COMMONAPACHELOG}"]}
}

date {
match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
}

geoip {
source => "clientip"
}

mutate {
convert => [ "[geoip][coordinates]", "float"]
}

csv {
columns => ["datetime","msg","src_ip","src_port","src_mac-addr","dst_ip","dst_port","dst_mac_addr","category","proto","priority"]
separator => ","
}

}

output {

elasticsearch {

hosts => ["localhost:9200"]
}

stdout {
codec => "rubydebug"}

}

Then i run, /usr/share/logstash/bin$ sudo ./logstash -f /etc/logstash/conf.d/alert.conf

the output:
WARNING: Could not find logstash.yml which is typically located in $LS_HOME/config or /etc/logstash. You can specify the path using --path.settings. Continuing using the defaults
Could not find log4j2 configuration at path /usr/share/logstash/config/log4j2.properties. Using default config which logs errors to the console
[INFO ] 2018-07-25 12:44:28.037 [main] scaffold - Initializing module {:module_name=>"fb_apache", :directory=>"/usr/share/logstash/modules/fb_apache/configuration"}
[INFO ] 2018-07-25 12:44:28.052 [main] scaffold - Initializing module {:module_name=>"netflow", :directory=>"/usr/share/logstash/modules/netflow/configuration"}
[WARN ] 2018-07-25 12:44:28.849 [LogStash::Runner] multilocal - Ignoring the 'pipelines.yml' file because modules or command line options are specified
[INFO ] 2018-07-25 12:44:29.269 [LogStash::Runner] runner - Starting Logstash {"logstash.version"=>"6.2.4"}
[INFO ] 2018-07-25 12:44:29.698 [Api Webserver] agent - Successfully started Logstash API endpoint {:port=>9600}
[INFO ] 2018-07-25 12:44:33.786 [Ruby-0-Thread-1: /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:22] pipeline - Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[INFO ] 2018-07-25 12:44:34.917 [[main]-pipeline-manager] elasticsearch - Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[INFO ] 2018-07-25 12:44:34.957 [[main]-pipeline-manager] elasticsearch - Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[WARN ] 2018-07-25 12:44:35.381 [[main]-pipeline-manager] elasticsearch - Restored connection to ES instance {:url=>"http://localhost:9200/"}
[INFO ] 2018-07-25 12:44:36.433 [[main]-pipeline-manager] elasticsearch - ES Output version determined {:es_version=>6}
[WARN ] 2018-07-25 12:44:36.434 [[main]-pipeline-manager] elasticsearch - Detected a 6.x and above cluster: the type event field won't be used to determine the document _type {:es_version=>6}
[INFO ] 2018-07-25 12:44:36.475 [[main]-pipeline-manager] elasticsearch - Using mapping template from {:path=>nil}
[INFO ] 2018-07-25 12:44:36.507 [[main]-pipeline-manager] elasticsearch - Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[INFO ] 2018-07-25 12:44:36.552 [[main]-pipeline-manager] elasticsearch - New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[INFO ] 2018-07-25 12:44:36.981 [[main]-pipeline-manager] geoip - Using geoip database {:path=>"/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"}
[INFO ] 2018-07-25 12:44:38.490 [[main]-pipeline-manager] beats - Beats inputs: Starting input listener {:address=>"0.0.0.0:5044"}
[INFO ] 2018-07-25 12:44:38.838 [Ruby-0-Thread-1: /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:22] pipeline - Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0xdae239d@/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:247 run>"}
[INFO ] 2018-07-25 12:44:38.938 [[main]<beats] Server - Starting server on port: 5044
[INFO ] 2018-07-25 12:44:38.950 [Ruby-0-Thread-1: /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:22] agent - Pipelines running {:count=>1, :pipelines=>["main"]}

Here you go!

Hello @Adah,

Change the field geoip { source => "clientip" } to

geoip { source => "src_ip" }

and create this template as template.json, saving it in the same directory as your Logstash config file.
{
"template" : "new-*",
"version" : 50001,
"settings" : {
"index.refresh_interval" : "5s"
},
"mappings" : {
"default" : {
"_all" : {"enabled" : true, "omit_norms" : false},
"dynamic_templates" : [ {
"message_field" : {
"path_match" : "message",
"match_mapping_type" : "string",
"mapping" : {
"type" : "text",
"omit_norms" : false
}
}
}, {
"string_fields" : {
"match" : "*",
"match_mapping_type" : "string",
"mapping" : {
"type" : "text", "omit_norms" : false,
"fields" : {
"keyword" : { "type": "keyword", "ignore_above": 256 }
}
}
}
} ],
"properties" : {
"@timestamp": { "type": "date", "include_in_all": false },
"@version": { "type": "keyword", "include_in_all": false },
"geoip" : {
"dynamic": true,
"properties" : {
"ip": { "type": "ip" },
"location" : { "type" : "geo_point" },
"latitude" : { "type" : "half_float" },
"longitude" : { "type" : "half_float" }
}
},
"location": { "type": "geo_point" }
}
}
}
}

and add these two lines to the output part of your configuration file:

template => "/usr/share/logstash/bin/template.json"
template_name => "new-*"

I hope this information works for you and is sufficient.

Thanks & Regards,
Krunal.

Hi! So I've changed it to { source => "src_ip" } and created template.json with exactly the same contents as yours.

logstash conf file:

input {
beats {
port => 5044
}
}

filter {
grok {
match => { "message" => "%{COMBINEDAPACHELOG}" }
}

date {
match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
}

geoip {
source => "src_ip"
target => "geoip"
}

mutate {
convert => [ "[geoip][coordinates]", "float"]
}
csv {
columns => ["datetime","msg","src_ip","src_port","src_mac-addr","dst_ip","dst_port","dst_mac_addr","category","proto","priority"]
separator => ","
}
}

output {

elasticsearch {
hosts => ["localhost:9200"]
}

template => "/usr/share/logstash/bin/template.json"
template_name => "new-*"
}


then i run : /usr/share/logstash/bin$ sudo ./logstash -f /etc/logstash/conf.d/alert.conf

Output:
WARNING: Could not find logstash.yml which is typically located in $LS_HOME/config or /etc/logstash. You can specify the path using --path.settings. Continuing using the defaults
Could not find log4j2 configuration at path /usr/share/logstash/config/log4j2.properties. Using default config which logs errors to the console
[INFO ] 2018-07-27 09:11:24.018 [main] scaffold - Initializing module {:module_name=>"fb_apache", :directory=>"/usr/share/logstash/modules/fb_apache/configuration"}
[INFO ] 2018-07-27 09:11:24.034 [main] scaffold - Initializing module {:module_name=>"netflow", :directory=>"/usr/share/logstash/modules/netflow/configuration"}
[WARN ] 2018-07-27 09:11:24.585 [LogStash::Runner] multilocal - Ignoring the 'pipelines.yml' file because modules or command line options are specified
[INFO ] 2018-07-27 09:11:24.954 [LogStash::Runner] runner - Starting Logstash {"logstash.version"=>"6.2.4"}
[INFO ] 2018-07-27 09:11:25.297 [Api Webserver] agent - Successfully started Logstash API endpoint {:port=>9600}
[ERROR] 2018-07-27 09:11:25.488 [Ruby-0-Thread-1: /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:22] agent - Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, { at line 42, column 10 (byte 514) after output {\n \nelasticsearch {\nhosts => ["localhost:9200"]\n}\n\ntemplate ", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:42:in compile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:50:incompile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:12:in block in compile_sources'", "org/jruby/RubyArray.java:2486:inmap'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:in compile_sources'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:51:ininitialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:169:in initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:40:inexecute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:315:in block in converge_state'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:inwith_pipelines'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:312:in block in converge_state'", "org/jruby/RubyArray.java:1734:ineach'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:299:in converge_state'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:166:inblock in converge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in with_pipelines'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:164:inconverge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:90:in execute'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:348:inblock in execute'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in 
initialize'"]}

It has an error. Do you know how to solve it?
Thank you :slight_smile:

This should be inside the brackets to work. They are options specific to the Elasticsearch output plugin and therefore must be included within the plugin declaration.

Alright! So I've put it this way:

output {
elasticsearch {

hosts => ["localhost:9200"]

template => "/usr/share/logstash/bin/template.json"
template_name => "new-*"
}
}

and the kibana discover still show
beats_input_codec_plain_applied, _grokparsefailure, _geoip_lookup_failure

Could you post some sample logs that you are trying to parse?

Here are the sample logs (.csv) which I tried to parse into Kibana:

05/23-01:43:41.181405 ,"Failed Login Attempt",192.168.199.129,21,00:0C:29:25:FD:06,192.168.199.128,59852,00:0C:29:32:1C:75,TCP,,,10000003
05/23-01:44:14.242689 ,"Failed Login Attempt",192.168.199.129,21,00:0C:29:25:FD:06,192.168.199.128,59854,00:0C:29:32:1C:75,TCP,,,10000003
05/23-01:44:46.848152 ,"Failed Login Attempt",192.168.199.129,21,00:0C:29:25:FD:06,192.168.199.128,59856,00:0C:29:32:1C:75,TCP,,,10000003

Output in kibana:

Hi Adah,

If you are trying to parse a csv, do you really need a grok filter? Would something similar to this work?

filter {
 csv {
  columns => ["datetime","msg","src_ip","src_port","src_mac-addr","dst_ip","dst_port","dst_mac_addr","category","proto","priority"]
  separator => ","
 }

 date {
  match => [ "datetime" , "dd/MMM/yyyy:HH:mm:ss Z" ]
 }

 geoip {
  source => "src_ip"
  target => "geoip"
 }

 mutate {
  convert => [ "[geoip][coordinates]", "float"]
 }
}

try with this config file:

input {
beats {
port => 5044
}
}

filter {
grok {
match => { "message" => "%{COMBINEDAPACHELOG}" }
}

mutate
{
add_field => { "generated_time" => "%{year}-%{monthnum}-%{daynum}T%{time_1}.000+05:30" }
remove_field => ["year", "month", "daynum", "time", "monthnum", "time_1"]
}
date
{
match => ["generated_time", "yyyy-MM-dd'T'HH:mm:ss.SSSZ"]
timezone => "America/New_York" #change with your timezone
target => "@timestamp"
}
geoip
{
source => "src_ip"
target => "geoip"
add_field => [ "[geoip][coordinates]","%{[geoip][longitude]}" ]
add_field => [ "[geoip][coordinates]","%{[geoip][latitude]}" ]
}

mutate
{
convert => [ "[geoip][coordinates]", "float"]
}
csv
{
separator => ","
columns => ["datetime","msg","src_ip","src_port","src_mac-addr","dst_ip","dst_port","dst_mac_addr","category","proto","priority"]
}
}

output
{
stdout { codec => rubydebug }
elasticsearch
{
hosts => ["localhost:9200"]
template => "/usr/share/logstash/bin/template.json"
template_name => "new-*"
index => "test-%{+YYYY.MM.dd}"
}
}

Are you not using X-pack right ?

Thanks & Regards,
Krunal.

Thank you! will try out! nope, i'm not using x-pack

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.