Null pointer exception in logstash filter

/Server/home/user/itbox/tools/logstash-7.6.2/vendor/bundle/jruby/2.5.0/gems/awesome_print-1.7.0/lib/awesome_print/formatters/base_formatter.rb:31: warning: constant ::Fixnum is deprecated
warning: thread "[main]>worker1" terminated with exception (report_on_exception is true):
java.lang.IllegalStateException: java.lang.NullPointerException
        at org.logstash.execution.WorkerLoop.run(org/logstash/execution/WorkerLoop.java:85)
        at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(jdk/internal/reflect/NativeMethodAccessorImpl.java:62)
        at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(jdk/internal/reflect/DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:566)
        at org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(org/jruby/javasupport/JavaMethod.java:441)
        at org.jruby.javasupport.JavaMethod.invokeDirect(org/jruby/javasupport/JavaMethod.java:305)
        at Server.home.user.itbox.tools.logstash_minus_7_dot_6_dot_2.logstash_minus_core.lib.logstash.java_pipeline.start_workers(/Server/home/user/itbox/tools/logstash-7.6.2/logstash-core/lib/logstash/java_pipeline.rb:262)
        at org.jruby.RubyProc.call(org/jruby/RubyProc.java:274)
        at java.lang.Thread.run(java/lang/Thread.java:834)
Caused by: java.lang.NullPointerException
        at org.logstash.config.ir.compiler.EventCondition$Compiler$UnexpectedTypeException.<init>(EventCondition.java:659)
        at org.logstash.config.ir.compiler.EventCondition$Compiler.compare(EventCondition.java:433)
        at org.logstash.config.ir.compiler.EventCondition$Compiler.lambda$compareFields$10(EventCondition.java:409)
        at org.logstash.config.ir.compiler.Utils.filterEvents(Utils.java:27)
        at org.logstash.generated.CompiledDataset9.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset10.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset11.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset12.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset13.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset14.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset15.compute(Unknown Source)
        at org.logstash.generated.CompiledDataset16.compute(Unknown Source)
        at org.logstash.execution.WorkerLoop.run(WorkerLoop.java:64)
        at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.base/java.lang.reflect.Method.invoke(Method.java:566)
        at org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(JavaMethod.java:441)
        at org.jruby.javasupport.JavaMethod.invokeDirect(JavaMethod.java:305)
        at org.jruby.java.invokers.InstanceMethodInvoker.call(InstanceMethodInvoker.java:32)
        at Server.home.user.itbox.tools.logstash_minus_7_dot_6_dot_2.logstash_minus_core.lib.logstash.java_pipeline.RUBY$block$start_workers$5(/Server/home/user/itbox/tools/logstash-7.6.2/logstash-core/lib/logstash/java_pipeline.rb:262)
        at org.jruby.runtime.CompiledIRBlockBody.callDirect(CompiledIRBlockBody.java:136)
        at org.jruby.runtime.IRBlockBody.call(IRBlockBody.java:77)
        at org.jruby.runtime.IRBlockBody.call(IRBlockBody.java:71)
        at org.jruby.runtime.Block.call(Block.java:125)
        at org.jruby.RubyProc.call(RubyProc.java:274)
        at org.jruby.internal.runtime.RubyRunnable.run(RubyRunnable.java:105)
        at java.base/java.lang.Thread.run(Thread.java:834)

I am getting the above exception while processing a CSV file, and it causes Logstash to crash. If I reprocess the same file, it goes through without any issues. Could you please help me figure out what is wrong in the filter?

I am using the filter below.

filter {
  csv {
    separator => "|"
    columns => ["SentTime","NID","ATime","HKTime","NTime","DatabaseTime","UniqueId","Type","ADelay_ms","HDelay_ms","NDelay_ms"]
    remove_field => ["message","path"]
  }
  mutate {
     convert => { "ADelay_ms" => "integer"
                  "HDelay_ms" => "integer"
                  "NDelay_ms" => "integer" }
  }
  date{
    match => [ "DatabaseTime" ,"dd/MM/yyyy HH:mm:ss,SSS" ]
    timezone => "Europe/Amsterdam"
    target => "DatabaseTime"
  }
  date{
    match => [ "SentTime" , "yyyy/MM/dd HH:mm:ss,SSS" ]
    timezone => "Europe/Amsterdam"
    target => "SentTime"
  }
  date{
    locale => "en"
    match => [ "ATime" , "MMM d, yyyy h:mm:ss a" ]
    timezone => "Europe/Amsterdam"
    target => "ATime"
  }
  date{
    locale => "en"
    match => [ "HTime" , "MMM d, yyyy h:mm:ss a" ]
    timezone => "Asia/Ho_Chi_Minh"
    target => "HTime"
  }
  date{
    locale => "en"
    match => [ "NTime" , "MMM d, yyyy h:mm:ss a" ]
    timezone => "America/Nipigon"
    target => "NTime"
  }
  ruby {
    init => "require 'time'"
    code => "
      event.set('SentDelay_ms', (event.get('SentTime') - event.get('DatabaseTime')) * 1000)
    "
  }
  mutate{
    convert => { "SentDelay_ms" => "integer" }
  }
  if [HDelay_ms] <= [SentDelay_ms] and [HDelay_ms] > 0 {
        ruby { code => "event.set('HDelay_ms', (event.get('SentDelay_ms') + 463 ))" }
  }
  if [ADelay_ms] <= [SentDelay_ms] and [ADelay_ms] > 0 {
        ruby { code => "event.set('ADelay_ms', (event.get('SentDelay_ms') + 263 ))" }
  }
  if [NDelay_ms] <= [SentDelay_ms] and [NDelay_ms] > 0 {
        ruby { code => "event.set('NDelay_ms', (event.get('SentDelay_ms') + 363 ))" }
  }
}

I believe that error is caused by a numeric comparison with a non-existent field. See here.
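
For example (just a sketch based on one of your own conditionals, not a tested fix for your exact pipeline), you could make each comparison conditional on both fields actually being present, since a bare if [field] in a Logstash conditional only matches when the field exists:

  if [SentDelay_ms] and [HDelay_ms] {
    if [HDelay_ms] <= [SentDelay_ms] and [HDelay_ms] > 0 {
      ruby { code => "event.set('HDelay_ms', event.get('SentDelay_ms') + 463)" }
    }
  }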

Thanks for sharing the post; I had already gone through it before.
Point 1: The reason I still posted is that when I check the CSV file, I don't see any null or blank data.
Point 2: If I reprocess the same file, it works without any issues.

I moved the comparison operations inside a ruby filter instead of using Logstash field comparisons, and the NullPointerException is gone.

filter {
  csv {
    separator => "|"
    columns => ["SentTime","NID","ATime","HKTime","NTime","DatabaseTime","UniqueId","Type","ADelay_ms","HDelay_ms","NDelay_ms"]
    remove_field => ["message","path"]
  }
  mutate {
     convert => { "ADelay_ms" => "integer"
                  "HDelay_ms" => "integer"
                  "NDelay_ms" => "integer" }
  }
  date{
    match => [ "DatabaseTime" ,"dd/MM/yyyy HH:mm:ss,SSS" ]
    timezone => "Europe/Amsterdam"
    target => "DatabaseTime"
  }
  date{
    match => [ "SentTime" , "yyyy/MM/dd HH:mm:ss,SSS" ]
    timezone => "Europe/Amsterdam"
    target => "SentTime"
  }
  date{
    locale => "en"
    match => [ "ATime" , "MMM d, yyyy h:mm:ss a" ]
    timezone => "Europe/Amsterdam"
    target => "ATime"
  }
  date{
    locale => "en"
    match => [ "HTime" , "MMM d, yyyy h:mm:ss a" ]
    timezone => "Asia/Ho_Chi_Minh"
    target => "HTime"
  }
  date{
    locale => "en"
    match => [ "NTime" , "MMM d, yyyy h:mm:ss a" ]
    timezone => "America/Nipigon"
    target => "NTime"
  }
  
  ruby {
    init => "require 'time'
             @buffer1 = 477
             @buffer2 = 373
             @buffer3 = 257"
    code => "
      event.set('SentDelay_ms', (event.get('SentTime') - event.get('DatabaseTime')) * 1000)

      if event.get('HDelay_ms') < event.get('SentDelay_ms') and event.get('HDelay_ms') > 0
        event.set('HDelay_ms', event.get('SentDelay_ms') + @buffer1)
      end

      if event.get('ADelay_ms') < event.get('SentDelay_ms') and event.get('ADelay_ms') > 0
        event.set('ADelay_ms', event.get('SentDelay_ms') + @buffer2)
      end

      if event.get('NDelay_ms') < event.get('SentDelay_ms') and event.get('NDelay_ms') > 0
        event.set('NDelay_ms', event.get('SentDelay_ms') + @buffer3)
      end
    "
  }

}
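
One thing to note about this version: if SentDelay_ms, HDelay_ms, ADelay_ms or NDelay_ms is ever missing or non-numeric, the comparisons in the ruby code will raise a Ruby error, which the ruby filter catches and reports by tagging the event with _rubyexception instead of crashing the worker the way the compiled conditional did. If you want to handle that case explicitly, a possible sketch (reusing your field names and the 477 buffer, shown for one field only) is to guard each comparison with a type check:

  ruby {
    code => "
      sent = event.get('SentDelay_ms')
      hdelay = event.get('HDelay_ms')
      # Only adjust HDelay_ms when both values are present and numeric
      if sent.is_a?(Numeric) && hdelay.is_a?(Numeric) && hdelay > 0 && hdelay < sent
        event.set('HDelay_ms', sent + 477)
      end
    "
  }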
