[ERROR][logstash.instrument.periodicpoller.jvm] PeriodicPoller: exception

We are running Filebeat, Logstash, Kibana, and Elasticsearch on the master machine (1 machine). On the slave machines (6 machines) we are running only Filebeat, and all the data from those machines is transferred to the master machine.

We are facing the two errors below in the Logstash console.
We think the errors occur because we are loading data from 7 servers (around 300 log files) and CPU utilization is reaching 100%.

After the timeout error, Logstash stops working, and parsing and inserting data into Elasticsearch stops.

Errors:

[2017-06-28T03:04:53,176][ERROR][logstash.filters.grok   ] Error while attempting to check/cancel excessively long grok patterns {:message=>"Mutex relocking by same thread", :class=>"ThreadError", :backtrace=>["org/jruby/ext/thread/Mutex.java:90:in `lock'", "org/jruby/ext/thread/Mutex.java:147:in `synchronize'", "G:/ELK_Softwares/exe/logstash-5.0.0/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-3.2.3/lib/logstash/filters/grok/timeout_enforcer.rb:38:in `stop_thread_groking'", "G:/ELK_Softwares/exe/logstash-5.0.0/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-3.2.3/lib/logstash/filters/grok/timeout_enforcer.rb:53:in `cancel_timed_out!'", "org/jruby/RubyHash.java:1342:in `each'", "G:/ELK_Softwares/exe/logstash-5.0.0/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-3.2.3/lib/logstash/filters/grok/timeout_enforcer.rb:45:in `cancel_timed_out!'", "org/jruby/ext/thread/Mutex.java:149:in `synchronize'", "G:/ELK_Softwares/exe/logstash-5.0.0/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-3.2.3/lib/logstash/filters/grok/timeout_enforcer.rb:44:in `cancel_timed_out!'", "G:/ELK_Softwares/exe/logstash-5.0.0/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-3.2.3/lib/logstash/filters/grok/timeout_enforcer.rb:63:in `start!'"]}

The error mentions "excessively long grok patterns".
Do you mind sharing your config?

input
{
beats {
port => 5042
codec => multiline
{
pattern => "^#2.0"
what => "previous"
negate =>"true"
charset => "ISO-8859-1"
}
}
}
filter
{
if "trace" in [type]{
grok {
match => {"message" => "(?m)#%{DATA:m1}#%{DATA:DateTime}#%{DATA:Timezone}#%{DATA:Severity}#%{DATA:Category}#%{DATA:m6}#%{DATA:CustomerMessageComponent}#%{DATA:RuntimeComponent}#%{DATA:LogID}#%{DATA:CorrelationID}#%{DATA:Application}#%{DATA:Location}#%{DATA:User}#%{DATA:Session}#%{DATA:m2}#%{DATA:PassportSession}#%{DATA:PassportUserActivityID}#%{DATA:PassportConnection}#%{DATA:PassportConnectionCounter}#%{DATA:Thread}#%{DATA:m4}#%{DATA:m5}#%{GREEDYDATA:ErrorMessage}#"}
}
mutate
{
#remove_tag => ["multiline"],
remove_field => ["m1","m4","m6"]
strip => ["DateTime"]
}
date {
match => [ "DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
timezone => "EST"
target => "DateTime"
}
}

if "trc-prd" in [type]{
	grok {
		match => {"message" => "(?m)#%{DATA:prd_m1}#%{DATA:prd_DateTime}#%{DATA:prd_Timezone}#%{DATA:prd_Severity}#%{DATA:prd_Category}#%{DATA:prd_m6}#%{DATA:prd_CustomerMessageComponent}#%{DATA:prd_RuntimeComponent}#%{DATA:prd_LogID}#%{DATA:prd_CorrelationID}#%{DATA:prd_Application}#%{DATA:prd_Location}#%{DATA:prd_User}#%{DATA:prd_Session}#%{DATA:prd_m2}#%{DATA:prd_PassportSession}#%{DATA:prd_PassportUserActivityID}#%{DATA:prd_PassportConnection}#%{DATA:prd_PassportConnectionCounter}#%{DATA:prd_Thread}#%{DATA:prd_m4}#%{DATA:prd_m5}#%{GREEDYDATA:prd_ErrorMessage}#"}
	}		
	mutate
	{
		#remove_tag => ["multiline"],
		remove_field => ["prd_m1","prd_m4","prd_m6"]
		strip => ["prd_DateTime"]
	}
	date {
		match => [ "prd_DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
		timezone => "EST"
		target => "prd_DateTime"
	}
}
	
if "application" in [type]{
	grok {
		match => {"message" => "(?m)#%{DATA:m1}#%{DATA:DateTime}#%{DATA:Timezone}#%{DATA:Severity}#%{DATA:Category}#%{DATA:m6}#%{DATA:CustomerMessageComponent}#%{DATA:RuntimeComponent}#%{DATA:LogID}#%{DATA:CorrelationID}#%{DATA:Application}#%{DATA:Location}#%{DATA:User}#%{DATA:Session}#%{DATA:m2}#%{DATA:PassportSession}#%{DATA:PassportUserActivityID}#%{DATA:PassportConnection}#%{DATA:PassportConnectionCounter}#%{DATA:Thread}#%{DATA:m4}#%{DATA:m5}#%{GREEDYDATA:ErrorMessage}#"}
	}		
	mutate
	{
		#remove_tag => ["multiline"],
		remove_field => ["m1","m4","m6"]
		strip => ["DateTime"]			
	}
	date {
		match => [ "DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
		timezone => "EST"
		target => "DateTime"
	}
}
if "appl-prd" in [type]{
	grok {
		match => {"message" => "(?m)#%{DATA:prd_m1}#%{DATA:prd_DateTime}#%{DATA:prd_Timezone}#%{DATA:prd_Severity}#%{DATA:prd_Category}#%{DATA:prd_m6}#%{DATA:prd_CustomerMessageComponent}#%{DATA:prd_RuntimeComponent}#%{DATA:prd_LogID}#%{DATA:prd_CorrelationID}#%{DATA:prd_Application}#%{DATA:prd_Location}#%{DATA:prd_User}#%{DATA:prd_Session}#%{DATA:prd_m2}#%{DATA:prd_PassportSession}#%{DATA:prd_PassportUserActivityID}#%{DATA:prd_PassportConnection}#%{DATA:prd_PassportConnectionCounter}#%{DATA:prd_Thread}#%{DATA:prd_m4}#%{DATA:prd_m5}#%{GREEDYDATA:prd_ErrorMessage}#"}
	}		
	mutate
	{
		#remove_tag => ["multiline"],
		remove_field => ["prd_m1","prd_m4","prd_m6"]
		strip => ["prd_DateTime"]			
	}
	date {
		match => [ "prd_DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
		timezone => "EST"
		target => "prd_DateTime"
	}
}
	
if "security" in [type]{
	grok {
		match => {"message" => "(?m)#%{DATA:security_m1}#%{DATA:security_DateTime}#%{DATA:security_Timezone}#%{DATA:security_Severity}#%{DATA:security_Category}#%{DATA:security_m6}#%{DATA:security_CustomerMessageComponent}#%{DATA:security_RuntimeComponent}#%{DATA:security_LogID}#%{DATA:security_CorrelationID}#%{DATA:security_Application}#%{DATA:security_Location}#%{DATA:security_User}#%{DATA:security_Session}#%{DATA:security_m2}#%{DATA:security_PassportSession}#%{DATA:security_PassportUserActivityID}#%{DATA:security_PassportConnection}#%{DATA:security_PassportConnectionCounter}#%{DATA:security_Thread}#%{DATA:security_m4}#%{DATA:security_m5}#%{GREEDYDATA:security_RemainingMessage}"}
		}

	if "LOGIN." in [security_RemainingMessage] {
	
		grok {
			match => {"security_RemainingMessage" => "%{DATA:security_ErrorMessage}\nUser: %{DATA:security_LoginUser}\nIP Address: %{DATA:security_IPAddress}\n%{GREEDYDATA:security_FinalRemainingMessage}"}
		}		
	}
	else {
		grok {
			match => {"security_RemainingMessage" => "%{DATA:security_ErrorMessage}#%{GREEDYDATA:security_FinalRemainingMessage}"}
		}	
	}
	
mutate
{
	#remove_tag => ["multiline"],
	remove_field => ["security_m1","security_m4","security_m6","security_FinalRemainingMessage","security_RemainingMessage"]
	rename => {"host" => "hostname"}
	strip => ["security_DateTime"]
}
date {
		match => [ "security_DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
		timezone => "EST"
		target => "security_DateTime"
	}
}
if "sec-prd" in [type]{
	grok {
		match => {"message" => "(?m)#%{DATA:prd_security_m1}#%{DATA:prd_security_DateTime}#%{DATA:prd_security_Timezone}#%{DATA:prd_security_Severity}#%{DATA:prd_security_Category}#%{DATA:prd_security_m6}#%{DATA:prd_security_CustomerMessageComponent}#%{DATA:prd_security_RuntimeComponent}#%{DATA:prd_security_LogID}#%{DATA:prd_security_CorrelationID}#%{DATA:prd_security_Application}#%{DATA:prd_security_Location}#%{DATA:prd_security_User}#%{DATA:prd_security_Session}#%{DATA:prd_security_m2}#%{DATA:prd_security_PassportSession}#%{DATA:prd_security_PassportUserActivityID}#%{DATA:prd_security_PassportConnection}#%{DATA:prd_security_PassportConnectionCounter}#%{DATA:prd_security_Thread}#%{DATA:prd_security_m4}#%{DATA:prd_security_m5}#%{GREEDYDATA:prd_security_RemainingMessage}"}
		}

	if "LOGIN." in [prd_security_RemainingMessage] {
	
		grok {
			match => {"prd_security_RemainingMessage" => "%{DATA:prd_security_ErrorMessage}\nUser: %{DATA:prd_security_LoginUser}\nIP Address: %{DATA:prd_security_IPAddress}\n%{GREEDYDATA:prd_security_FinalRemainingMessage}"}
		}		
	}
	else {
		grok {
			match => {"prd_security_RemainingMessage" => "%{DATA:prd_security_ErrorMessage}#%{GREEDYDATA:prd_security_FinalRemainingMessage}"}
		}	
	}
	
mutate
{
	#remove_tag => ["multiline"],
	remove_field => ["prd_security_m1","prd_security_m4","prd_security_m6","prd_security_FinalRemainingMessage","prd_security_RemainingMessage"]
	rename => {"host" => "hostname"}
	strip => ["prd_security_DateTime"]
}
date {
		match => [ "prd_security_DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
		timezone => "EST"
		target => "prd_security_DateTime"
	}
}

}

Due to the size constraint, we are posting the output section separately.

output {
if "trace" in [type]{
elasticsearch {
hosts => ["143.22.209.122"]
index => "sap-trace-app-logs-nprd-qa"
}
}
if "trc-prd" in [type]{
elasticsearch {
hosts => ["143.22.209.122"]
index => "sap-trace-app-logs-prd"
}
}
if "application" in [type]{
elasticsearch {
hosts => ["143.22.209.122"]
index => "sap-trace-app-logs-nprd-qa"
}
}
if "appl-prd" in [type]{
elasticsearch {
hosts => ["143.22.209.122"]
index => "sap-trace-app-logs-prd"
}
}

if "security" in [type]{
	elasticsearch {
		hosts => ["143.22.209.122"]
		index => "sap-security-logs-nprd-qa"  
	}
}
if "sec-prd" in [type]{
	elasticsearch {
		hosts => ["143.22.209.122"]
		index => "sap-security-logs-prd"  
	}
}

}

Filebeat yml file

###################### Filebeat Configuration Example #########################

# This file is an example configuration file highlighting only the most common
# options. The filebeat.full.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/filebeat/index.html

#=========================== Filebeat prospectors =============================

filebeat.prospectors:

# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.

- input_type: log
  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - D:\Divyendu\PNGLOgConfirmation\test\defaultTrace_*.trc
  document_type: trace
  fields:
    server: bdc-bij002
  ignore_older: 3m
  #harvester_limit: 4
  harvester_buffer_size: 16384
  multiline:
    pattern: '^#2.0'
    negate: true
    match: after
  #tail_files: true

- input_type: log
  paths:
    - D:\Divyendu\PNGLOgConfirmation\test\applications_*.log
  document_type: application
  fields:
    server: bdc-bij002
  ignore_older: 3m
  #harvester_limit: 4
  harvester_buffer_size: 16384
  multiline:
    pattern: '^#2.0'
    negate: true
    match: after

- input_type: log
  paths:
    - D:\Divyendu\PNGLOgConfirmation\test\system\security_*.log
  document_type: application
  fields:
    server: bdc-bij002
  ignore_older: 3m
  harvester_buffer_size: 16384
  multiline:
    pattern: '^#2.0'
    negate: true
    match: after
  fields_under_root: true

#================================ Outputs =====================================

# Configure what outputs to use when sending the data collected by the beat.
# Multiple outputs may be used.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["10.103.21.112:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["10.103.21.64:5042"]
  bulk_max_size: 1024
  loadbalance: true
  timeout: 120

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

#================================ Logging =====================================

# Sets log level. The default log level is info.
# Available log levels are: critical, error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]

ELK versions:

elasticsearch-2.3.5
filebeat-5.2.2
kibana-4.5.4
logstash-5.0.0

Please suggest a solution to this problem.

[ERROR][logstash.instrument.periodicpoller.jvm] PeriodicPoller: exception {:poller=>#<LogStash::Instrument::PeriodicPoller::JVM:0x2b7442a0 @task=#<Concurrent::TimerTask:0x5e412c77 @observers=#<Concurrent::Collection::CopyOnNotifyObserverSet:0x2d23ee71 @observers={#<LogStash::Instrument::PeriodicPoller::JVM:0x2b7442a0 ...>=>:update}>, @timeout_interval=60.0, @StopEvent=#<Concurrent::Event:0x3adecdcd @set=false, @iteration=0>, @value=false, @copy_on_deref=nil, @dup_on_deref=nil, @options={:polling_interval=>1, :polling_timeout=>60}>, :result=>nil, :exception=>#<Concurrent::TimeoutError: Concurrent::TimeoutError>, :executed_at=>2017-06-28 03:04:55 -0500}

match => {"message" => "(?m)#%{DATA:m1}#%{DATA:DateTime}#%{DATA:Timezone}#%{DATA:Severity}#%{DATA:Category}#%{DATA:m6}#%{DATA:CustomerMessageComponent}#%{DATA:RuntimeComponent}#%{DATA:LogID}#%{DATA:CorrelationID}#%{DATA:Application}#%{DATA:Location}#%{DATA:User}#%{DATA:Session}#%{DATA:m2}#%{DATA:PassportSession}#%{DATA:PassportUserActivityID}#%{DATA:PassportConnection}#%{DATA:PassportConnectionCounter}#%{DATA:Thread}#%{DATA:m4}#%{DATA:m5}#%{GREEDYDATA:ErrorMessage}#"}

This grok expression is insanely inefficient. You really need to reduce the number of DATA and GREEDYDATA patterns. I'm positive that it's possible to construct more exact expressions that aren't so expensive to process.

Or, since this appears to be a log with #-separated records, just use a csv filter.
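For illustration, here is a rough sketch of the kind of tighter expression that is possible for the "trace" block. The field names are copied from your existing pattern; the sketch assumes each event starts with a literal # (which your multiline pattern suggests) and that every field before ErrorMessage never contains a # of its own:

grok {
  # (?<name>[^#]*) can never run past the next '#', so the regex engine has far
  # less backtracking to do than with a long chain of DATA patterns.
  match => { "message" => "(?m)^#(?<m1>[^#]*)#(?<DateTime>[^#]*)#(?<Timezone>[^#]*)#(?<Severity>[^#]*)#(?<Category>[^#]*)#(?<m6>[^#]*)#(?<CustomerMessageComponent>[^#]*)#(?<RuntimeComponent>[^#]*)#(?<LogID>[^#]*)#(?<CorrelationID>[^#]*)#(?<Application>[^#]*)#(?<Location>[^#]*)#(?<User>[^#]*)#(?<Session>[^#]*)#(?<m2>[^#]*)#(?<PassportSession>[^#]*)#(?<PassportUserActivityID>[^#]*)#(?<PassportConnection>[^#]*)#(?<PassportConnectionCounter>[^#]*)#(?<Thread>[^#]*)#(?<m4>[^#]*)#(?<m5>[^#]*)#%{GREEDYDATA:ErrorMessage}#" }
}

The same idea would apply to the other grok blocks; only the field-name prefixes differ.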

Thank you for the reply.

1. Only the {GREEDYDATA:ErrorMessage} field is loading too much data. It gives me this error:

   "create"=>{"_index"=>"sap-trace-app-logs-k8p", "_type"=>"trc-prd", "_id"=>"AV02Zi5YHAmgJsrjBUjd", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"Document contains at least one immense term in field="prd_ErrorMessage" (whose UTF8 encoding is longer than the max length 32766), all of which were skipped. Please correct the analyzer to not produce such terms. The prefix of the first immense term is: '[10, 83, 105, 110, 103, 108, 101, 32, 67, 108, 105, 99, 107, 32, 65, 112, 112, 114, 111, 118, 97, 108, 32, 58, 32, 109, 80, 111, 115, 116]...', original message: bytes can be at most 32766 in length; got 106157", "caused_by"=>{"type"=>"max_bytes_length_exceeded_exception", "reason"=>"bytes can be at most 32766 in length; got 106157"}}}}}

2. I applied "ignore_above": 256 to this field in the index, which resolves my timeout error, but I don't want to lose data. "ignore_above": 256 will ignore the whole record (see the note after this list for one way to keep the data).

3. I can see my CPU utilization also reaches 100%.

4. While loading the dashboard in Kibana I am facing the error below:

   RemoteTransportException[[Corona][143.22.209.122:9300][indices:data/read/search[phase/query]]]; nested: CircuitBreakingException[[request] Data too large, data for [] would be larger than limit of [381891379/364.1mb]];
   Caused by: CircuitBreakingException[[request] Data too large, data for [<reused_arrays>] would be larger than limit of [381891379/364.1mb]]

5. I am getting a "Visualize Request Timeout after 30000ms" error while loading the dashboard.

I think all of these are related to that field. Is there a way I can resolve this without losing data?
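As a side note on point 2: in Elasticsearch the full field value always stays in the document's _source; ignore_above only stops the value from being indexed on the field it is set on, so it does not have to mean losing the record. A minimal sketch of how the elasticsearch output could load a custom index template carrying such a mapping (the template path and name, and the "raw" sub-field idea, are assumptions for illustration; the JSON template file itself is not shown):

elasticsearch {
  hosts => ["143.22.209.122"]
  index => "sap-trace-app-logs-prd"
  # Hypothetical template file: in it, ErrorMessage would stay an analyzed
  # string (full text searchable and kept in _source), and only its
  # not_analyzed "raw" sub-field would carry ignore_above, so an oversized
  # value skips that sub-field instead of failing the whole bulk item.
  template => "C:/ELK/templates/sap-logs-template.json"
  template_name => "sap-logs"
  template_overwrite => true
}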

Only the {GREEDYDATA:ErrorMessage} field is loading too much data

What do you mean? Show your configuration, don't describe it.

input
{
beats {
port => 5111
codec => multiline
{
pattern => "^#2.0"
what => "previous"
negate =>"true"
charset => "ISO-8859-1"
}
}

}
filter
{
if "trace" in [type]{
grok {
match => {"message" => "(?m)#%{DATA:m1}#%{DATA:DateTime}#%{DATA:Timezone}#%{DATA:Severity}#%{DATA:Category}#%{DATA:m6}#%{DATA:CustomerMessageComponent}#%{DATA:RuntimeComponent}#%{DATA:LogID}#%{DATA:CorrelationID}#%{DATA:Application}#%{DATA:Location}#%{DATA:User}#%{DATA:Session}#%{DATA:m2}#%{DATA:PassportSession}#%{DATA:PassportUserActivityID}#%{DATA:PassportConnection}#%{DATA:PassportConnectionCounter}#%{DATA:Thread}#%{DATA:m4}#%{DATA:m5}#%{GREEDYDATA:ErrorMessage}#"}
}
mutate
{
#remove_tag => ["multiline"],
remove_field => ["m1","m4","m6"]
strip => ["DateTime"]
}
date {
match => [ "DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
timezone => "EST"
target => "DateTime"
}
}

if "application" in [type]{
grok {
match => {"message" => "(?m)#%{DATA:m1}#%{DATA:DateTime}#%{DATA:Timezone}#%{DATA:Severity}#%{DATA:Category}#%{DATA:m6}#%{DATA:CustomerMessageComponent}#%{DATA:RuntimeComponent}#%{DATA:LogID}#%{DATA:CorrelationID}#%{DATA:Application}#%{DATA:Location}#%{DATA:User}#%{DATA:Session}#%{DATA:m2}#%{DATA:PassportSession}#%{DATA:PassportUserActivityID}#%{DATA:PassportConnection}#%{DATA:PassportConnectionCounter}#%{DATA:Thread}#%{DATA:m4}#%{DATA:m5}#%{GREEDYDATA:ErrorMessage}#"}
}
mutate
{
#remove_tag => ["multiline"],
remove_field => ["m1","m4","m6"]
strip => ["DateTime"]
}
date {
match => [ "DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
timezone => "EST"
target => "DateTime"
}
}
if "security" in [type]{
grok {
match => {"message" => "(?m)#%{DATA:security_m1}#%{DATA:security_DateTime}#%{DATA:security_Timezone}#%{DATA:security_Severity}#%{DATA:security_Category}#%{DATA:security_m6}#%{DATA:security_CustomerMessageComponent}#%{DATA:security_RuntimeComponent}#%{DATA:security_LogID}#%{DATA:security_CorrelationID}#%{DATA:security_Application}#%{DATA:security_Location}#%{DATA:security_User}#%{DATA:security_Session}#%{DATA:security_m2}#%{DATA:security_PassportSession}#%{DATA:security_PassportUserActivityID}#%{DATA:security_PassportConnection}#%{DATA:security_PassportConnectionCounter}#%{DATA:security_Thread}#%{DATA:security_m4}#%{DATA:security_m5}#%{GREEDYDATA:security_RemainingMessage}"}
}

	if "LOGIN." in [security_RemainingMessage] {
	
		grok {
			match => {"security_RemainingMessage" => "%{DATA:security_ErrorMessage}\nUser: %{DATA:security_LoginUser}\nIP Address: %{DATA:security_IPAddress}\n%{GREEDYDATA:security_FinalRemainingMessage}"}
		}		
	}
	else {
		grok {
			match => {"security_RemainingMessage" => "%{DATA:security_ErrorMessage}#%{GREEDYDATA:security_FinalRemainingMessage}"}
		}	
	}
mutate
{
	#remove_tag => ["multiline"],
	remove_field => ["security_m1","security_m4","security_m6","security_FinalRemainingMessage","security_RemainingMessage"]
	rename => {"host" => "hostname"}
	strip => ["security_DateTime"]
}
date {
		match => [ "security_DateTime", "YYYY MM dd HH:mm:ss:SSS" ]
		timezone => "EST"
		target => "security_DateTime"
	}
}

}

output {
if "trace" in [type]{
elasticsearch {
hosts => ["143.22.209.122"]
index => "test"
}
}
if "application" in [type]{
elasticsearch {
hosts => ["143.22.209.122"]
index => "test"
}
}

if "security" in [type]{
	elasticsearch {
		hosts => ["143.22.209.122"]
		index => "security-test"  
	}
}
}

Please check the configuration file.

Did you understand anything of what I wrote last time? You still have too many DATA and GREEDYDATA patterns in your grok filters. You need to fix that. Over and out.

True, I have too many DATA and GREEDYDATA patterns, but they are required to read the different log files.

but they are required to read the different log files.

No, it's not.

Hello,

Now I am using the csv filter.

Config file
input {
beats {
port => 5042
codec => multiline
{
pattern => "^#2.0"
what => "previous"
negate =>"true"
charset => "ISO-8859-1"
}
}
}
filter {
csv {
columns => ["m1","DateTime","Severity","Category","m6","CustomerMessageComponent", "RuntimeComponent" ,"LogID","CorrelationID","Application","Location","User","Session","m2","PassportSession","PassportUserActivityID","PassportConnection","PassportConnectionCounter","Thread","m4","m5","ErrorMessage"]
separator => ","
}
}
output {
if "trace" in [type]{
elasticsearch {
hosts => ["10.107.80.64"]
index => "testcsv"
}
}
}

I am not able to see all the fields in my index testcsv.

Also, the data is not being collected into the correct fields.

Sample data:

2.0#2017 07 12 00:49:15:943#0-500#Error#com.sap.engine.services.ejb3.runtime#com.sap.ASJ.ejb.005017#BC-JAS-EJB#ejb#C0008F16D17A09C50000000300001D48#29014150000000005#sap.com/com.sap.xi.mdt.beans#com.sap.engine.services.ejb3.runtime#SM_COLL_GTA#2129##EE2669CB66C411E7911C000001BAB886#ee2669cb66c411e7911c000001bab886##0#Thread[HTTP Worker [@924290866],5,Dedicated_Application_Thread]#Plain##
System exception

Only the m1 field is populated, and it contains all of this: #2.0#2017 07 12 00:48:15:851#0-500#Error#com.sap.engine.services.ejb3.runtime#

Please let me know how I can get the correct data into all the fields.

Your separator character is #, not a comma.

I am now using the config file below:
input {
beats {
port => 5042
codec => multiline
{
pattern => "^#2.0"
what => "previous"
negate =>"true"
charset => "ISO-8859-1"
}
}
}
filter {
csv {
separator => "#"

#m1#DateTime#Timezone#Severity#Category#m6#CustomerMessageComponent#RuntimeComponent#LogID#CorrelationID#Application#Location#User#Session#m2#PassportSession#PassportUserActivityID#PassportConnection#PassportConnectionCounter#Thread#m4#m5#ErrorMessage
columns => ["m1","DateTime","Timezone","Severity","Category","m6","CustomerMessageComponent", "RuntimeComponent" ,"LogID","CorrelationID","Application","Location","User","Session","m2","PassportSession","PassportUserActivityID","PassportConnection","PassportConnectionCounter","Thread","m4","m5","ErrorMessage"]

}
}

output {
if "trace" in [type]{
elasticsearch {
hosts => ["10.103.20.64"]
index => "csvconfigfour"
} } }
But I am still not getting data in the correct fields.
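For what it is worth, one more thing to check: the multiline pattern is ^#2.0, so each event appears to begin with a literal #. With separator => "#" the csv filter then sees an empty value before that first #, and every real value lands one column to the right of its name. A sketch of one way to absorb that, assuming the events really do start with # (the "leading_blank" column name is a made-up placeholder):

filter {
  csv {
    separator => "#"
    # The first entry soaks up the empty column produced by the leading '#',
    # so the real fields line up with their names again.
    columns => ["leading_blank","m1","DateTime","Timezone","Severity","Category","m6","CustomerMessageComponent","RuntimeComponent","LogID","CorrelationID","Application","Location","User","Session","m2","PassportSession","PassportUserActivityID","PassportConnection","PassportConnectionCounter","Thread","m4","m5","ErrorMessage"]
  }
  mutate {
    # Drop the placeholder once the real fields are aligned.
    remove_field => ["leading_blank"]
  }
}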