Logstash does not create an index in Elasticsearch

Hello,

Can someone help me solve this index creation issue? I'm currently using Logstash and Filebeat to send Tomcat logs to Elasticsearch.

Here is my Logstash configuration:

input {
  beats {
    port => 5044
    host => "localhost"
  }
}
filter {
  if "LoggingAuditMessageProcessor" in [message] {
    grok {
      match => { "message" => "^LoggingAuditMessageProcessor\: %{GREEDYDATA:request}" }
      patterns_dir => "/home/gfi-dev/ELK/logstash-5.6.2/patterns"
    }

    json {
      source => "request"
      remove_field => ["request"]
      remove_field => ["LoggingAuditMessageProcessor"]
    }

    mutate {
      add_tag => [ "LIFERAY_AUDIT_LOG" ]
    }

    if "_grokparsefailure" in [tags] {
      drop { }
    }

  } else {
    drop { }
  }
}
output {
  stdout {
    codec => rubydebug
  }
  if "LIFERAY_AUDIT_LOG" in [tags] {
    elasticsearch {
      hosts => ["http://localhost:9200"]
      index => "liferay-audit-%{+YYYY.MM.dd}"
      #document_type => "liferay_log"
      template => "/etc/filebeat/liferay.audit.template.es2x.json"
      template_name => "liferay-audit-template"
      template_overwrite => true
    }
  }
}
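
For completeness, these are the kinds of requests that should show whether the template was installed and whether the daily index exists (a sketch; host and port taken from the hosts setting above):

curl 'http://localhost:9200/_template/liferay-audit-template?pretty'
curl 'http://localhost:9200/_cat/indices/liferay-audit-*?v'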

To avoid permission problems I'm running everything as root (only for testing purposes).

Any help would be very much appreciated.

I forgot to post the Logstash output in debug mode:

[2017-10-14T15:43:26,729][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>250}
[2017-10-14T15:43:27,252][INFO ][logstash.inputs.beats ] Beats inputs: Starting input listener {:address=>"localhost:5044"}
[2017-10-14T15:43:27,300][INFO ][logstash.pipeline ] Pipeline main started
[2017-10-14T15:43:27,316][DEBUG][logstash.agent ] Starting puma
[2017-10-14T15:43:27,320][DEBUG][logstash.agent ] Trying to start WebServer {:port=>9600}
[2017-10-14T15:43:27,320][DEBUG][logstash.api.service ] [api-service] start
[2017-10-14T15:43:27,328][INFO ][org.logstash.beats.Server] Starting server on port: 5044
[2017-10-14T15:43:27,381][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2017-10-14T15:43:32,303][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:43:37,311][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:43:42,313][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:43:47,314][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:43:52,313][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:43:57,313][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:44:02,318][DEBUG][logstash.pipeline ] Pushing flush onto pipeline
[2017-10-14T15:44:04,595][DEBUG][org.logstash.beats.BeatsHandler] Received a new payload
[2017-10-14T15:44:04,604][DEBUG][org.logstash.beats.BeatsHandler] Sending a new message for the listener, sequence: 1
[2017-10-14T15:44:04,642][DEBUG][logstash.pipeline ] filter received {"event"=>{"@timestamp"=>2017-10-14T13:44:03.356Z, "offset"=>116117, "@version"=>"1", "input_type"=>"log", "beat"=>{"name"=>"osboxes", "hostname"=>"osboxes", "version"=>"5.6.2"}, "host"=>"osboxes", "source"=>"/home/gfi-dev/LIFERAY/liferay-dxp-digital-enterprise-7.0-sp4/tomcat-8.0.32/logs/catalina.out", "message"=>"LoggingAuditMessageProcessor: {"companyId":"20116","classPK":"20156","clientHost":"127.0.0.1","clientIP":"127.0.0.1","serverName":"localhost","className":"com.liferay.portal.kernel.model.User","sessionID":"23BA4608D001CEEFECAE40729C9A998C","eventType":"LOGIN","serverPort":8080,"userName":"Test Test","userId":"20156","timestamp":"20171014134355051"}", "type"=>"liferay_log", "tags"=>["beats_input_codec_plain_applied"]}}
[2017-10-14T15:44:04,645][DEBUG][logstash.filters.grok ] Running grok filter {:event=>2017-10-14T13:44:03.356Z osboxes LoggingAuditMessageProcessor: {"companyId":"20116","classPK":"20156","clientHost":"127.0.0.1","clientIP":"127.0.0.1","serverName":"localhost","className":"com.liferay.portal.kernel.model.User","sessionID":"23BA4608D001CEEFECAE40729C9A998C","eventType":"LOGIN","serverPort":8080,"userName":"Test Test","userId":"20156","timestamp":"20171014134355051"}}
[2017-10-14T15:44:04,646][DEBUG][logstash.filters.grok ] Event now: {:event=>2017-10-14T13:44:03.356Z osboxes LoggingAuditMessageProcessor: {"companyId":"20116","classPK":"20156","clientHost":"127.0.0.1","clientIP":"127.0.0.1","serverName":"localhost","className":"com.liferay.portal.kernel.model.User","sessionID":"23BA4608D001CEEFECAE40729C9A998C","eventType":"LOGIN","serverPort":8080,"userName":"Test Test","userId":"20156","timestamp":"20171014134355051"}}
[2017-10-14T15:44:04,647][DEBUG][logstash.filters.json ] Running json filter {:event=>2017-10-14T13:44:03.356Z osboxes LoggingAuditMessageProcessor: {"companyId":"20116","classPK":"20156","clientHost":"127.0.0.1","clientIP":"127.0.0.1","serverName":"localhost","className":"com.liferay.portal.kernel.model.User","sessionID":"23BA4608D001CEEFECAE40729C9A998C","eventType":"LOGIN","serverPort":8080,"userName":"Test Test","userId":"20156","timestamp":"20171014134355051"}}
[2017-10-14T15:44:04,649][DEBUG][logstash.filters.json ] filters/LogStash::Filters::Json: removing field {:field=>"request"}
[2017-10-14T15:44:04,650][DEBUG][logstash.filters.json ] filters/LogStash::Filters::Json: removing field {:field=>"LoggingAuditMessageProcessor"}
[2017-10-14T15:44:04,650][DEBUG][logstash.filters.json ] Event after json filter {:event=>2017-10-14T13:44:03.356Z osboxes LoggingAuditMessageProcessor: {"companyId":"20116","classPK":"20156","clientHost":"127.0.0.1","clientIP":"127.0.0.1","serverName":"localhost","className":"com.liferay.portal.kernel.model.User","sessionID":"23BA4608D001CEEFECAE40729C9A998C","eventType":"LOGIN","serverPort":8080,"userName":"Test Test","userId":"20156","timestamp":"20171014134355051"}}
[2017-10-14T15:44:04,652][DEBUG][logstash.util.decorators ] filters/LogStash::Filters::Mutate: adding tag {"tag"=>"LIFERAY_AUDIT_LOG"}
[2017-10-14T15:44:04,654][DEBUG][logstash.pipeline ] output received {"event"=>{"clientHost"=>"127.0.0.1", "offset"=>116117, "input_type"=>"log", "serverName"=>"localhost", "className"=>"com.liferay.portal.kernel.model.User", "source"=>"/home/gfi-dev/LIFERAY/liferay-dxp-digital-enterprise-7.0-sp4/tomcat-8.0.32/logs/catalina.out", "sessionID"=>"23BA4608D001CEEFECAE40729C9A998C", "eventType"=>"LOGIN", "message"=>"LoggingAuditMessageProcessor: {"companyId":"20116","classPK":"20156","clientHost":"127.0.0.1","clientIP":"127.0.0.1","serverName":"localhost","className":"com.liferay.portal.kernel.model.User","sessionID":"23BA4608D001CEEFECAE40729C9A998C","eventType":"LOGIN","serverPort":8080,"userName":"Test Test","userId":"20156","timestamp":"20171014134355051"}", "type"=>"liferay_log", "serverPort"=>8080, "userName"=>"Test Test", "userId"=>"20156", "tags"=>["beats_input_codec_plain_applied", "LIFERAY_AUDIT_LOG"], "companyId"=>"20116", "classPK"=>"20156", "@timestamp"=>2017-10-14T13:44:03.356Z, "clientIP"=>"127.0.0.1", "@version"=>"1", "beat"=>{"name"=>"osboxes", "hostname"=>"osboxes", "version"=>"5.6.2"}, "host"=>"osboxes", "timestamp"=>"20171014134355051"}}
[2017-10-14T15:44:07,345][DEBUG][logstash.pipeline ] Pushing flush onto pipeline

That looks OK. What do the Elasticsearch logs show?
What is the output of a request to _cat/indices?
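For example (a sketch, assuming the default HTTP port):

curl 'http://localhost:9200/_cat/indices?v'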

Thanks a lot for your help.

Elasticsearch logs:

[2017-10-14 20:34:46,559][WARN ][bootstrap ] running as ROOT user. this is a bad idea!
[2017-10-14 20:34:46,745][INFO ][node ] [liferay-node] version[2.4.6], pid[3332], build[5376dca/2017-07-18T12:17:44Z]
[2017-10-14 20:34:46,746][INFO ][node ] [liferay-node] initializing ...
[2017-10-14 20:34:47,540][INFO ][plugins ] [liferay-node] modules [reindex, lang-expression, lang-groovy], plugins [analysis-kuromoji, analysis-icu, analysis-smartcn, analysis-stempel], sites
[2017-10-14 20:34:47,594][INFO ][env ] [liferay-node] using [1] data paths, mounts [[/ (/dev/sda1)]], net usable_space [75.2gb], net total_space [95.3gb], spins? [possibly], types [ext4]
[2017-10-14 20:34:47,594][INFO ][env ] [liferay-node] heap size [1007.3mb], compressed ordinary object pointers [true]
[2017-10-14 20:34:49,974][INFO ][node ] [liferay-node] initialized
[2017-10-14 20:34:49,975][INFO ][node ] [liferay-node] starting ...
[2017-10-14 20:34:50,140][INFO ][transport ] [liferay-node] publish_address {10.0.2.15:9300}, bound_addresses {[::]:9300}
[2017-10-14 20:34:50,147][INFO ][discovery ] [liferay-node] elasticsearch/S4ePU6rpQcSaxkqy685d6A
[2017-10-14 20:34:53,260][INFO ][cluster.service ] [liferay-node] new_master {liferay-node}{S4ePU6rpQcSaxkqy685d6A}{10.0.2.15}{10.0.2.15:9300}, reason: zen-disco-join(elected_as_master, [0] joins received)
[2017-10-14 20:34:53,281][INFO ][http ] [liferay-node] publish_address {10.0.2.15:9200}, bound_addresses {[::]:9200}
[2017-10-14 20:34:53,289][INFO ][node ] [liferay-node] started
[2017-10-14 20:34:53,383][INFO ][gateway ] [liferay-node] recovered [3] indices into cluster_state
[2017-10-14 20:34:54,153][INFO ][cluster.routing.allocation] [liferay-node] Cluster health status changed from [RED] to [YELLOW] (reason: [shards started [[.kibana][0], [.kibana][0]] ...]).

Attached is the output from Sense for the requested command.

There are already two indices in Elasticsearch, created by the portal.

Thanks a lot :slight_smile:

Hello,

I finally understood the root cause of the problem: the index isn't recreated after it gets dropped. My cluster is used by the portal both for indexing and for log tracking, with different indices:

  • liferay-* for the portal
  • audit-liferay-* for log tracking

How can I force index creation in Logstash?
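
For reference, the elasticsearch output normally creates the index automatically on the first bulk write, as long as the cluster's action.auto_create_index setting allows it. To rule out a cluster-side restriction, the index can also be created by hand (a sketch, with a hypothetical date suffix that must match what the output would generate):

curl -XPUT 'http://localhost:9200/audit-liferay-2017.10.14'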

What I changed:

  • I changed the index name to avoid future index drops (when a reindex was forced at the portal level, all indices starting with liferay* were dropped).

  • I plugged Logstash directly into the log file (without Filebeat) to keep things simpler:

input {
  file {
    path => "/home/gfi-dev/ELK/logstash-5.6.2/samples/catalina.input.out"
    sincedb_path => "/home/gfi-dev/ELK/logstash-5.6.2/sincedb/liferay-sincedb"
    ignore_older => 0
  }
}
filter {
  if "LoggingAuditMessageProcessor" in [message] {
    grok {
      match => { "message" => "^LoggingAuditMessageProcessor\: %{GREEDYDATA:request}" }
      patterns_dir => "/home/gfi-dev/ELK/logstash-5.6.2/patterns"
    }

    json {
      source => "request"
      remove_field => ["request"]
      remove_field => ["LoggingAuditMessageProcessor"]
    }

    mutate {
      add_tag => [ "LIFERAY_AUDIT_LOG" ]
    }

    if "_grokparsefailure" in [tags] {
      drop { }
    }

  } else {
    drop { }
  }
}
output {
  stdout {
    codec => rubydebug
  }
  if "LIFERAY_AUDIT_LOG" in [tags] {
    elasticsearch {
      hosts => ["http://localhost:9200"]
      index => "audit-liferay-%{+YYYY.MM.dd}"
      #document_type => "liferay_log"
      template => "/home/gfi-dev/ELK/logstash-5.6.2/template/liferay.audit.template.es2x.json"
      template_name => "audit-liferay-template"
      template_overwrite => true
    }
  }
}
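
One thing worth double-checking with the file input (an observation, not something confirmed in this thread): in the Logstash file input, ignore_older is expressed in seconds, so a value of 0 can cause every file modified more than 0 seconds ago to be skipped, i.e. effectively all of them, and the input also tails from the end of the file by default. A sketch of the input with those two settings adjusted:

file {
  path => "/home/gfi-dev/ELK/logstash-5.6.2/samples/catalina.input.out"
  sincedb_path => "/home/gfi-dev/ELK/logstash-5.6.2/sincedb/liferay-sincedb"
  # ignore_older removed: a value of 0 seconds would skip every existing file
  start_position => "beginning"  # read existing content instead of only new lines
}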

I've been struggling with this for many days and couldn't figure out what's happening.

Thanks for your help!!!

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.