Kibana Visualize log data as defined in module in filebeat

I configured filebeat to send logs to elasticsearch. I am facing an issue in kibana visualizations. I defined a pattern in a filebeat module and I want to see logs in the Discover tab grouped by the fields defined in the filebeat module pattern. My module pipeline[module/glassfish/server/ingest] configuration is:

{
"description": "Pipeline for parsing Glassfish3.2.2 server logs",
"processors": [
{
  "grok": {
    "field": "message",
		"patterns": ["\\[#\\|%{TIMESTAMP_ISO8601:glassfish.server.timestamp}\\|%{LOGLEVEL:glassfish.server.loglevel}\\|%{DATA:glassfish.server.application}\\|%{GREEDYDATA:glassfish.server.component}\\|%{GLASSFISHTHREADS:glassfish.server.threadinfo}\\|%{LOGLEVEL:glassfish.server.app_log_level} %{GREEDYDATA:glassfish.server.app_correl_id} %{DATA:glassfish.server.app_class_name} - %{GREEDYDATA:glassfish.server.app_message}\\|#\\]","\\[#\\|%{TIMESTAMP_ISO8601:glassfish.server.timestamp}\\|%{LOGLEVEL:glassfish.server.loglevel}\\|%{DATA:glassfish.server.application}\\|%{GREEDYDATA:glassfish.server.component}\\|%{GLASSFISHTHREADS:glassfish.server.threadinfo}\\|%{GREEDYDATA:glassfish.server.app_timestamp}: \\[%{LOGLEVEL:glassfish.server.app_log_level}\\]: \\[%{GREEDYDATA:glassfish.server.app_correl_id}\\] Source Class = %{GREEDYDATA:glassfish.server.app_class_name} %{GREEDYDATA:glassfish.server.app_message}\\|#\\]"
        ],
		"ignore_missing": true,
    "pattern_definitions": {
      "GLASSFISHTHREADS": "_ThreadID=%{NUMBER:glassfish.server.threadid};_ThreadName=Thread-%{NUMBER:glassfish.server.threadnumberinname};"		   
    } 
  }
}
],
"on_failure" : [{
"set" : {
  "field" : "error",
  "value" : "{{ _ingest.on_failure_message }}"
}
}]
}

My custom template configuration is:

{
"mappings": {
"_default_": {
  "_all": {
    "norms": false
  },
  "_meta": {
    "version": "5.4.2"
  },
  "date_detection": false,
  "dynamic_templates": [
    {
      "strings_as_keyword": {
        "mapping": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "match_mapping_type": "string"
      }
    }
  ],
  "properties": {
    "@timestamp": {
      "type": "date"
    },
	"glassfish": {
      "properties": {
        "server": {
          "properties": {              
            "loglevel": {
              "ignore_above": 1024,
              "type": "keyword"
            },				
            "application": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "component": {
              "norms": false,
              "type": "text"
            },
            "threadnumberinname": {
              "type": "long"
            },
            "threadid": {
              "type": "long"
            },
			"app_log_level": {
              "ignore_above": 1024,
              "type": "keyword"
            },
			"app_correl_id": {
              "type": "keyword"
            },
			"app_class_name": {
              "ignore_above": 1024,
              "type": "keyword"
            },
			"app_message": {
              "ignore_above": 1024,
              "type": "keyword"
            }
          }
        }
      }
    },
	"beat": {
      "properties": {
        "hostname": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "name": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "version": {
          "ignore_above": 1024,
          "type": "keyword"
        }
      }
    },
    "error": {
      "ignore_above": 1024,
      "type": "keyword"
    },
    "fileset": {
      "properties": {
        "module": {
          "ignore_above": 1024,
          "type": "keyword"
        },
        "name": {
          "ignore_above": 1024,
          "type": "keyword"
        }
      }
    },
    "input_type": {
      "ignore_above": 1024,
      "type": "keyword"
    },
    "message": {
      "norms": false,
      "type": "text"
    },
    "meta": {
      "properties": {
        "cloud": {
          "properties": {
            "availability_zone": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "instance_id": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "machine_type": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "project_id": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "provider": {
              "ignore_above": 1024,
              "type": "keyword"
            },
            "region": {
              "ignore_above": 1024,
              "type": "keyword"
            }
          }
        }
      }
    },
	"offset": {
      "type": "long"
    },
    "read_timestamp": {
      "ignore_above": 1024,
      "type": "keyword"
    },
    "source": {
      "ignore_above": 2048,
      "type": "keyword"
    },
	"tags": {
      "ignore_above": 1024,
      "type": "keyword"
    },
    "type": {
      "ignore_above": 1024,
      "type": "keyword"
    }
	}
}
},
"order": 0,
"settings": {
"index.mapping.total_fields.limit": 10000,
"index.refresh_interval": "5s"
},
"template": "filebeat-*"
}

I am using

.\filebeat.exe -c filebeat.yml -e -v -modules=glassfish

command to start filebeat. When I load the index pattern in kibana, I cannot see any data in the defined fields. By default the data resides in one of the fields below: 1. _search
2. _message. But I want to visualize it in individual columns to give better visibility for readers. I don't know what I am missing. Please help me configure kibana based on my needs.

It looks like your Filebeat module is not correctly parsing the log lines. The image from the second screenshot is just reading from the mapping, but Discover is showing the actual data that exists.

You might need to work on your grok processor a little bit. I suggest posting this question in the Beats category to get the best help, since this isn't really a Kibana question.

If you make a new post, I would also recommend to provide a sample of a message line to help with the grok pattern, because it is really hard to see what the data looks like in the screenshot.

Thank you so much tsullivan. There are 2 patterns of log messages I have to parse to get data from them. 1-sample log message:

[#|2017-07-18T13:58:00.340-0400|INFO|glassfish3.1.2|javax.enterprise.system.std.com.sun.enterprise.server.logging|_ThreadID=21;_ThreadName=Thread-2;|ERROR 2d3539313438333230393333353734313934326c6f63616c686f737438303835 MemberRegistrationAction - 
org.app.common.horizontal.validations.ValidationException
at org.app.common.web.util.ServiceGateway.checkException(ServiceGateway.java:176)
at org.app.common.web.util.ServiceGateway.get(ServiceGateway.java:56)
at org.app.internet.ora.application.action.MemberRegistrationAction.verifyLdapRecordNotExists(MemberRegistrationAction.java:665)
at org.app.internet.ora.application.action.MemberRegistrationAction.verifyMemberRegistration(MemberRegistrationAction.java:219)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.struts.actions.DispatchAction.dispatchMethod(DispatchAction.java:280)
at org.apache.struts.actions.LookupDispatchAction.execute(LookupDispatchAction.java:252)
at org.apache.struts.action.RequestProcessor.processActionPerform(RequestProcessor.java:484)
at org.apache.struts.action.RequestProcessor.process(RequestProcessor.java:274)
at org.app.common.web.webcontrol.BaseRequestProcessor.process(BaseRequestProcessor.java:53)
at org.app.internet.ora.application.ORARequestProcessor.process(ORARequestProcessor.java:91)
at org.apache.struts.action.ActionServlet.process(ActionServlet.java:1482)
at org.apache.struts.action.ActionServlet.doPost(ActionServlet.java:525)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:688)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:770)
at org.apache.catalina.core.StandardWrapper.service(StandardWrapper.java:1542)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:281)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:175)
at org.apache.catalina.core.StandardPipeline.doInvoke(StandardPipeline.java:655)
at org.apache.catalina.core.StandardPipeline.invoke(StandardPipeline.java:595)
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:161)
at org.apache.catalina.core.StandardPipeline.doInvoke(StandardPipeline.java:655)
at org.apache.catalina.core.StandardPipeline.invoke(StandardPipeline.java:595)
at org.apache.catalina.connector.CoyoteAdapter.doService(CoyoteAdapter.java:328)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:231)
at com.sun.enterprise.v3.services.impl.ContainerMapper$AdapterCallable.call(ContainerMapper.java:317)
at com.sun.enterprise.v3.services.impl.ContainerMapper.service(ContainerMapper.java:195)
at com.sun.grizzly.http.ProcessorTask.invokeAdapter(ProcessorTask.java:849)
at com.sun.grizzly.http.ProcessorTask.doProcess(ProcessorTask.java:746)
at com.sun.grizzly.http.ProcessorTask.process(ProcessorTask.java:1045)
at com.sun.grizzly.http.DefaultProtocolFilter.execute(DefaultProtocolFilter.java:228)
at com.sun.grizzly.DefaultProtocolChain.executeProtocolFilter(DefaultProtocolChain.java:137)
at com.sun.grizzly.DefaultProtocolChain.execute(DefaultProtocolChain.java:104)
at com.sun.grizzly.DefaultProtocolChain.execute(DefaultProtocolChain.java:90)
at com.sun.grizzly.http.HttpProtocolChain.execute(HttpProtocolChain.java:79)
at com.sun.grizzly.ProtocolChainContextTask.doCall(ProtocolChainContextTask.java:54)
at com.sun.grizzly.SelectionKeyContextTask.call(SelectionKeyContextTask.java:59)
at com.sun.grizzly.ContextTask.run(ContextTask.java:71)
at com.sun.grizzly.util.AbstractThreadPool$Worker.doWork(AbstractThreadPool.java:532)
at com.sun.grizzly.util.AbstractThreadPool$Worker.run(AbstractThreadPool.java:513)
at java.lang.Thread.run(Thread.java:662)
|#] 

And 2-sample log message:

[#|2017-07-13T15:54:32.973-0400|INFO|glassfish3.1.2|javax.enterprise.system.std.com.sun.enterprise.server.logging|_ThreadID=27;_ThreadName=Thread-2;|2017 07 13 15:54:32: [Error]
 Source Class = org.app.common.horizontal.jndi.JndiObjectUtilities
 Source method = createInitialContext
 Application message = Missing value for the initial context factory class name.
|#]

There are another kind of log message also I am getting from glassfish server like below,

[#|2017-07-13T15:54:33.056-0400|SEVERE|glassfish3.1.2|javax.enterprise.system.tools.deployment.org.glassfish.deployment.common|_ThreadID=28;_ThreadName=Thread-2;|Exception while visiting com/sun/gjc/common/DataSourceSpec.class of size 3267
java.lang.NullPointerException
|#]

but I am setting "ignore_missing": true in my pipeline to ignore those messages, my grok pattern is:

"processors": [
{
  "grok": {
    "field": "message",
		"patterns": ["\\[#\\|%{TIMESTAMP_ISO8601:glassfish.server.timestamp}\\|%{LOGLEVEL:glassfish.server.loglevel}\\|%{DATA:glassfish.server.application}\\|%{GREEDYDATA:glassfish.server.component}\\|%{GLASSFISHTHREADS:glassfish.server.threadinfo}\\|%{LOGLEVEL:glassfish.server.app_log_level} %{GREEDYDATA:glassfish.server.app_correl_id} %{DATA:glassfish.server.app_class_name} - %{GREEDYMULTILINE:glassfish.server.app_message}\\|#\\]","\\[#\\|%{TIMESTAMP_ISO8601:glassfish.server.timestamp}\\|%{LOGLEVEL:glassfish.server.loglevel}\\|%{DATA:glassfish.server.application}\\|%{GREEDYDATA:glassfish.server.component}\\|%{GLASSFISHTHREADS:glassfish.server.threadinfo}\\|%{GREEDYDATA:glassfish.server.app_timestamp}: \\[%{LOGLEVEL:glassfish.server.app_log_level}\\]: \\[%{GREEDYDATA:glassfish.server.app_correl_id}\\] Source Class = %{GREEDYDATA:glassfish.server.app_class_name} %{GREEDYMULTILINE:glassfish.server.app_message}\\|#\\]"
        ],
		"ignore_missing": true,
    "pattern_definitions": {
      "GLASSFISHTHREADS": "_ThreadID=%{NUMBER:glassfish.server.threadid};_ThreadName=Thread-%{NUMBER:glassfish.server.threadnumberinname};", "GREEDYMULTILINE" : "(.|\n)*"		   
    } 
  }
}
]

Could you please correct me if I am wrong? I have been sitting on this for nearly a week.

Hello,

If you are having problems with the Grok pattern, try https://grokdebug.herokuapp.com/patterns

For further help, I respectfully suggest posting the question in the Filebeat or Logstash category since this isn't a Kibana question. I just want to make sure you find the best home for your question so you can get the best help.

Right!! I already posted a question in filebeat. Thanks for your time on this!!

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.