Here is message.proto
syntax = "proto3";
message SearchRequest {
string query = 1;
int32 page_number = 2;
int32 result_per_page = 3;
}
compiled using sudo protoc --python_out=. message.proto
which will gives message_pb.py as
Generated by the protocol buffer compiler. DO NOT EDIT!
source: message.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
@@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='message.proto',
package='',
syntax='proto3',
serialized_pb=_b('\n\rmessage.proto"L\n\rSearchRequest\x12\r\n\x05query\x18\x01 \x01(\t\x12\x13\n\x0bpage_number\x18\x02 \x01(\x05\x12\x17\n\x0fresult_per_page\x18\x03 \x01(\x05\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_SEARCHREQUEST = _descriptor.Descriptor(
name='SearchRequest',
full_name='SearchRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='query', full_name='SearchRequest.query', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='page_number', full_name='SearchRequest.page_number', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='result_per_page', full_name='SearchRequest.result_per_page', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17,
serialized_end=93,
)
DESCRIPTOR.message_types_by_name['SearchRequest'] = _SEARCHREQUEST
SearchRequest = _reflection.GeneratedProtocolMessageType('SearchRequest', (_message.Message,), dict(
DESCRIPTOR = _SEARCHREQUEST,
module = 'message_pb2'
@@protoc_insertion_point(class_scope:SearchRequest)
))
_sym_db.RegisterMessage(SearchRequest)
@@protoc_insertion_point(module_scope)
and one simple python script is used to encode message as test_protobug.py
import message_pb2
import sys
message = message_pb2.SearchRequest()
message.query = "First message query"
message.page_number = 123
message.result_per_page = 10
f = open("firstmessage.log", "w")
print "Encoded: %s "%message.SerializeToString()
f.write(message.SerializeToString())
f.close()
f = open("firstmessage.log", "r")
message.ParseFromString(f.read())
f.close()
print message
for logstash message.proto is compiled and converted in message into ruby as message_pd.rb as
#!/usr/bin/env ruby
Generated by the protocol buffer compiler. DO NOT EDIT!
require 'protocol_buffers'
forward declarations
class SearchRequest < ::ProtocolBuffers::Message; end
class SearchRequest < ::ProtocolBuffers::Message
set_fully_qualified_name "SearchRequest"
required :string, :query, 1
required :int32, :page_number, 2
required :int32, :result_per_page, 3
end
and logstash conf file is as
input
{
file
{
path => "/home/qabuilder/protobuf/firstmessage.log"
start_position => "beginning"
codec => protobuf
{
class_name => "SearchRequest"
include_path => ["/tmp/message_pb.rb"]
protobuf_version => 3
}
}
}
output
{
stdout
{
codec => json
}
}
gives empty fields as
[2018-05-17T08:18:13,902][DEBUG][logstash.runner ] node.name: "rn-dt1"
[2018-05-17T08:18:17,785][DEBUG][logstash.pipeline ] filter received {"event"=>{"result_per_page"=>0, "page_number"=>0, "query"=>"", "@timestamp"=>2018-05-17T15:18:17.662Z, "path"=>"/home/qabuilder/protobuf/firstmessage.log", "host"=>"rn-dt1", "@version"=>"1"}}
[2018-05-17T08:18:17,792][DEBUG][logstash.pipeline ] output received {"event"=>{"result_per_page"=>0, "page_number"=>0, "query"=>"", "@timestamp"=>2018-05-17T15:18:17.662Z, "path"=>"/home/qabuilder/protobuf/firstmessage.log", "host"=>"rn-dt1", "@version"=>"1"}}
{"result_per_page":0,"page_number":0,"query":"","@timestamp":"2018-05-17T15:18:17.662Z","path":"/home/qabuilder/protobuf/firstmessage.log","host":"rn-dt1","@version":"1"}[2018-05-17T08:18:19,175][DEBUG][logstash.instrument.periodicpoller.cgroup] Error, cannot retrieve cgroups information {:exception=>"Errno::ENOENT", :message=>"No such file or directory - /sys/fs/cgroup/cpuacct/cpu.cfs_period_us"}
Expected o/p is
query = "First message query"
page_number = 123
result_per_page = 10
but it gives as
{
"result_per_page": 0,
"page_number": 0,
"query": ""
}