While starting my Logstash service, I'm getting the error below. Please help.
/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:22] agent - Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, => at line 76, column 17 (byte 2757) after filter {\n if [type] == "cflogs" {\n grok {\n match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} (?:-|%{INT:elb_status_code:int}) (?:-|%{INT:backend_status_code:int}) %{INT:received_bytes:int} %{INT:sent_bytes:int} \"%{ELB_REQUEST_LINE}\" \"(?:-|%{DATA:user_agent})\" (?:-|%{NOTSPACE:ssl_cipher}) (?:-|%{NOTSPACE:ssl_protocol})" ]\n }\n date {\n locale => "en"\n match => [ "timestamp", "d/MMM/YYYY:HH:mm:ss Z" ]\n }\n # these will ensure we have a valid index even if there are upper case letters in elb names\n mutate {\n add_field => { "indexname" => "cf-%{cf}" }\n }\n mutate {\n lowercase => [ "indexname" ]\n }\n }\n\n if [type] == "elblogs" {\n grok {\n match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} (?:-|%{INT:elb_status_code:int}) (?:-|%{INT:backend_status_code:int}) %{INT:received_bytes:int} %{INT:sent_bytes:int} \"%{ELB_REQUEST_LINE}\" \"(?:-|%{DATA:user_agent})\" (?:-|%{NOTSPACE:ssl_cipher}) (?:-|%{NOTSPACE:ssl_protocol})" ]\n }\n date {\n locale => "en"\n match => [ "timestamp", "d/MMM/YYYY:HH:mm:ss Z" ]\n }\n # these will ensure we have a valid index even if there are upper case letters in elb names\n mutate {\n add_field => { "indexname" => "elb-%{elb}" }\n }\n mutate {\n lowercase => [ "indexname" ]\n }\n }\n\n if [type] == "buildlogs" {\n grok {\n match => { "message" => "%{COMBINEDAPACHELOG}" }\n }\n date {\n match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]\n }\n mutate {\n add_field => { "indexname" => "builds-%{builds}" }\n\t}\n mutate {\n lowercase => [ "indexname" ]\n }\n}\noutput {\n elasticsearch ", :backtrace=>[
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:42:in `compile_imperative'",
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:50:in `compile_graph'",
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:12:in `block in compile_sources'",
"org/jruby/RubyArray.java:2486:in `map'",
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:in `compile_sources'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:51:in `initialize'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:169:in `initialize'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:40:in `execute'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:315:in `block in converge_state'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:312:in `block in converge_state'",
"org/jruby/RubyArray.java:1734:in `each'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:299:in `converge_state'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:166:in `block in converge_state_and_update'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:164:in `converge_state_and_update'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:90:in `execute'",
"/usr/share/logstash/logstash-core/lib/logstash/runner.rb:348:in `block in execute'",
"/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"]}
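From what I can tell, "Expected one of #, =>" means the config parser reached a word it could only treat as a setting name and then did not find the => it expected. Since the config echoed back in the error stops right at output {\n elasticsearch, the parser was apparently still inside the filter block when it reached output. A minimal, made-up config (not my real one) that produces the same kind of error:

filter {
  if [type] == "example" {
    mutate { add_field => { "note" => "demo" } }
  }
# the brace that should close "filter" is missing here, so the parser
# treats "output" as a plugin that is still inside the filter block
output {
  stdout { }    # "stdout" is then read as a setting name, hence "Expected one of #, =>"
}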
My config file is as follows.
input {
  beats {
    type => "buildlogs"    # must match the case-sensitive [type] == "buildlogs" comparison in the filter section
    port => "5044"
  }
  s3 {
    bucket => "fcalb-logs"
    prefix => "qa-alb/AWSLogs/155086999298/elasticloadbalancing/"
    region => "us-east-1"
    type => "elblogs"
    codec => plain
    secret_access_key => "sdfsfsdfsfsfds"
    access_key_id => "sfsfsfsfsfsf"
  }
  s3 {
    bucket => "fcalb-logs"
    prefix => "cf-logs/"
    region => "us-east-1"
    type => "cflogs"
    codec => plain
    secret_access_key => "fdggfdgdgdgd"
    access_key_id => "sdggdfgdgf"
  }
}
filter {
  if [type] == "cflogs" {
    grok {
      match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} (?:-|%{INT:elb_status_code:int}) (?:-|%{INT:backend_status_code:int}) %{INT:received_bytes:int} %{INT:sent_bytes:int} \"%{ELB_REQUEST_LINE}\" \"(?:-|%{DATA:user_agent})\" (?:-|%{NOTSPACE:ssl_cipher}) (?:-|%{NOTSPACE:ssl_protocol})" ]
    }
    date {
      locale => "en"
      # note: this Joda pattern will not match the ISO8601 timestamp captured above, so the date
      # filter will tag events with _dateparsefailure; "ISO8601" is the usual format for that field
      match => [ "timestamp", "d/MMM/YYYY:HH:mm:ss Z" ]
    }
    # keep the index name valid even if it contains uppercase letters
    mutate {
      # note: the grok above creates no "cf" field, so "%{cf}" will be emitted literally
      add_field => { "indexname" => "cf-%{cf}" }
    }
    mutate {
      lowercase => [ "indexname" ]
    }
  }
if [type] == "elblogs" {
grok {
match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb} %{IP:clientip}:%{INT:clientport:int} (?:(%{IP:backendip}:?:%{INT:backendport:int})|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} (?:-|%{INT:elb_status_code:int}) (?:-|%{INT:backend_status_code:int}) %{INT:received_bytes:int} %{INT:sent_bytes:int} "%{ELB_REQUEST_LINE}" "(?:-|%{DATA:user_agent})" (?:-|%{NOTSPACE:ssl_cipher}) (?:-|%{NOTSPACE:ssl_protocol})" ]
}
date {
locale => "en"
match => [ "timestamp", "d/MMM/YYYY:HH:mm:ss Z" ]
}
# these will ensure we have a valid index even if there are upper case letters in elb names
mutate {
add_field => { "indexname" => "elb-%{elb}" }
}
mutate {
lowercase => [ "indexname" ]
}
}
if [type] == "buildlogs" {
grok {
match => { "message" => "%{COMBINEDAPACHELOG}" }
}
date {
match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
}
mutate {
add_field => { "indexname" => "builds-%{builds}" }
}
mutate {
lowercase => [ "indexname" ]
}
}
output {
  elasticsearch { hosts => ["localhost:9200"] }
}
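One more thing I noticed while editing: the indexname field built by the mutate filters is never referenced by the output. What I think I eventually want is something like the sketch below; the index setting and the daily date suffix are my guess at the intent, not something the current config does:

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    # assumption: route each event to the per-type index built in the filter section
    index => "%{indexname}-%{+YYYY.MM.dd}"
  }
}

Either way, the config syntax can be checked without restarting the service (the config path here is just where I keep mine):

/usr/share/logstash/bin/logstash --config.test_and_exit -f /etc/logstash/conf.d/pipeline.conf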