Logstash pipeline syntax errors after upgrading from 7.1.1 to 7.7.0

------------------- Some information -------------------
We are running ELK stack 7.1.1. The Logstash parser is written in multiple files under the "/etc/logstash/conf.d" directory.

Now I have upgraded the ELK stack from 7.1.1 to 7.7.0 and Logstash gives the error below.

------------------- ERROR (from the "/var/log/logstash/logstash-plain.log" file) -------------------
[2020-06-10T05:57:28,429][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.7.0"}
[2020-06-10T05:57:29,745][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \t\r\n], "#", "if", [A-Za-z0-9_-], '"', "'", "}" at line 4, column 1 (byte 11) after input {\n\n", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:58:in compile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:66:in compile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:28:in block in compile_sources'", "org/jruby/RubyArray.java:2577:in map'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:27:in compile_sources'", "org/logstash/execution/AbstractPipelineExt.java:181:in initialize'", "org/logstash/execution/JavaBasePipelineExt.java:67:in initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:43:in initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:52:in execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:342:in block in converge_state'"]}
[2020-06-10T05:57:30,066][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2020-06-10T05:57:31,190][ERROR][logstash.agent ] Internal API server error {:status=>500, :request_method=>"GET", :path_info=>"/_node/stats", :query_string=>"", :http_version=>"HTTP/1.1", :http_accept=>nil, :error=>"Unexpected Internal Error", :class=>"LogStash::Instrument::MetricStore::MetricNotFound", :message=>"For path: events. Map keys: [:reloads, :pipelines]", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:241:in block in get_recursively'", "org/jruby/RubyArray.java:1809:in each'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:240:in get_recursively'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:251:in block in get_recursively'", "org/jruby/RubyArray.java:1809:in each'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:240:in get_recursively'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:111:in block in get'", "org/jruby/ext/thread/Mutex.java:164:in synchronize'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:110:in get'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:124:in get_shallow'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:173:in block in extract_metrics'", "org/jruby/RubyArray.java:1809:in each'", "org/jruby/RubyEnumerable.java:1126:in inject'", "/usr/share/logstash/logstash-core/lib/logstash/instrument/metric_store.rb:149:in extract_metrics'", "/usr/share/logstash/logstash-core/lib/logstash/api/service.rb:45:in extract_metrics'", "/usr/share/logstash/logstash-core/lib/logstash/api/commands/base.rb:37:in extract_metrics'", "/usr/share/logstash/logstash-core/lib/logstash/api/commands/stats.rb:73:in events'", "/usr/share/logstash/logstash-core/lib/logstash/api/modules/node_stats.rb:57:in events_payload'", "/usr/share/logstash/logstash-core/lib/logstash/api/modules/node_stats.rb:37:in block in GET /?:filter?'", "org/jruby/RubyMethod.java:115:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1636:in block in compile!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:987:in block in route!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1006:in route_eval'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:987:in block in route!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1035:in block in process_route'", "org/jruby/RubyKernel.java:1189:in catch'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1033:in process_route'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:985:in block in route!'", "org/jruby/RubyArray.java:1809:in each'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:984:in route!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1098:in block in dispatch!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1072:in block in invoke'", "org/jruby/RubyKernel.java:1189:in catch'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1072:in invoke'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1095:in dispatch!'", 
"/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:919:in block in call!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1072:in block in invoke'", "org/jruby/RubyKernel.java:1189:in catch'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1072:in invoke'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:919:in call!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:908:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-protection-2.0.8.1/lib/rack/protection/xss_header.rb:18:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-protection-2.0.8.1/lib/rack/protection/path_traversal.rb:16:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-protection-2.0.8.1/lib/rack/protection/json_csrf.rb:26:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-protection-2.0.8.1/lib/rack/protection/base.rb:50:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-protection-2.0.8.1/lib/rack/protection/base.rb:50:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-protection-2.0.8.1/lib/rack/protection/frame_options.rb:31:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-2.2.2/lib/rack/null_logger.rb:11:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-2.2.2/lib/rack/head.rb:12:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:194:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sinatra-2.0.8.1/lib/sinatra/base.rb:1951:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-2.2.2/lib/rack/urlmap.rb:74:in block in call'", "org/jruby/RubyArray.java:1809:in each'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-2.2.2/lib/rack/urlmap.rb:58:in call'", "/usr/share/logstash/logstash-core/lib/logstash/api/rack_app.rb:74:in call'", "/usr/share/logstash/logstash-core/lib/logstash/api/rack_app.rb:48:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/rack-2.2.2/lib/rack/builder.rb:244:in call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/puma-4.3.3-java/lib/puma/server.rb:682:in handle_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/puma-4.3.3-java/lib/puma/server.rb:472:in process_client'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/puma-4.3.3-java/lib/puma/server.rb:328:in block in run'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/puma-4.3.3-java/lib/puma/thread_pool.rb:134:in `block in spawn_thread'"]}
[2020-06-10T05:57:31,287][ERROR][logstash.agent ] API HTTP Request {:status=>500, :request_method=>"GET", :path_info=>"/_node/stats", :query_string=>"", :http_version=>"HTTP/1.1", :http_accept=>nil}
[2020-06-10T05:57:34,977][INFO ][logstash.runner ] Logstash shut down.

Hi, can you copy your pipeline filter configuration here?

Your log marks the fault at line 4, column 1 (byte 11), right after "input {".
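
In the meantime, a quick way to check where the parser breaks is to run the config test from the command line (a sketch; the paths assume a standard package install, so adjust them to your environment):

    sudo -u logstash /usr/share/logstash/bin/logstash \
        --path.settings /etc/logstash \
        --config.test_and_exit

With --config.test_and_exit Logstash only parses the pipeline configuration and reports syntax errors, then exits without starting the pipeline.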

I have the following list of files:

1000-input-start
1001-input-syslog
1002-input-netflow
1999-input-end
2000-filter-start
2001-filter-syslog
2002-filter-netflow
2999-filter-end
3000-output-start
3001-output-syslog
3002-output-netflow
3999-output-end

---- File-wise code

#1000-input-start

input {

#1001-input-syslog

     # Syslog messages

     udp {
         port => 2101
         type => "syslog"
     }

#1002-input-netflow

     # Syslog messages from cisco devices

     udp {
         port => 2102
         type => "netflow"
     }

#1999-input-end

}

#2000-filter-start

 filter {

#2001-filter-syslog

 if [type] == "syslog" {
   # some logic
 }

#2002-filter-netflow

 if [type] == "netflow" {
   # some logic
 }

#2999-filter-end

 }

#3000-output-start

 output {

#3001-output-syslog

if [type] == "syslog" {
     elasticsearch {
          hosts => ["localhost"]
          index => "syslog-%{+YYYY.MM.dd}"
       }
    }

#3002-output-netflow

if [type] == "netflow" {
     elasticsearch {
          hosts => ["localhost"]
          index => "netflow-%{+YYYY.MM.dd}"
       }
    }

#3999-output-end

 }

There are configuration files dedicated just to the opening and closing curly brackets; I presume this relies on the way Logstash reads all the configuration files and concatenates them into the full configuration.

I have not tested this way of working. I think it would be better to use "complete" input, filter and output configurations, each with its own opening and closing input/filter/output section, so that each file is correct by itself and doesn't depend on being concatenated between "section + opening curly bracket" and "closing curly bracket" files.

That is, get rid of the 1000, 1999, 2000, 2999, 3000 and 3999 configuration files and use these instead:

1001-input-syslog

     # Syslog messages
input {
     udp {
         port => 2101
         type => "syslog"
     }
}

1002-input-netflow

     # Syslog messages from cisco devices
input {
     udp {
         port => 2102
         type => "netflow"
     }
}

2001-filter-syslog

filter {
  if [type] == "syslog" {
    # some logic
  }
}

and so on.

Each file has to be a complete section, such as input, filter or output. See here for more details.
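
For example, following the same pattern, the syslog output file would be a complete output section on its own (a sketch based on the output block you posted; keep your real hosts and index settings):

3001-output-syslog

output {
  if [type] == "syslog" {
    elasticsearch {
      hosts => ["localhost"]
      index => "syslog-%{+YYYY.MM.dd}"
    }
  }
}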

I have applied the changes as you suggested and it worked for me.
Thanks andres for your help :slight_smile:

Thanks badger for your help :slight_smile:
