Logstash pipeline error at line 1 column 1

I get this error:

[2021-08-08T20:15:12,553][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}
[2021-08-08T20:15:13,441][ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", \"input\", \"filter\", \"output\" at line 1, column 1 (byte 1)", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:187:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:47:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:52:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:389:in `block in converge_state'"]}
[2021-08-08T20:15:13,530][INFO ][logstash.runner          ] Logstash shut down.
[2021-08-08T20:15:13,538][FATAL][org.logstash.Logstash    ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
	at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:747) ~[jruby-complete-9.2.16.0.jar:?]
	at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:710) ~[jruby-complete-9.2.16.0.jar:?]
	at usr.share.logstash.lib.bootstrap.environment.<main>(/usr/share/logstash/lib/bootstrap/environment.rb:89) ~[?:?]

Bear in mind that I'm running Logstash in a container.
Here is the .conf file:

input {
	# Poll the MISP restSearch API on a fixed schedule for recent IP attributes.
	http_poller {
		urls => {
			mispevents => {
				url => "https://misp.local/attributes/restsearch"
				method => post
				# Ask MISP for a CSV export of IP attributes seen in the last hour.
				body => '{"returnFormat":"csv","type":{"OR": ["ip-dst","ip-dst|port", "ip-src", "ip-src|port"]}, "timestamp": "1h" }'
				headers => {
					"Authorization" => "mykey"
					"Content-Type" => "application/json"
					"Accept" => "application/json"
				}
			}
		}
		# CA certificate for the MISP instance (presumably self-signed -- confirm).
		cacert => "/usr/share/logstash/pipeline/local.crt"
		schedule => { every => "5m" }
		# Split the CSV response into one event per line.
		codec => "line"
		keepalive => false
		request_timeout => 180
		socket_timeout => 180
	}
}

filter {

	# Drop empty response lines before CSV parsing.
	if [message] == "" {
		drop{}
	}

	csv{
		# Column layout of the MISP restSearch CSV export.
		skip_header => "true"
		columns => ["uuid", "event_id","category", "type", "value", "comment", "to_ids", "date", "object_relation", "object_uuid", "object_name", "object_meta_category"]
		add_field => {"priority" => "6" "lookupType" => "feed"}
		remove_field => ["to_ids", "object_relation", "object_uuid", "object_name", "object_meta_category"]
	}

	# Replace an empty comment with a single space so the later
	# "%{comment}" interpolation always has a value to expand.
	if ( [comment] == "") {
		mutate { replace => {"comment" => " "}}
	}

	mutate {
		add_field => {"misp_date" => "%{date}"}
	}

	# ip-dst|port / ip-src|port attribute values arrive as "a.b.c.d|port";
	# split them into Address and Port.
	# Define document id as the ipaddress without the "." - to prevent duplicate entries in els index

	if ( ([type] == "ip-dst|port") or ([type] == "ip-src|port") ) {

		grok {
			# FIX: the original pattern "( ?<Port>\d+ )" had a space after "(",
			# which makes a plain (unnamed) group instead of a named capture,
			# and the " | " alternation matched a literal space-pipe-space.
			# Capture both sides of the "|" separator instead.
			match => {
				"value" => "(?<Address>\d+\.\d+\.\d+\.\d+)\|(?<Port>\d+)"
			}
		}

		if [Address] {
			# FIX: field references must not contain spaces --
			# "[source] [address]" is a different reference than "[source][address]".
			# All copies are merged into one hash: repeating the `copy` option
			# inside a single mutate is a duplicate setting. Entries execute in
			# insertion order, so the chained copies below resolve correctly.
			mutate {
				copy => {
					"Address" => "[source][address]"
					"[source][address]" => "[source][ip]"
					"[source][ip]" => "[destination][address]"
					"[destination][address]" => "[destination][ip]"
				}
			}
		}

		if [Port] {
			mutate {
				copy => {
					"Port" => "[source][port]"
					"[source][port]" => "[destination][port]"
				}
			}
		}

		mutate {
			add_field => {
				"[@metadata][misp_key]" => "%{Address}:%{Port}"
				"[@metadata][misp_value]" => "%{category}~%{comment}~%{priority}~%{type}~%{event_id}~%{misp_date}"
				"[@metadata][documentid]" => "%{Address}%{Port}"
			}
		}

	}

	# if [@metadata][misp_key]{
	# 	mutate {
	# 		strip => ["[@metadata][misp_key]"]
	# 	}

	# 	# lookup for the key in csv file
	# 	ruby{
	# 		path => "/opt/logstash7/scripts/mispKeyLookup.rb"
	# 		script_params => { "source_field" => "[@metadata][misp_key]" "lookupType" => "lookupType" "targetField" => "MispKeyAction" }
	# 	}
	# }

	# FIX: while the ruby lookup above is commented out, [MispKeyAction] never
	# exists, and a missing field compares unequal to "add" -- so the original
	# condition dropped EVERY event. Only drop when the field is present and
	# explicitly not "add".
	if [MispKeyAction] and [MispKeyAction] != "add" {
		drop {}
	}

	# Document id = ip+port with the dots replaced, for stable ES doc ids.
	if [@metadata][documentid] {
		mutate {
			# FIX: "[@metadata] [documentid]" (with a space) referenced a
			# nonexistent field, so the dots were never actually replaced.
			gsub => [ "[@metadata][documentid]", "\.", "-" ]
		}
	}

	mutate {
		remove_field => ["message", "MispKeyAction", "date", "Address", "Port"]
	}

}



output {

	# Write the misp_key / misp_value pairs (kept under @metadata so they are
	# never indexed by downstream outputs) to a CSV file.
	# NOTE(review): "a/path/" looks like a placeholder and ends with "/" --
	# the csv output expects a file path, not a directory; confirm before use.
	csv {
		path => "a/path/"
		csv_options => {"col_sep" => ","}
		fields => [ "[@metadata][misp_key]", "[@metadata][misp_value]" ]
		flush_interval => 0
	}



    # elasticsearch {
	# 	hosts => ["https://localhost:9200"]
	# 	index => "misp"
	# 	user => logstash
	# 	password => pass
	# 	ssl => true
	# 	cacert => './ca.pem'
	# 	http_compression => true
	# 	sniffing => false
	# }

	# Debug output: print every event as JSON to the container logs.
    stdout { codec => json }
}

If that were the configuration you were running you would not get that error. So the question is what configuration you are running.

What is the command line you are using to start logstash and what is the value of path.config?

Logstash is running as a container and that is command to start it:

docker run --rm --network="host" -d -v ~/Desktop/logstash/pipeline/:/usr/share/logstash/pipeline/ -v ~/Desktop/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml docker.elastic.co/logstash/logstash:7.13.0

I'm unfamiliar with that path.config setting. In which directory is it located? As you can see from my command, I provide only the logstash.yml and the pipeline folder; that folder contains a file named logstash.conf, which is the .conf file I posted initially in my question.

Update: I found out that the default path.config is "/usr/share/logstash/pipeline", so that should be it in my case, as I don't change Logstash's entire config folder — just the logstash.yml.

If you have path.config set to a directory then logstash will concatenate all of the files in the directory to create the configuration. Are there any other files in the directory?

That was it — I had left another file without a .conf extension in that folder, and it was breaking everything.
It's strange that Logstash concatenates all the files in the directory, but now it works.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.