I get this error:
[2021-08-08T20:15:12,553][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2021-08-08T20:15:13,441][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", \"input\", \"filter\", \"output\" at line 1, column 1 (byte 1)", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:187:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:47:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:52:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:389:in `block in converge_state'"]}
[2021-08-08T20:15:13,530][INFO ][logstash.runner ] Logstash shut down.
[2021-08-08T20:15:13,538][FATAL][org.logstash.Logstash ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:747) ~[jruby-complete-9.2.16.0.jar:?]
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:710) ~[jruby-complete-9.2.16.0.jar:?]
at usr.share.logstash.lib.bootstrap.environment.<main>(/usr/share/logstash/lib/bootstrap/environment.rb:89) ~[?:?]
Keep in mind that I'm running Logstash in a container.
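Roughly how I start the container (simplified; the image tag and host path are placeholders, not my exact command) — the config directory is mounted over the official image's pipeline directory:

docker run --rm -v "$(pwd)/pipeline/:/usr/share/logstash/pipeline/" docker.elastic.co/logstash/logstash:<version>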
Here is the .conf file:
input {
  http_poller {
    urls => {
      mispevents => {
        method => post
        url => "https://misp.local/attributes/restsearch"
        headers => {
          Authorization => "mykey"
          "Content-Type" => "application/json"
          Accept => "application/json"
        }
        body => '{"returnFormat":"csv","type":{"OR": ["ip-dst","ip-dst|port", "ip-src", "ip-src|port"]}, "timestamp": "1h" }'
      }
    }
    cacert => "/usr/share/logstash/pipeline/local.crt"
    schedule => { every => "5m" }
    codec => "line"
    keepalive => false
    request_timeout => 180
    socket_timeout => 180
  }
}
filter {
  if [message] == "" {
    drop {}
  }
  csv {
    skip_header => "true"
    columns => ["uuid", "event_id", "category", "type", "value", "comment", "to_ids", "date", "object_relation", "object_uuid", "object_name", "object_meta_category"]
    add_field => { "priority" => "6" "lookupType" => "feed" }
    remove_field => ["to_ids", "object_relation", "object_uuid", "object_name", "object_meta_category"]
  }
  if [comment] == "" {
    mutate { replace => { "comment" => " " } }
  }
  mutate {
    add_field => { "misp_date" => "%{date}" }
  }
  # ip-dst or ip-src - Attribute Types
  # Define the document id as the IP address without the "." - to prevent duplicate entries in the ES index
  if ([type] == "ip-dst|port") or ([type] == "ip-src|port") {
    grok {
      match => {
        "value" => "(?<Address>\d+\.\d+\.\d+\.\d+)\|(?<Port>\d+)"
      }
    }
    if [Address] {
      mutate {
        copy => { "Address" => "[source][address]" }
        copy => { "[source][address]" => "[source][ip]" }
        copy => { "[source][ip]" => "[destination][address]" }
        copy => { "[destination][address]" => "[destination][ip]" }
      }
    }
    if [Port] {
      mutate {
        copy => { "Port" => "[source][port]" }
        copy => { "[source][port]" => "[destination][port]" }
      }
    }
    mutate {
      add_field => {
        "[@metadata][misp_key]" => "%{Address}:%{Port}"
        "[@metadata][misp_value]" => "%{category}~%{comment}~%{priority}~%{type}~%{event_id}~%{misp_date}"
        "[@metadata][documentid]" => "%{Address}%{Port}"
      }
    }
  }
  # if [@metadata][misp_key] {
  #   mutate {
  #     strip => ["[@metadata][misp_key]"]
  #   }
  #   # lookup for the key in a csv file
  #   ruby {
  #     path => "/opt/logstash7/scripts/mispKeyLookup.rb"
  #     script_params => { "source_field" => "[@metadata][misp_key]" "lookupType" => "lookupType" "targetField" => "MispKeyAction" }
  #   }
  # }
  if [MispKeyAction] != "add" {
    drop {}
  }
  if [@metadata][documentid] {
    mutate {
      gsub => [ "[@metadata][documentid]", "\.", "-" ]
    }
  }
  mutate {
    remove_field => ["message", "MispKeyAction", "date", "Address", "Port"]
  }
}
output {
  csv {
    path => "a/path/"
    csv_options => { "col_sep" => "," }
    fields => [ "[@metadata][misp_key]", "[@metadata][misp_value]" ]
    flush_interval => 0
  }
  # elasticsearch {
  #   hosts => ["https://localhost:9200"]
  #   index => "misp"
  #   user => logstash
  #   password => pass
  #   ssl => true
  #   cacert => './ca.pem'
  #   http_compression => true
  #   sniffing => false
  # }
  stdout { codec => json }
}