What do you mean they are different?
The screenshot is from the Kibana UI, under Management > Logstash > Pipelines. You're saying the API calls aren't for those, but for pipelines built into Elasticsearch itself (the ingest pipelines)?
So the pipelines I'm looking for would be considered parsers?
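If that's right, I assume I could list the built-in ones from Kibana Dev Tools with something like:

GET _ingest/pipeline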
As an example, these are the three files in a parsers folder for some logs named BGCS:
00-kafka-input.conf

input {
  kafka {
    bootstrap_servers  => "${ZOOKEEPER1},${ZOOKEEPER2},${ZOOKEEPER3}"
    topics             => ["bgcs-logstash"]
    group_id           => "bgcs-logstash"
    codec              => "json"
    session_timeout_ms => "30000"
    max_poll_records   => "250"
    consumer_threads   => 4
    decorate_events    => true
  }
}

filter {
  mutate {
    # decorate_events puts topic/partition/offset under [@metadata][kafka];
    # copying it out of @metadata makes it part of the indexed event
    copy => { "[@metadata][kafka]" => "[metadata][kafka]" }
  }
}
10-bgcs-filter.conf

filter {
  if [type] == "bgcs" {
    grok {
      match => ["message", "(?m)\[(?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME})\] %{WORD:level}\s+\[%{NOTSPACE:class}\] %{GREEDYDATA:Message}"]
    }
    date {
      locale => "en"
      match  => ["timestamp", "YY-MM-dd HH:mm:ss,SSS"]
    }
  }
  else if [type] == "WLP" {
    grok {
      match     => ["message", "(?m)\[%{DATE:date} %{TIME:time} %{WORD:tz2}] %{WORD:ThreadID} %{NOTSPACE:ShortName}\s*%{WORD:EventType}\s*%{GREEDYDATA:Message}"]
      add_field => [ "timestamp", "%{date} %{time} %{tz2}" ]
    }
    # Optional extractions; tag_on_failure => [] keeps _grokparsefailure
    # off events where these patterns don't match
    grok {
      match          => [ "Message", "(?<MessageCode>[A-Z]{4,5}[0-9]{4}[A-Z]):" ]
      tag_on_failure => []
    }
    grok {
      match          => [ "Message", "\(\(Tenant=%{NOTSPACE:tenant}\)\)" ]
      tag_on_failure => []
    }
    grok {
      match          => [ "Message", "\(\(User=%{NOTSPACE:user}\)\)" ]
      tag_on_failure => []
    }
    mutate {
      # rewrite the "CEST" zone name to a numeric offset the date filter can parse
      gsub => ["timestamp", "CEST", "+0200"]
    }
    date {
      locale => "en"
      match  => [
        "timestamp",
        "MM/dd/YY HH:mm:ss:SSS zzz", "M/d/YY HH:mm:ss:SSS zzz",
        "MM/d/YY HH:mm:ss:SSS zzz",  "M/dd/YY HH:mm:ss:SSS zzz",
        "MM/dd/YY H:mm:ss:SSS zzz",  "M/d/YY H:mm:ss:SSS zzz",
        "MM/d/YY H:mm:ss:SSS zzz",   "M/dd/YY H:mm:ss:SSS zzz",
        "MM/dd/YY HH:mm:ss:SSS ZZZ", "M/d/YY HH:mm:ss:SSS ZZZ",
        "MM/d/YY HH:mm:ss:SSS ZZZ",  "M/dd/YY HH:mm:ss:SSS ZZZ",
        "MM/dd/YY H:mm:ss:SSS ZZZ",  "M/d/YY H:mm:ss:SSS ZZZ",
        "MM/d/YY H:mm:ss:SSS ZZZ",   "M/dd/YY H:mm:ss:SSS ZZZ",
        "M/dd/YY HH:mm:ss:SSS Z",    "M/dd/YY HH:mm:ss:SSS z"
      ]
    }
  }
}
20-elasticsearch.output.conf

output {
  if [type] == "bgcs" {
    elasticsearch {
      document_type   => "bgcs"
      hosts           => ["${ELASTICSEARCH1}", "${ELASTICSEARCH2}", "${ELASTICSEARCH3}"]
      ssl             => true
      cacert          => "ca.der"
      index           => "logstash-bgcs-%{+xxxx.ww}"
      user            => "${ELASTICUSER}"
      password        => "${ELASTICPASSWORD}"
      manage_template => false
    }
  }
  else if [type] == "WLP" {
    elasticsearch {
      document_type   => "bgcs"
      hosts           => ["${ELASTICSEARCH1}", "${ELASTICSEARCH2}", "${ELASTICSEARCH3}"]
      ssl             => true
      cacert          => "ca.der"
      index           => "logstash-bgcs-wlp-%{+xxxx.ww}"
      user            => "${ELASTICUSER}"
      password        => "${ELASTICPASSWORD}"
      manage_template => false
    }
  }
}
From what I'm understanding (from you pointing me the right way, thanks again), each of these parser config files would need to be converted over to Elasticsearch ingest pipelines, keeping the grok logic as grok processors wherever the filters use it.
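So if I've got that right, the bgcs branch of 10-bgcs-filter.conf would become something like this ingest pipeline? Just a rough sketch to check my understanding: the pipeline name is my own, and I'm guessing the date format needs converting to Java time syntax (so "yyyy-MM-dd" instead of "YY-MM-dd"):

PUT _ingest/pipeline/bgcs
{
  "description": "bgcs branch of 10-bgcs-filter.conf (rough sketch)",
  "processors": [
    {
      "grok": {
        "field": "message",
        "patterns": ["(?m)\\[(?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME})\\] %{WORD:level}\\s+\\[%{NOTSPACE:class}\\] %{GREEDYDATA:Message}"]
      }
    },
    {
      "date": {
        "field": "timestamp",
        "formats": ["yyyy-MM-dd HH:mm:ss,SSS"]
      }
    }
  ]
}

And the [type] == "WLP" branch would either be a second pipeline or if conditions on the individual processors, I assume.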
Guess I get to go down that rabbit hole of how to convert the hosts, user, password, and all the other stuff in those parsers into whatever can be used in Elastic Cloud.
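For the output side, my reading of the elasticsearch output plugin docs is that cloud_id / cloud_auth would replace the hosts/ssl/cacert/user/password block when pointing at Elastic Cloud, and the pipeline option would route events through the new ingest pipeline. So maybe something like this (values are placeholders, and I gather document_type goes away on newer versions anyway):

output {
  if [type] == "bgcs" {
    elasticsearch {
      cloud_id        => "${CLOUD_ID}"                          # from the Elastic Cloud console (placeholder)
      cloud_auth      => "${ELASTICUSER}:${ELASTICPASSWORD}"
      pipeline        => "bgcs"                                 # the ingest pipeline sketched above
      index           => "logstash-bgcs-%{+xxxx.ww}"
      manage_template => false
    }
  }
}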
Thanks again.