Hi all I am deploy logstash consumer on kubernetes with following specification:
Logstash version logstash:6.6.1
I am picking up all the values from the environment variable, my config file looks like this:
input {
kafka {
type => nginxlogs
bootstrap_servers => ["{KAFKA_BROKERS}"]
group_id => "{NGINXLOGS_CONSUMER_GROUP_ID}"
topics_pattern => ["{NGINXLOGS_KAFKA_TOPIC_PATTERN}"]
consumer_threads => "{NGINXLOGS_CONSUMER_THREADS}"
metadata_max_age_ms => "5000"
partition_assignment_strategy => "org.apache.kafka.clients.consumer.RoundRobinAssignor"
codec => "json"
add_field => {
"broker" => "input_from_nginxlogs_topic"
}
}
kafka {
type => applicationlogs
bootstrap_servers => ["{KAFKA_BROKERS}"]
group_id => "{APPLICATIONLOGS_CONSUMER_GROUP_ID}"
topics_pattern => ["{APPLICATIONLOGS_KAFKA_TOPIC_PATTERN}"]
consumer_threads => "{APPLICATIONLOGS_CONSUMER_THREADS}"
metadata_max_age_ms => "5000"
partition_assignment_strategy => "org.apache.kafka.clients.consumer.RoundRobinAssignor"
codec => "json"
add_field => {
"broker" => "input_from_applicationlogs_topic"
}
}
kafka {
type => containerconsolelogs
bootstrap_servers => ["{KAFKA_BROKERS}"]
group_id => "{CONTAINERCONSOLELOGS_CONSUMER_GROUP_ID}"
topics_pattern => ["{CONTAINERCONSOLELOGS_KAFKA_TOPIC_PATTERN}"]
consumer_threads => "{CONTAINERCONSOLELOGS_CONSUMER_THREADS}"
metadata_max_age_ms => "5000"
partition_assignment_strategy => "org.apache.kafka.clients.consumer.RoundRobinAssignor"
codec => "json"
add_field => {
"broker" => "input_from_containerconsolelogs_topic"
}
}
}
output {
if [type] == "nginxlogs"{
elasticsearch {
hosts => "{ELASTICSEARCH_HOSTS}"
index => '"{NGINXLOGS_KAFKA_TOPIC_PATTERN}"-%{+YYYY.MM}'
}
}
else if [type] == "applicationlogs"{
elasticsearch {
hosts => "{ELASTICSEARCH_HOSTS}"
index => '"{APPLICATIONLOGS_KAFKA_TOPIC_PATTERN}"-%{+YYYY.MM}'
}
}
else if [type] == "containerconsolelogs"{
elasticsearch {
hosts => "{ELASTICSEARCH_HOSTS}"
index => '"{CONTAINERCONSOLELOGS_KAFKA_TOPIC_PATTERN}"-%{+YYYY.MM}'
}
}
else {
null { }
}
}
Issue I am facing is When I hardcode the same elasticsearch values in the file it works, but when I pass the same values in the environment variable it throughs error saying:
[2019-02-27T11:21:56,791][FATAL][logstash.runner ] The given configuration is invalid. Reason: Illegal character in scheme name at index 0: ["172.23.130.220:9200","172.23.130.164:9200","172.23.130.116:9200","172.23.130.109:9200"]
I tried all possible permutation and combination didn't get anything.
Environment variables for reference:
export NGINXLOGS_KAFKA_TOPIC_PATTERN="nginxlogs"
export NGINXLOGS_CONSUMER_GROUP_ID="nginxlogs"
export NGINXLOGS_CONSUMER_THREADS="5"
export APPLICATIONLOGS_KAFKA_TOPIC_PATTERN="applicationlogs"
export APPLICATIONLOGS_CONSUMER_GROUP_ID="applicationlogs"
export APPLICATIONLOGS_CONSUMER_THREADS="3"
export CONTAINERCONSOLELOGS_KAFKA_TOPIC_PATTERN="containerconsolelogs"
export CONTAINERCONSOLELOGS_CONSUMER_GROUP_ID="containerconsolelogs"
export CONTAINERCONSOLELOGS_CONSUMER_THREADS="5"
export BATCH_DELAY="5"
export BATCH_SIZE="200"
export KAFKA_BROKERS=a0a5398d9e8d-808076471.eu-west-1.elb.amazonaws.com:9092,
aed098d9e8d-808076471.eu-west-1.elb.amazonaws.com:9092
export ELASTICSEARCH_HOSTS="["172.23.130.220:9200","172.23.130.164:9200","172.23.130.116:9200","172.23.130.109:9200"]"