Logstash 6.4.x : reading multiple files in conf.d

Hello
Thanks you Christian for your interest.

I create 2 separate index, my 2 conf files are:

root@kvm:~# cat /etc/logstash/conf.d/auth.conf
input {
tcp {
    port => "5001"
   codec => json
    tags => ["syslogauth"]
}
}

filter {
    grok {
            named_captures_only => false
            break_on_match => true
            match => { "message" => [" New session %{NUMBER} of user %{USERNAME:user}."," Accepted password for %{USERNAME:user} from %{IP:ip} port %{NUMBER} ssh2"," Failed password for %{USERNAME:user} from %{IP} port %{NUMBER} ssh2"," Accepted publickey for %{USERNAME} from %{IP:ip} port %{NUMBER} ssh2"] }
}


if "_grokparsefailure" in [tags] {
  drop { }
}
}

output {
    if "syslogauth" in [tags] {
     elasticsearch {
    hosts => [ "localhost:9200" ]
    index => "auth"
}
    }
}

And

root@kvm:~# cat /etc/logstash/conf.d/iptables.conf
input {
    tcp {
        port => "5003"
       codec => "json"
        tags => ["iptables"]
    }
}
# The filter part of this file is commented out to indicate that it is
# optional.
 filter {

#  IPTABLES DROP/REJECT/ACCEPT

  grok {
        named_captures_only => false
        break_on_match => true
        patterns_dir => "/etc/logstash/iptables.pattern"
        match => { "message" => "%{IPTABLES}"}
         }
# convertir string en int pour elasticsearch
mutate {
    convert => {
        "src_port" => "integer"
        "dst_port" => "integer"
        }
      remove_field => [ "host" ] # kludge pour ne pas avoir d'erreur dans les logs a propos du champ "host"
   }

geoip {
        source => "src_ip"
    }
geoip {
        source => "dst_ip"
    }
if "_grokparsefailure" in [tags] {
  drop { }
}
}
output {
    if "iptables" in [tags] {
        elasticsearch {
                hosts => [ "localhost:9200" ]
                index => "iptables"
        }
    }
}

In this configuration the indexes are not create in elasticsearch.
When i removed one of them, it works perfectly

I have installed the deb package without no specific configuration

The logs seems good

[2018-09-21T22:10:16,031][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2018-09-21T22:10:16,051][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-09-21T22:10:16,069][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-09-21T22:10:16,755][INFO ][logstash.filters.geoip   ] Using geoip database {:path=>"/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"}
[2018-09-21T22:10:16,843][INFO ][logstash.filters.geoip   ] Using geoip database {:path=>"/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"}
[2018-09-21T22:10:16,957][INFO ][logstash.inputs.tcp      ] Starting tcp input listener {:address=>"0.0.0.0:5001", :ssl_enable=>"false"}
[2018-09-21T22:10:17,313][INFO ][logstash.inputs.tcp      ] Starting tcp input listener {:address=>"0.0.0.0:5003", :ssl_enable=>"false"}
[2018-09-21T22:10:17,397][INFO ][logstash.pipeline        ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x6c326101 sleep>"}
[2018-09-21T22:10:17,482][INFO ][logstash.agent           ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2018-09-21T22:10:18,030][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}

And Elasticsearch

[2018-09-22T01:30:00,000][INFO ][o.e.x.m.MlDailyMaintenanceService] triggering scheduled [ML] maintenance tasks
[2018-09-22T01:30:00,035][INFO ][o.e.x.m.a.TransportDeleteExpiredDataAction] [LRjRHtV] Deleting expired data
[2018-09-22T01:30:00,045][INFO ][o.e.x.m.a.TransportDeleteExpiredDataAction] [LRjRHtV] Completed deletion of expired data
[2018-09-22T01:30:00,045][INFO ][o.e.x.m.MlDailyMaintenanceService] Successfully completed [ML] maintenance tasks

tcpdump shows syslog sent on 5001 but not 9200

root@kvm:~# tcpdump -i br0 -nn -vvv -s 0 port 5001 -w -
tcpdump: listening on br0, link-type EN10MB (Ethernet), capture size 262144 bytes
  {"@timestamp":"2018-09-22T09:17:34.864681+02:00","@version":"1","message":" Accepted publickey for arnaud from 192.168.0.60 port 51458 ssh2","sysloghost":"gibson","severity:"info","facility":"authpriv","programname":"sshd","procid":"10520"}

{@timestamp":"2018-09-22T09:17:34.871754+02:00","@version":"1","message":" pam_unix(sshd:session): session opened for user arnaud by (uid=0)","sysloghost":"gibson",etc..."}

and

root@kvm:~# tcpdump -i lo -nn -vvv -s 0 port 9200 -w -
tcpdump: listening on lo, link-type EN10MB (Ethernet), capture size 262144 bytes
HEAD / HTTP/1.1
Host: localhost:9200
Content-Length: 0
Connection: keep-alive

HTTP/1.1 200 OK
content-type: application/json; charset=UTF-8
content-length: 493

GET /_nodes?filter_path=nodes.*.version%2Cnodes.*.http.publish_address%2Cnodes.*.ip
Host: localhost:9200
Content-Length: 0
Connection: keep-alived

not data syslog on 9200 but i works well when i have only auth.conf in /etc/logstash/conf.d