The indexes do not show up in Kibana. Here is my Logstash pipeline config:

input {
    gelf {
        port => 12201
    }
}

filter {
    mutate {
        add_field => { "environment" => "${ENVIRONMENT}" }
        add_tag => [ "%{tag}" ]
    }
    if "backend" in [tags]  {
        json {
            source => "message"
            skip_on_invalid_json => "false"
            add_field => { "[@metadata][target_index]" => "epourvoi-app-logs" }
        }
        if "_jsonparsefailure" in [tags] {
            drop { }
        }     
        if "http.log" in [logger] {
            mutate {
                replace => { "message" => "%{[request][method]} %{[request][uri]} %{status}" }
                add_tag => ["caddy"]
                add_field => { "[@metadata][target_index]" => "epourvoi-access-logs" }
            }
        } else {
            mutate {
                add_tag => ["php"]
                add_field => { "[@metadata][target_index]" => "epourvoi-app-logs" }
            }
        }
    }
    if "frontend" in [tags] {
    }
    if "gotenberg" in [tags] {
        json {
            source => "message"
            skip_on_invalid_json => "false"
        }
        if "_jsonparsefailure" in [tags] {
            mutate {
                remove_tag => ["_jsonparsefailure"]
            }
        } else {
            mutate {
                replace => { "message" => "%{[method]} %{[uri]} %{status}" }
            }
        }
    }
}

output {
    if [@metadata][target_index] == "epourvoi-app-logs" {
        elasticsearch {
            hosts => ["${ELASTIC_HOST}"]
            user => "${ELASTIC_USERNAME}"
            password => "${ELASTIC_PASSWORD}"
            ssl_enabled => false
            index => "%{[@metadata][target_index]}-%{+YYYY.MM.dd}"
        }
    } else if [@metadata][target_index] == "epourvoi-access-logs" {
        elasticsearch {
            hosts => ["${ELASTIC_HOST}"]
            user => "${ELASTIC_USERNAME}"
            password => "${ELASTIC_PASSWORD}"
            ssl_enabled => false
            index => "%{[@metadata][target_index]}-%{+YYYY.MM.dd}"
        }
        stdout { codec => rubydebug }
    }
}

The Logstash log:

 [2024-12-19T15:36:08,363][WARN ][logstash.filters.json    ][main][ebb1e4beaf9014f830aaf79bd3fa7b1a04e148ac983e364bb1175e9ac9d6a8df] Error parsing json {:source=>"message", :raw=>"2024/12/19 15:36:08.257\t\e[34mINFO\e[0m\thttp.log.access\tNOP\t{\"request\": {\"remote_ip\": \"10.33.150.2\", \"remote_port\": \"53240\", \"client_ip\": \"150.99.36.14\", \"proto\": \"HTTP/1.1\", \"method\": \"GET\", \"host\": \"\", \"uri\": \"/\", \"headers\": {\"Connection\": [\"Close\"]}}, \"bytes_read\": 0, \"user_id\": \"\", \"duration\": 0.000004851, \"size\": 0, \"status\": 0, \"resp_headers\": {\"Server\": [\"Caddy\"]}}\r", :exception=>#<LogStash::Json::ParserError: Unexpected character ('/' (code 47)): Expected space separating root-level values
 at [Source: REDACTED (`StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION` disabled); line: 1, column: 6]>}

The parse error occurs because the JSON object is preceded by the date, log level, logger name, etc. You need to use dissect, grok, or mutate+gsub to isolate the JSON from the rest of the [message] field before trying to parse it.
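
For example, a minimal sketch of the mutate+gsub approach, assuming the JSON object is always the last part of the line and that no "{" appears in the prefix (both hold for the log entry above):

filter {
    if "backend" in [tags] {
        mutate {
            # Drop everything before the first "{" and the trailing \r,
            # leaving only the JSON object in [message].
            gsub => [
                "message", "^[^{]*", "",
                "message", "\r$", ""
            ]
        }
        json {
            source => "message"
        }
    }
}

Note that this throws away the prefix; if [logger] is not already a separate GELF field, dissect or grok could capture it from the prefix and isolate the JSON in one step.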

You also drop every event that fails JSON parsing, so it is possible you are dropping all of your backend events.

You only set [@metadata][target_index] for events tagged "backend" (and only those that survive the json filter). Events tagged "frontend" or "gotenberg" never get that field, so they match neither output condition and are effectively dropped as well.

I suggest you add a default output for events that do not have [@metadata][target_index] set, and also keep events with a _jsonparsefailure tag and route them somewhere you can inspect them to see if they are fixable.
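
A sketch of both ideas, assuming you remove the drop { } block and adjust for your filter order; the epourvoi-failed-logs and epourvoi-unrouted-logs index names are made up:

filter {
    # Route parse failures to their own index instead of dropping them.
    if "_jsonparsefailure" in [tags] {
        mutate {
            add_field => { "[@metadata][target_index]" => "epourvoi-failed-logs" }
        }
    }
}

output {
    # Default output for events that were never assigned a target index.
    if ![@metadata][target_index] {
        elasticsearch {
            hosts => ["${ELASTIC_HOST}"]
            user => "${ELASTIC_USERNAME}"
            password => "${ELASTIC_PASSWORD}"
            ssl_enabled => false
            index => "epourvoi-unrouted-logs-%{+YYYY.MM.dd}"
        }
    }
}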