Cannot create datastream under new index template

Hi

I am running Logstash in Kubernetes under ECK. I am ingesting both logs (Filebeat) and metrics (Metricbeat). Now I want to ingest data from Heartbeat, but I get the following error:

[2023-11-14T13:22:23,598][INFO ][logstash.outputs.elasticsearch][main][80b6fcbcae1c789247b66f08ff48b2bde70d63d576edb68ec43e28cae9649bcd] Retrying failed action {:status=>403, :action=>["create", {:_id=>nil, :_index=>"testtype-http-straffe.skifte", :routing=>nil}, {"event"=>{"dataset"=>"http"}, "headers"=>{"http_accept"=>nil, "http_version"=>"HTTP/1.1", "accept_encoding"=>"gzip,deflate", "request_method"=>"POST", "x_forwarded_scheme"=>"https", "http_host"=>"logstash.elastic-app-logging.domdev.lan", "http_user_agent"=>"Manticore 0.9.1", "x_forwarded_port"=>"443", "x_forwarded_for"=>"10.209.254.65", "x_forwarded_host"=>"logstash.elastic-app-logging.domdev.lan", "x_real_ip"=>"10.209.254.65", "x_scheme"=>"https", "content_type"=>"application/json", "x_forwarded_proto"=>"https", "x_request_id"=>"1310b8c7d7f380d50851e23aaa5c29bc", "request_path"=>"/", "content_length"=>"340700"}, "summary"=>{"up"=>1, "down"=>0}, "@version"=>"1", "http"=>{"response"=>{"status_code"=>200, "body"=>{"hash"=>"caf313a2a2492aab037f25ea846572b4d2dd0dbf0c29936da7c13d9b4e005224", "bytes"=>243}, "mime_type"=>"application/json", "headers"=>{"X-Xss-Protection"=>"0", "Date"=>"Tue, 14 Nov 2023 13:11:19 GMT", "X-Frame-Options"=>"DENY", "Expires"=>"0", "Vary"=>["Origin", "Access-Control-Request-Method", "Access-Control-Request-Headers"], "Content-Type"=>"application/vnd.spring-boot.actuator.v3+json", "Cache-Control"=>"no-cache, no-store, max-age=0, must-revalidate", "X-Content-Type-Options"=>"nosniff", "Pragma"=>"no-cache"}}, "rtt"=>{"write_request"=>{"us"=>42}, "validate"=>{"us"=>780}, "response_header"=>{"us"=>603}, "total"=>{"us"=>930}, "content"=>{"us"=>176}}}, "agent"=>{"ephemeral_id"=>"b505ecd6-f6cd-4403-b948-4ad5009aa940", "type"=>"heartbeat", "id"=>"37825afd-a98a-4c5b-b514-af668a5bda5d", "version"=>"8.10.2", "name"=>"heartbeat-7d5985d586-2n6jf"}, "ecs"=>{"version"=>"8.0.0"}, "kubernetes"=>{"namespace_labels"=>{"field_cattle_io/projectId"=>"p-8gvfm", "kubernetes_io/metadata_name"=>"development", "argocd_argoproj_io/instance"=>"solution-development-development-wrapper"}, "container"=>{"name"=>"spring"}, "namespace_uid"=>"7d3a393e-a64c-4cf0-a75f-1b0c8df6e172", "labels"=>{"release"=>"cpr-abonnement", "pod-template-hash"=>"6658849bd6", "type"=>"application", "appX"=>"spring"}, "pod"=>{"uid"=>"ec339645-196a-48e7-bd1c-497297b0bdd0", "ip"=>"10.42.7.165", "name"=>"cpr-abonnement-spring-6658849bd6-q2qw6"}, "replicaset"=>{"name"=>"cpr-abonnement-spring-6658849bd6"}, "namespace"=>"development", "deployment"=>{"name"=>"cpr-abonnement-spring"}, "node"=>{"uid"=>"68adf4d6-a273-4383-af8a-d97233cb6cd7", "hostname"=>"rke-node-69", "name"=>"rke-node-69", "labels"=>{"node_longhorn_io/create-default-disk"=>"config", "beta_kubernetes_io/arch"=>"amd64", "kubernetes_io/arch"=>"amd64", "node_kubernetes_io/instance-type"=>"rke2", "kubernetes_io/hostname"=>"rke-node-69", "beta_kubernetes_io/instance-type"=>"rke2", "beta_kubernetes_io/os"=>"linux", "kubernetes_io/os"=>"linux"}}}, "container"=>{"runtime"=>"containerd", "id"=>"b1bd42718c08d4522eaed67e4c979c8f0e7e81025c2f2e36a4c82995c8b673a5", "image"=>{"name"=>"sosconreg.azurecr.io/cpr-abonnement-service:2023-11-14T08.14.01.254776358"}}, "@timestamp"=>2023-11-14T13:11:19.944Z, "monitor"=>{"id"=>"auto-http-0XB2965F489EF2DE6A-85eb9366d9e99796", "name"=>"", "check_group"=>"47433775-82ef-11ee-99ed-2af066366adf", "type"=>"http", "ip"=>"10.42.7.165", "duration"=>{"us"=>980}, "status"=>"up", "timespan"=>{"gte"=>"2023-11-14T13:11:19.945Z", "lt"=>"2023-11-14T13:11:29.945Z"}}, "tags"=>["beats_input_raw_event"], 
"tcp"=>{"rtt"=>{"connect"=>{"us"=>121}}}, "state"=>{"id"=>"default-18bcda05868-0", "ends"=>nil, "started_at"=>"2023-11-14T11:38:39.592827403Z", "flap_history"=>, "up"=>1113, "down"=>0, "checks"=>1113, "status"=>"up", "duration_ms"=>"5560352"}, "url"=>{"domain"=>"10.42.7.165", "path"=>"/actuator/health/readiness", "scheme"=>"http", "full"=>"http://10.42.7.165:8081/actuator/health/readiness", "port"=>8081}, "host"=>"10.42.2.4", "service"=>{"environment"=>"development"}, "data_stream"=>{"type"=>"testtype", "namespace"=>"straffe.skifte", "dataset"=>"http"}}], :error=>{"type"=>"security_exception", "reason"=>"action [indices:data/write/bulk[s]] is unauthorized for user [elasticsearch-logstash-application-logging-elasticsearch-elasticsearch-application-logging-logstash-user] with effective roles [eck_logstash_user_role] on indices [testtype-http-straffe.skifte], this action is granted by the index privileges [create_doc,create,delete,index,write,all]"}}

My Logstash config looks like this:

{{- with $.Values.elastic }}
apiVersion: logstash.k8s.elastic.co/v1alpha1
kind: Logstash
metadata:
  name: logstash-{{ .clusterName }}
  namespace: {{ .namespace }}
spec:
  version: {{ .version }}
  count: 2
  podTemplate:
    spec:
      containers:
      - name: logstash
        resources:
          requests:
            memory: 2Gi
            cpu: 1
          limits:
            memory: 4Gi
  elasticsearchRefs:
  - name: elasticsearch-{{ .clusterName }}
    clusterName: elastic
  config: 
    pipeline:
      workers: 4
  services:
  - name: http
    service:
      spec:
        ports:
        - port: 8080
          name: "http"
          protocol: TCP
          targetPort: 8080
  pipelines:
    - pipeline.id: main
      config.string: |
        input {
          http {
            port => 8080
            codec => json
            ecs_compatibility => 'disabled'
          }
        }
        
        output {
          #stdout {
          #  codec => rubydebug
          #}
          elasticsearch {   
            hosts => [ "${ELASTIC_ES_HOSTS}" ]
            user => "${ELASTIC_ES_USER}"
            password => "${ELASTIC_ES_PASSWORD}"
            ssl_certificate_authorities => "${ELASTIC_ES_SSL_CERTIFICATE_AUTHORITY}"
            data_stream => "true"
          }
        }
{{- end }}

Everything works fine for all data where data_stream.type is "logs" or "metrics", for which I have existing index templates. But even though I have created a new index template for "testtype", I get the error above. If I change the data_stream.type in the config above to "metrics", it works. Why?

Br
Casper

This does not work: Logstash does not support custom data stream types. The only supported types, as described in the documentation, are:

  • logs
  • metrics
  • synthetics
  • traces

This is validated in the code:

base.config :data_stream_type, :validate => ['logs', 'metrics', 'synthetics', 'traces'], :default => 'logs'
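
So a pipeline that sets an unsupported type explicitly, like this hypothetical sketch, is rejected when the pipeline starts and never reaches Elasticsearch:

output {
  elasticsearch {
    hosts => ["HOSTS"]
    data_stream => true
    data_stream_type => "testtype" # fails validation: not one of logs, metrics, synthetics, traces
  }
}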

If you want to use a custom data stream type, you need to use an output like this:

output {
    elasticsearch {
        hosts => ["HOSTS"]
        index => "data-stream-name"  # name of the target data stream
        action => "create"           # data streams only accept "create" operations
        http_compression => true
        data_stream => false         # disable data stream mode so the custom type is not validated
        manage_template => false     # the index template is managed in Elasticsearch, not by Logstash
        ilm_enabled => false         # ILM comes from the data stream's index template
        cacert => 'ca.crt'
        user => 'USER'
        password => 'PASSWORD'
    }
}

You will also need a data stream index template whose pattern matches the data-stream-name, which in your example would be testtype-http-straffe.skifte.
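
If you don't have one yet, you can create it with the index template API; a minimal sketch (Dev Tools syntax; pick a priority above any overlapping templates):

PUT _index_template/testtype-http-straffe.skifte
{
  "index_patterns": ["testtype-http-straffe.skifte"],
  "data_stream": {},
  "priority": 200
}

With "data_stream": {} in the template, the first document written with action => "create" creates the data stream automatically.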

I had the same problem a couple of months ago and opened this GitHub issue.

Hi

Thank you for the quick response. Sometimes it helps to read the documentation :see_no_evil:

Br
Casper

Hi again

Unfortunately, I still get an error after changing the config:

{{- with $.Values.elastic }}
apiVersion: logstash.k8s.elastic.co/v1alpha1
kind: Logstash
metadata:
  name: logstash-{{ .clusterName }}
  namespace: {{ .namespace }}
spec:
  version: {{ .version }}
  count: 2
  podTemplate:
    spec:
      containers:
      - name: logstash
        resources:
          requests:
            memory: 2Gi
            cpu: 1
          limits:
            memory: 4Gi
  elasticsearchRefs:
  - name: elasticsearch-{{ .clusterName }}
    clusterName: elastic
  config: 
    pipeline:
      workers: 4
  services:
  - name: http
    service:
      spec:
        ports:
        - port: 8080
          name: "http"
          protocol: TCP
          targetPort: 8080
  pipelines:
    - pipeline.id: main
      config.string: |
        input {
          http {
            port => 8080
            codec => json
            ecs_compatibility => 'disabled'
          }
        }
        
        output {
          #stdout {
          #  codec => rubydebug
          #}
          elasticsearch {   
            hosts => [ "${ELASTIC_ES_HOSTS}" ]
            user => "${ELASTIC_ES_USER}"
            password => "${ELASTIC_ES_PASSWORD}"
            ssl_certificate_authorities => "${ELASTIC_ES_SSL_CERTIFICATE_AUTHORITY}"
            index => "%{[data_stream][type]}-%{[data_stream][dataset]}-%{[data_stream][namespace]}" # Dynamic index name
            action => "create"
            http_compression => true
            data_stream => false
            manage_template => false
            ilm_enabled => false            
          }
        }
{{- end }}

I have created an index template that looks like this:

{
  "priority": 2000,
  "template": {
    "settings": {
      "index": {
        "lifecycle": {
          "name": "heartbeats-straffe-skifte"
        }
      }
    },
    "mappings": {
      "dynamic_templates": []
    }
  },
  "index_patterns": [
    "heartbeats-*-straffe.skifte*"
  ],
  "data_stream": {
    "hidden": false,
    "allow_custom_routing": false
  },
  "composed_of": [
    "straffe-skifte-mappings"
  ]
}

Error:
[2023-11-14T14:38:51,687][INFO ][logstash.outputs.elasticsearch][main][97a9d830d7fa7ae9aa3a161f29bea709c10e2f758af60898d5444b6925ee4e9b] Retrying failed action {:status=>403, :action=>["create", {:_id=>nil, :_index=>"heartbeats-http-straffe.skifte", :routing=>nil}, {"agent"=>{"type"=>"heartbeat", "id"=>"37825afd-a98a-4c5b-b514-af668a5bda5d", "version"=>"8.10.2", "name"=>"heartbeat-7d5985d586-2n6jf", "ephemeral_id"=>"b505ecd6-f6cd-4403-b948-4ad5009aa940"}, "ecs"=>{"version"=>"8.0.0"}, "service"=>{"environment"=>"development"}, "event"=>{"dataset"=>"http"}, "monitor"=>{"id"=>"auto-http-0X16581C2EEA53827E-56159b94b2a29820", "duration"=>{"us"=>2962}, "name"=>"", "type"=>"http", "status"=>"up", "ip"=>"10.42.7.167", "timespan"=>{"gte"=>"2023-11-14T13:25:27.892Z", "lt"=>"2023-11-14T13:25:37.892Z"}, "check_group"=>"40ad8a3b-82f1-11ee-99ed-2af066366adf"}, "http"=>{"rtt"=>{"content"=>{"us"=>184}, "total"=>{"us"=>2907}, "validate"=>{"us"=>2752}, "response_header"=>{"us"=>2568}, "write_request"=>{"us"=>25}}, "response"=>{"status_code"=>200, "headers"=>{"Expires"=>"0", "Pragma"=>"no-cache", "Date"=>"Tue, 14 Nov 2023 13:25:26 GMT", "Vary"=>["Origin", "Access-Control-Request-Method", "Access-Control-Request-Headers"], "X-Content-Type-Options"=>"nosniff", "Content-Type"=>"application/vnd.spring-boot.actuator.v3+json", "Cache-Control"=>"no-cache, no-store, max-age=0, must-revalidate", "X-Frame-Options"=>"DENY", "X-Xss-Protection"=>"0"}, "mime_type"=>"application/json", "body"=>{"hash"=>"b4eea72c546204f220d9424dd6060b51f12ace5745b77ee926a5bfd2855fa6a0", "bytes"=>243}}}, "tags"=>["beats_input_raw_event"], "tcp"=>{"rtt"=>{"connect"=>{"us"=>125}}}, "kubernetes"=>{"namespace"=>"development", "deployment"=>{"name"=>"authorization-spring"}, "labels"=>{"release"=>"authorization", "appX"=>"spring", "type"=>"application", "pod-template-hash"=>"b547fd487"}, "namespace_labels"=>{"kubernetes_io/metadata_name"=>"development", "argocd_argoproj_io/instance"=>"solution-development-development-wrapper", "field_cattle_io/projectId"=>"p-8gvfm"}, "node"=>{"hostname"=>"rke-node-69", "labels"=>{"node_longhorn_io/create-default-disk"=>"config", "kubernetes_io/arch"=>"amd64", "beta_kubernetes_io/arch"=>"amd64", "node_kubernetes_io/instance-type"=>"rke2", "beta_kubernetes_io/os"=>"linux", "kubernetes_io/os"=>"linux", "kubernetes_io/hostname"=>"rke-node-69", "beta_kubernetes_io/instance-type"=>"rke2"}, "name"=>"rke-node-69", "uid"=>"68adf4d6-a273-4383-af8a-d97233cb6cd7"}, "container"=>{"name"=>"spring"}, "replicaset"=>{"name"=>"authorization-spring-b547fd487"}, "namespace_uid"=>"7d3a393e-a64c-4cf0-a75f-1b0c8df6e172", "pod"=>{"ip"=>"10.42.7.167", "name"=>"authorization-spring-b547fd487-zmzsm", "uid"=>"f9b68c03-58e8-41d1-952a-4a673d16c1df"}}, "@timestamp"=>2023-11-14T13:25:27.889Z, "summary"=>{"up"=>1, "down"=>0}, "container"=>{"runtime"=>"containerd", "image"=>{"name"=>"sosconreg.azurecr.io/authorization-service:2023-11-14T08.13.57.879251107"}, "id"=>"2e1029686249f83530fa1343b12673917ad3f225a0cd9b2d92d61e49868d5ffa"}, "headers"=>{"http_user_agent"=>"Manticore 0.9.1", "request_method"=>"POST", "http_accept"=>nil, "content_type"=>"application/json", "http_version"=>"HTTP/1.1", "x_forwarded_port"=>"443", "x_scheme"=>"https", "content_length"=>"355590", "x_forwarded_for"=>"10.209.254.62", "x_forwarded_proto"=>"https", "x_request_id"=>"0a97bffc8d15681580455ee6aaabc573", "request_path"=>"/", "x_forwarded_host"=>"logstash.elastic-app-logging.domdev.lan", "x_forwarded_scheme"=>"https", "accept_encoding"=>"gzip,deflate", 
"http_host"=>"logstash.elastic-app-logging.domdev.lan", "x_real_ip"=>"10.209.254.62"}, "@version"=>"1", "data_stream"=>{"namespace"=>"straffe.skifte", "type"=>"heartbeats", "dataset"=>"http"}, "url"=>{"scheme"=>"http", "domain"=>"10.42.7.167", "full"=>"http://10.42.7.167:8081/actuator/health/readiness", "path"=>"/actuator/health/readiness", "port"=>8081}, "state"=>{"duration_ms"=>"3010188", "id"=>"default-18bcdd43248-0", "checks"=>604, "flap_history"=>, "down"=>0, "ends"=>nil, "up"=>604, "started_at"=>"2023-11-14T12:35:17.704181056Z", "status"=>"up"}, "host"=>"10.42.1.3"}], :error=>{"type"=>"security_exception", "reason"=>"action [indices:data/write/bulk[s]] is unauthorized for user [elasticsearch-logstash-application-logging-elasticsearch-elasticsearch-application-logging-logstash-user] with effective roles [eck_logstash_user_role] on indices [heartbeats-http-straffe.skifte], this action is granted by the index privileges [create_doc,create,delete,index,write,all]"}}

Check the permissions of the user; it seems it does not have permission to write to that index.
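
One way to check (a sketch; run it authenticated as the Logstash user, e.g. with curl -u) is the has-privileges API, using the index name from your error:

POST /_security/user/_has_privileges
{
  "index": [
    {
      "names": [ "heartbeats-http-straffe.skifte" ],
      "privileges": [ "create_doc", "create", "write" ]
    }
  ]
}

The response reports true or false for each privilege on that index.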

I have solved it :slight_smile:

First, create a secret for the Logstash role, adding the new permissions:

kind: Secret
apiVersion: v1
metadata:
  name: eck-logstash-user-role-secret
  namespace: {{ .namespace }}
stringData:
  roles.yml: |-
    eck_logstash_user_role:
      cluster: [ "monitor", "manage_ilm", "read_ilm", "manage_logstash_pipelines", "manage_index_templates", "cluster:admin/ingest/pipeline/get"]
      indices:
      - names: [ "heartbeats-*", "logstash", "logstash-*", "ecs-logstash", "ecs-logstash-*", "logs-*", "metrics-*", "synthetics-*", "traces-*" ]
        privileges: [ "manage", "write", "create_index", "read", "view_index_metadata" ]  

Then add this to the spec of the Elasticsearch resource:

  auth:
    roles:
      - secretName: eck-logstash-user-role-secret  
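
Once Elasticsearch reloads the file-based roles, the has-privileges check from earlier returns true, and the first document indexed with action => "create" creates the data stream automatically. Assuming the index name produced by my Logstash config, you can confirm it exists with (Dev Tools syntax):

GET /_data_stream/heartbeats-http-straffe.skifte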
