Logstash pipeline created in Kibana doesn't work!

I have created a Logstash pipeline in Kibana (the full configuration is at the end of this post for reference).

Here is the result of querying the .logstash index in Elasticsearch. The pipeline document is stored, but it does not seem to be transferring the data properly. However, when I run the same configuration from the command line, everything gets transferred properly. Any idea why? (A sketch of the logstash.yml settings I think are relevant follows the query output below.)

{
  "took" : 1,
  "timed_out" : false,
  "_shards" : {
    "total" : 1,
    "successful" : 1,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 1,
      "relation" : "eq"
    },
    "max_score" : 1.0,
    "hits" : [
      {
        "_index" : ".logstash",
        "_type" : "_doc",
        "_id" : "Test_pipeline",
        "_score" : 1.0,
        "_source" : {
          "description" : "This is a pipeline to test Logstash.",
          "last_modified" : "2019-06-25T21:04:48.551Z",
          "pipeline_metadata" : {
            "version" : 1,
            "type" : "logstash_pipeline"
          },
          "username" : "elastic",
          "pipeline" : "input{\n  http_poller {\n    urls => {      \n      balena => {\n        method => get\n        url => \"https://api.balena-cloud.com/v4/release?$filter=belongs_to__application%20eq%201415733\"\n        headers => {\n          \"Content-Type\" => \"application/json\"\n          \"Authorization\" => \"Bearer NjEwNTUwMjU0ODYsImlhdCI6MTU2MTA1NTAyNSwiZXhwIjoxNTYxNjU5ODI1fQ.tkCc8Oa4d_uFpIlmRknpaZVoupC-V0MheLh4U2JMnrU\"\n        }\n      }\n    }\n    request_timeout => 60\n    schedule => { every => \"10s\" }\n    codec => \"json\"\n    type => \"Http_poller\"\n  }\n} \n\nfilter{\n  if [type] == \"Cloudwatch\" {\n    if [event] != \"\" {\n      mutate{\n        add_field => {\n          \"[@metadata][tags]\" => [\"Cloudwatch\"]\n          \"key\" => \"%{[Records][object][key]}\"\n        }\n      }\n\n      date {\n        match => [\"log-datestamp\", \"YYYY-MM-dd HH:mm:ss,SSS\"]\n        target =>  \"@timestamp\"\n        timezone => \"UTC\"\n      }       \n      date {\n        match => [\"log-datestamp\", \"YY-MM-dd HH:mm:ss,SSS\"]\n        target =>  \"@timestamp\"\n        timezone => \"UTC\"\n      }    \n      date {\n        match => [\"log-datestamp\", \"ISO8601\"]\n        target =>  \"@timestamp\"\n        timezone => \"UTC\"\n      }    \n      date {\n        match => [\"log-epoch\", \"UNIX\"]\n        target =>  \"@timestamp\"\n        timezone => \"UTC\"\n      }    \n      date {\n        match => [\"log-epoch\", \"UNIX_MS\"]\n        target =>  \"@timestamp\"\n        timezone => \"UTC\"\n      }\n    }\n  }\n  \n  else if [type] == \"Http_poller\" {\n    mutate{\n      add_field => {\n        \"[@metadata][tags]\" => [\"Http-poller\"]\n      }\n    }\n\n    ruby {\n      code => '\n        # a = []\n        i = 0\n        event.get(\"d\").each { |x|\n          h = {}\n          h[\"release\"] = x[\"commit\"]\n          h[\"timestamp\"] = x[\"created_at\"]\n          h[\"machineId\"] = x[\"belongs_to__application\"][\"__id\"]\n          # a[i] = h\n          # values = {h}\n          event.set(\"[pair_#{i+1}]\", h)\n          # event[\"[@metadata][pair_#{i+1}]\"] = h\n          # a << \"pair_#{i+1}\"\n          i += 1\n        }\n        # event.set(\"message\", a)\n      '\n      remove_field => [\"d\"]\n    }\n\n    date {\n      match => [\"log-datestamp\", \"YYYY-MM-dd HH:mm:ss,SSS\"]\n      target =>  \"@timestamp\"\n      timezone => \"UTC\"\n    }       \n    date {\n      match => [\"log-datestamp\", \"YY-MM-dd HH:mm:ss,SSS\"]\n      target =>  \"@timestamp\"\n      timezone => \"UTC\"\n    }    \n    date {\n      match => [\"log-datestamp\", \"ISO8601\"]\n      target =>  \"@timestamp\"\n      timezone => \"UTC\"\n    }    \n    date {\n      match => [\"log-epoch\", \"UNIX\"]\n      target =>  \"@timestamp\"\n      timezone => \"UTC\"\n    }    \n    date {\n      match => [\"log-epoch\", \"UNIX_MS\"]\n      target =>  \"@timestamp\"\n      timezone => \"UTC\"\n    }\n  }\n}\n\noutput{\n  if[type] == \"Http_poller\" {\n    elasticsearch{\n      hosts => [\"https://c41c137ba4fb498b8d8ca13ddcb77867.us-west-2.aws.found.io:9243\"]\n      user => \"elastic\"\n      password => \"Qi2ugh06PpwdESIWcA9zUQVs\"\n      index => \"helloworld\"\n    }    \n  }\n\n  stdout { \n    codec => rubydebug {\n      metadata => true\n    }\n  }\n}\n",
          "pipeline_settings" : {
            "pipeline.batch.delay" : 50,
            "pipeline.batch.size" : 125,
            "pipeline.workers" : 1,
            "queue.checkpoint.writes" : 1024,
            "queue.max_bytes" : "1gb",
            "queue.type" : "memory"
          }
        }
      }
    ]
  }
}
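
For reference, my understanding is that a Logstash node only picks up pipelines managed from Kibana when centralized pipeline management is enabled in its logstash.yml and the pipeline ID is listed there. Below is a minimal sketch of the settings involved; the host, username, and password are placeholders rather than my real values, and depending on the Logstash version the connection setting is xpack.management.elasticsearch.hosts or xpack.management.elasticsearch.url:

# logstash.yml (sketch; placeholder host and credentials)
xpack.management.enabled: true
# Must include the ID of the pipeline created in Kibana
xpack.management.pipeline.id: ["Test_pipeline"]
xpack.management.elasticsearch.hosts: ["https://<cluster>.aws.found.io:9243"]
xpack.management.elasticsearch.username: "***"
xpack.management.elasticsearch.password: "***"
# How often the node polls the .logstash index for changes
xpack.management.logstash.poll_interval: 5s

If I understand correctly, once the managed pipeline is actually loaded it should also appear in the node info API (GET http://localhost:9600/_node/pipelines), whereas running bin/logstash -f with the same file bypasses central management entirely, so the command-line run working doesn't tell me whether the managed pipeline is being picked up.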

Also, here is the Logstash pipeline configuration I created in Kibana, for reference:

input{
  http_poller {
    urls => {      
      balena => {
        method => get
        url => "https://api.balena-cloud.com/v4/release?$filter=belongs_to__application%20eq%****"
        headers => {
          "Content-Type" => "application/json"
          "Authorization" => "**** ****"
        }
      }
    }
    request_timeout => 60
    schedule => { every => "10s" }
    codec => "json"
    type => "Http_poller"
  }
} 

filter{
  if [type] == "Cloudwatch" {
    if [event] != "" {
      mutate{
        add_field => {
          "[@metadata][tags]" => ["Cloudwatch"]
          "key" => "%{[Records][object][key]}"
        }
      }

      date {
        match => ["log-datestamp", "YYYY-MM-dd HH:mm:ss,SSS"]
        target =>  "@timestamp"
        timezone => "UTC"
      }       
      date {
        match => ["log-datestamp", "YY-MM-dd HH:mm:ss,SSS"]
        target =>  "@timestamp"
        timezone => "UTC"
      }    
      date {
        match => ["log-datestamp", "ISO8601"]
        target =>  "@timestamp"
        timezone => "UTC"
      }    
      date {
        match => ["log-epoch", "UNIX"]
        target =>  "@timestamp"
        timezone => "UTC"
      }    
      date {
        match => ["log-epoch", "UNIX_MS"]
        target =>  "@timestamp"
        timezone => "UTC"
      }
    }
  }
  
  else if [type] == "Http_poller" {
    mutate{
      add_field => {
        "[@metadata][tags]" => ["Http-poller"]
      }
    }

    ruby {
      code => '
        # a = []
        i = 0
        event.get("d").each { |x|
          h = {}
          h["release"] = x["commit"]
          h["timestamp"] = x["created_at"]
          h["machineId"] = x["belongs_to__application"]["__id"]
          # a[i] = h
          # values = {h}
          event.set("[pair_#{i+1}]", h)
          # event["[@metadata][pair_#{i+1}]"] = h
          # a << "pair_#{i+1}"
          i += 1
        }
        # event.set("message", a)
      '
      remove_field => ["d"]
    }

    date {
      match => ["log-datestamp", "YYYY-MM-dd HH:mm:ss,SSS"]
      target =>  "@timestamp"
      timezone => "UTC"
    }       
    date {
      match => ["log-datestamp", "YY-MM-dd HH:mm:ss,SSS"]
      target =>  "@timestamp"
      timezone => "UTC"
    }    
    date {
      match => ["log-datestamp", "ISO8601"]
      target =>  "@timestamp"
      timezone => "UTC"
    }    
    date {
      match => ["log-epoch", "UNIX"]
      target =>  "@timestamp"
      timezone => "UTC"
    }    
    date {
      match => ["log-epoch", "UNIX_MS"]
      target =>  "@timestamp"
      timezone => "UTC"
    }
  }
}

output{
  if[type] == "Http_poller" {
    elasticsearch{
      hosts => ["***.aws.found.io:9243"]
      user => "***"
      password => "***"
      index => "helloworld"
    }    
  }

  stdout { 
    codec => rubydebug {
      metadata => true
    }
  }
}
