Update record if present, otherwise insert

Here is my logstash configuration

input {
  file {
        path => "/etc/logstash/fish/10gFresh.csv"
        start_position => "beginning"
        sincedb_path => "/dev/null"


        }
}

filter {
        csv {
                separator => ","
                columns => ["date","reading","temp","pH","Alkalinity","salinity","chlorine","hardness","ammonia","nitrites","nitrates","who"]
        }

        mutate { convert => ["reading","integer"]}
        mutate { convert => ["temp","float"]}
        mutate { convert => ["pH","float"]}
        mutate { convert => ["Alkalinity","float"]}
        mutate { convert => ["salinity","float"]}
        mutate { convert => ["chlorine","float"]}
        mutate { convert => ["hardness","float"]}
        mutate { convert => ["ammonia","float"]}
        mutate { convert => ["nitrites","float"]}
        mutate { convert => ["nitrates","float"]}
        date { match => ["date","M/d/yyyy"] target => "date"}
}


output {
        elasticsearch {
                hosts => "http://localhost:9200"
                #index => "2019apps"
                index => "fishstats"
                action => "update"
                document_id => "%{date}%{reading}"
                #document_type => "arcapps"
        }
        stdout{}
}

It appears that updating an existing record works without issue, but if there is a new entry that I want to add, it fails to add that record.

I have reviewed this thread: Logstash Update a document in elasticsearch and have a document_id and action set to update.

Set action to ‘index’ instead. This will add new documents and overwrite/update existing ones.

That still produces the following

[WARN ] 2019-11-04 11:52:00.230 [[main]>worker1] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"mart%{reading}", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x30a78ac3>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"mart%{reading}", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'mart%{reading}'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"mart\""}}}}}
{
          "host" => "elk.mikesdevhub.com",
      "@version" => "1",
       "message" => "mart",
    "@timestamp" => 2019-11-04T11:52:00.097Z,
          "tags" => [
        [0] "_dateparsefailure"
    ],
          "path" => "/etc/logstash/fish/10gFresh.csv",
          "date" => "mart"
}
{
          "temp" => 0.0,
            "pH" => 6.2,
          "host" => "elk.mikesdevhub.com",
      "@version" => "1",
      "nitrites" => 0.0,
           "who" => "PetCo",
    "@timestamp" => 2019-11-04T11:52:00.097Z,
          "date" => 2019-11-03T00:00:00.000Z,
       "reading" => 1,
    "Alkalinity" => 60.0,
      "nitrates" => 60.0,
      "chlorine" => 0.0,
      "salinity" => 0.0,
       "message" => "11/3/2019,1,0.0,6.2,60.0,0.0,0.0,0.0,3.0,0.0,60.0,PetCo",
      "hardness" => 0.0,
          "path" => "/etc/logstash/fish/10gFresh.csv",
       "ammonia" => 3.0
}

You are creating the document id based on a field that does not always exist, which leads to an invalid id and bad data.

The date and the reading are always there. I have verified this.

Here is the csv file

10/31/2019,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart
11/1/2019,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart
11/2/2019,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart
11/3/2019,1,0.0,6.2,60.0,0.0,0.0,0.0,3.0,0.0,60.0,PetCo

The format I see for date when I do a search is the following

October 29th 2019, 20:00:00.000

But in my code it is the following

October 29th 2019

It is saying invalid format because of this, I believe. Can anyone concur?

Ignore my previous reply.

What do you mean by data that is not always there or does not exist? The date field is always there. I have updated my configuration.

csv file

Oct 29 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,self
Oct 30 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,self
Oct 31 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart
Nov 1 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart
Nov 2 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart
Nov 3 2019 12:00:00,1,0.0,6.2,60.0,0.0,0.0,0.0,3.0,0.0,60.0,self
Nov 4 2019 12:00:00,1,0.0,6.2,60.0,0.0,0.0,0.0,3.0,0.0,60.0,self

The following error is generated

[WARN ] 2019-11-05 04:40:37.521 [[main]>worker1] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"lf-%{reading}", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x479962f0>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"lf-%{reading}", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'lf-%{reading}'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"lf\""}}}}}
{
       "message" => "lf",
          "tags" => [
        [0] "_dateparsefailure"
    ],
          "path" => "/etc/logstash/fish/10gFresh.csv",
          "host" => "elk.mikesdevhub.com",
          "date" => "lf",
      "@version" => "1",
    "@timestamp" => 2019-11-05T04:40:37.404Z
}
{
       "message" => "Nov 4 2019 12:00:00,1,0.0,6.2,60.0,0.0,0.0,0.0,3.0,0.0,60.0,self",
      "chlorine" => 0.0,
            "pH" => 6.2,
      "hardness" => 0.0,
           "who" => "self",
          "path" => "/etc/logstash/fish/10gFresh.csv",
          "host" => "elk.mikesdevhub.com",
          "date" => 2019-11-04T12:00:00.000Z,
       "reading" => 1,
      "@version" => "1",
          "temp" => 0.0,
       "ammonia" => 3.0,
      "nitrites" => 0.0,
    "Alkalinity" => 60.0,
      "nitrates" => 60.0,
      "salinity" => 0.0,
    "@timestamp" => 2019-11-05T04:40:37.405Z
}

The configuration is as follows

input {
  file {
        path => "/etc/logstash/fish/10gFresh.csv"
        start_position => "beginning"
        sincedb_path => "/dev/null"


        }
}

filter {
        csv {
                separator => ","
                columns => ["date","reading","temp","pH","Alkalinity","salinity","chlorine","hardness","ammonia","nitrites","nitrates","who"]
        }

        mutate { convert => ["reading","integer"]}
        mutate { convert => ["temp","float"]}
        mutate { convert => ["pH","float"]}
        mutate { convert => ["Alkalinity","float"]}
        mutate { convert => ["salinity","float"]}
        mutate { convert => ["chlorine","float"]}
        mutate { convert => ["hardness","float"]}
        mutate { convert => ["ammonia","float"]}
        mutate { convert => ["nitrites","float"]}
        mutate { convert => ["nitrates","float"]}
        #date { match => ["date","M/d/yyyy"] target => "date"}
        date { match => [ "date", "MMM d YYYY HH:mm:ss","MMM dd YYYY HH:mm:ss", "ISO8601" ]
                target => "date"}
}


output {
        elasticsearch {
                hosts => "http://localhost:9200"
                #index => "2019apps"
                index => "fishstats"
                action => "index"
                document_id => "%{date}-%{reading}"
                doc_as_upsert => true
        }
        stdout{}
}

Remove this and try again.

I also wonder where this comes from. Is that a line in your file?

I will try this today. I have included the entire csv file in my previous post.

I am getting the following

"_dateparsefailure"

[WARN ] 2019-11-06 02:12:56.541 [[main]>worker0] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"Oct 30 2019 12:00:00-1", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x6ee24a4d>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"Oct 30 2019 12:00:00-1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'Oct 30 2019 12:00:00-1'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"Oct 30 2019 12:00:00\""}}}}}
[WARN ] 2019-11-06 02:12:56.541 [[main]>worker0] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"Oct 31 2019 12:00:00-1", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x7b097182>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"Oct 31 2019 12:00:00-1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'Oct 31 2019 12:00:00-1'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"Oct 31 2019 12:00:00\""}}}}}
[WARN ] 2019-11-06 02:12:56.542 [[main]>worker0] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"Nov 1 2019 12:00:00-1", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x3f1e33a1>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"Nov 1 2019 12:00:00-1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'Nov 1 2019 12:00:00-1'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"Nov 1 2019 12:00:00\""}}}}}
[WARN ] 2019-11-06 02:12:56.542 [[main]>worker0] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"Nov 2 2019 12:00:00-1", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x273c1196>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"Nov 2 2019 12:00:00-1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'Nov 2 2019 12:00:00-1'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"Nov 2 2019 12:00:00\""}}}}}
[WARN ] 2019-11-06 02:12:56.543 [[main]>worker0] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"Nov 3 2019 12:00:00-1", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0x16f43d80>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"Nov 3 2019 12:00:00-1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'Nov 3 2019 12:00:00-1'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"Nov 3 2019 12:00:00\""}}}}}
[WARN ] 2019-11-06 02:12:56.544 [[main]>worker0] elasticsearch - Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"Nov 4 2019 12:00:00-1", :_index=>"fishstats", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0xfbd1f50>], :response=>{"index"=>{"_index"=>"fishstats", "_type"=>"doc", "_id"=>"Nov 4 2019 12:00:00-1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [date] of type [date] in document with id 'Nov 4 2019 12:00:00-1'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"Invalid format: \"Nov 4 2019 12:00:00\""}}}}}
{
          "path" => "/etc/logstash/fish/10gFresh.csv",
      "hardness" => 150.0,
      "nitrates" => 60.0,
    "@timestamp" => 2019-11-06T02:12:56.355Z,
          "tags" => [
        [0] "_dateparsefailure"
    ],
       "reading" => 1,
          "host" => "elk.mikesdevhub.com",
          "date" => "Oct 30 2019 12:00:00",
      "nitrites" => 0.0,
      "salinity" => 0.0,
    "Alkalinity" => 20.0,
           "who" => "self",
       "ammonia" => 0.0,
       "message" => "Oct 30 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,self",
      "@version" => "1",
          "temp" => 0.0,
            "pH" => 6.2,
      "chlorine" => 0.0
}
{
          "path" => "/etc/logstash/fish/10gFresh.csv",
      "hardness" => 150.0,
      "nitrates" => 60.0,
    "@timestamp" => 2019-11-06T02:12:56.356Z,
          "tags" => [
        [0] "_dateparsefailure"
    ],
       "reading" => 1,
          "host" => "elk.mikesdevhub.com",
          "date" => "Oct 31 2019 12:00:00",
      "nitrites" => 0.0,
      "salinity" => 0.0,
    "Alkalinity" => 20.0,
           "who" => "self",
       "ammonia" => 0.0,
       "message" => "Oct 31 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,self",
      "@version" => "1",
          "temp" => 0.0,
            "pH" => 6.2,
      "chlorine" => 0.0
}
{
          "path" => "/etc/logstash/fish/10gFresh.csv",
      "hardness" => 150.0,
      "nitrates" => 60.0,
    "@timestamp" => 2019-11-06T02:12:56.356Z,
          "tags" => [
        [0] "_dateparsefailure"
    ],
       "reading" => 1,
          "host" => "elk.mikesdevhub.com",
          "date" => "Nov 1 2019 12:00:00",
      "nitrites" => 0.0,
      "salinity" => 0.0,
    "Alkalinity" => 20.0,
           "who" => "Petsmart",
       "ammonia" => 0.0,
       "message" => "Nov 1 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart",
      "@version" => "1",
          "temp" => 0.0,
            "pH" => 6.2,
      "chlorine" => 0.0
}
{
          "path" => "/etc/logstash/fish/10gFresh.csv",
      "hardness" => 150.0,
      "nitrates" => 60.0,
    "@timestamp" => 2019-11-06T02:12:56.356Z,
          "tags" => [
        [0] "_dateparsefailure"
    ],
       "reading" => 1,
          "host" => "elk.mikesdevhub.com",
          "date" => "Nov 2 2019 12:00:00",
      "nitrites" => 0.0,
      "salinity" => 0.0,
    "Alkalinity" => 20.0,
           "who" => "Petsmart",
       "ammonia" => 0.0,
       "message" => "Nov 2 2019 12:00:00,1,0.0,6.2,20.0,0.0,0.0,150.0,0.0,0.0,60.0,Petsmart",
      "@version" => "1",
          "temp" => 0.0,
            "pH" => 6.2,
      "chlorine" => 0.0
}
{
          "path" => "/etc/logstash/fish/10gFresh.csv",
      "hardness" => 0.0,
      "nitrates" => 60.0,
    "@timestamp" => 2019-11-06T02:12:56.356Z,
          "tags" => [
        [0] "_dateparsefailure"
    ],
       "reading" => 1,
          "host" => "elk.mikesdevhub.com",
          "date" => "Nov 3 2019 12:00:00",
      "nitrites" => 0.0,
      "salinity" => 0.0,
    "Alkalinity" => 60.0,
           "who" => "self",
       "ammonia" => 3.0,
       "message" => "Nov 3 2019 12:00:00,1,0.0,6.2,60.0,0.0,0.0,0.0,3.0,0.0,60.0,self",
      "@version" => "1",
          "temp" => 0.0,
            "pH" => 6.2,
      "chlorine" => 0.0
}

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.