Hi all,
I am trying to index data using a custom template.
Here is what my template looks like:
PUT _template/qqq123
{
"template": "xxx123",
"settings": {
"number_of_shards": "6",
"number_of_replicas": "1"
},
"mappings": {
"sss123": {
"properties": {
"@timestamp": {
"type": "date"
},
"@version": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"acknowledged": {
"type": "long"
},
"agent": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"alertgroup": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"alertkey": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"identifier": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"node": {
"enabled": false
},
"nodealias": {
"enabled": false
},
"serial": {
"enabled": false
},
"serverserial": {
"enabled": false
}
}
}
}
}
Here is my Logstash conf file:
input {
jdbc {
type => "jdbc-demo"
jdbc_driver_library => "/usr/share/java/jconn3.jar"
jdbc_driver_class => "com.sybase.jdbc3.jdbc.SybDriver"
jdbc_connection_string => "jdbc:sybase:Tds:192.168.1.32:4117"
#jdbc_driver_class => "com.mysql.jdbc.Driver"
#jdbc_connection_string => "jdbc:mysql://192.168.1.40:3306/jiradb40"
jdbc_user => "root"
jdbc_password => ""
schedule => "*/5 * * * * *"
statement => "SELECT * from alerts.status"
#use_column_value => true
#tracking_column => "statechange"
#last_run_metadata_path => "/tmp/ncool"
#record_last_run => true
# jdbc_default_timezone => "Asia/Kolkata"
}
}
filter {
date {
match => [ "statechange", "UNIX" ]
target => "schange"
timezone => "America/New_York"
}
date {
match => [ "firstoccurrence", "UNIX" ]
target => "firstoccur"
timezone => "America/New_York"
}
date {
match => [ "lastoccurrence", "UNIX" ]
target => "lastoccur"
timezone => "America/New_York"
}
date {
match => [ "internallast", "UNIX" ]
target => "internallst"
timezone => "America/New_York"
}
mutate {
strip => ["identifier", "serial", "node", "nodealias", "manager", "agent", "alertgroup", "alertkey", "severity", "summary", "statechange", "firstoccurrence", "lastoccurrence", "internallast", "poll", "type", "tally", "class", "grade", "location", "owneruid", "ownergid", "acknowledged", "flash", "eventid", "expiretime", "processreq", "suppressescl", "customer", "service", "physicalslot", "physicalport", "physicalcard", "tasklist", "nmosserial", "nmosobjinst", "nmoscausetype", "nmosdomainname", "nmosentityid", "nmosmanagedstatus", "nmoseventmap", "localnodealias", "localpriobj", "localsecobj", "localrootobj", "remotenodealias", "remotepriobj", "remotesecobj", "remoterootobj", "x733eventtype", "x733probablecause", "x733specificprob", "x733corrnotif", "servername", "serverserial", "url", "extendedattr", "oldrow", "probesubsecondid", "bsm_identity"]
}
}
output {
elasticsearch {
hosts => ["http://192.168.1.75:9200"]
user => "elastic"
password => "changeme"
index => 'my_index10'
document_id => "%{identifier}"
#document_type => "csv"
}
stdout {codec => rubydebug }
}
And this is the error that I am getting:
[2017-06-20T13:31:31,535][WARN ][logstash.outputs.elasticsearch] Failed action. {:status=>400, :action=>["index", {:_id=>"MeerutMachineMon4Systems", :_index=>"my_index10", :_type=>"0", :_routing=>nil}, 2017-06-20T08:01:30.202Z %{host} %{message}], :response=>{"index"=>{"_index"=>"my_index10", "_type"=>"0", "_id"=>"MeerutMachineMon4Systems", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"[remoterootobj] is defined as a field in mapping [0] but this name is already used for an object in other types"}}}}
Thank you