Thank you magnus, I changed the conf to
field => "[ext][obj_ids]"
But another error came out,
14:55:00.210 [[main]>worker13] ERROR logstash.pipeline - Exception in pipelineworker, the pipeline stopped processing new events, please check your filter configuration and restart Logstash. {"exception"=>"undefined method empty?' for 83058780:Fixnum", "backtrace"=>["/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-split-3.1.1/lib/logstash/filters/split.rb:89:in
filter'", "org/jruby/RubyArray.java:1613:in each'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-split-3.1.1/lib/logstash/filters/split.rb:88:in
filter'", "/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:145:in do_filter'", "/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:164:in
multi_filter'", "org/jruby/RubyArray.java:1613:in each'", "/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:161:in
multi_filter'", "/usr/share/logstash/logstash-core/lib/logstash/filter_delegator.rb:42:in multi_filter'", "(eval):386:in
initialize'", "org/jruby/RubyArray.java:1613:in each'", "(eval):383:in
initialize'", "org/jruby/RubyProc.java:281:in call'", "(eval):400:in
initialize'", "org/jruby/RubyArray.java:1613:in each'", "(eval):397:in
initialize'", "org/jruby/RubyProc.java:281:in call'", "(eval):157:in
filter_func'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:353:in filter_batch'", "org/jruby/RubyProc.java:281:in
call'", "/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb:201:in each'", "org/jruby/RubyHash.java:1342:in
each'", "/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb:200:in each'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:352:in
filter_batch'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:340:in worker_loop'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:306:in
start_workers'"]}
my full configuation is
input {
redis {
host => "localhost"
data_type => "list"
key => "access"
type => "access"
}
redis {
host => "localhost"
data_type => "list"
key => "report_video"
type => "report_video"
}
}
filter {
mutate {
rename => { "_id" => "record_id" }
rename => { "_ext" => "ext"}
}
date { match => ["logged_at", "yyyy-MM-dd HH:mm:ss"] }
if [type]=="access" {
geoip { source => "ip" }
if [act]=="update" { drop{} }
}
if [type]=="report_video" {
if [act]=="digest_request" {
split {
field => "[ext][obj_ids]"
terminator => ","
}
}
}
}
output {
elasticsearch {
hosts => "localhost:9200"
index => "logstash-%{type}-%{+YYYY.MM.dd}"
}
}