Hi there,
I want to export Elasticsearch documents to a CSV file using Logstash, but I've run into a problem. Tailing the Logstash log shows this error:
tail -f /var/log/logstash/logstash-plain.log
[2020-07-10T11:11:02,561][ERROR][logstash.javapipeline ][elastiflow][output_elasticsearch_csv] A plugin had an unrecoverable error. Will restart this plugin.
Pipeline_id:elastiflow
Plugin: <LogStash::Inputs::Elasticsearch password=><password>, hosts=>["10.250.31.42:9200"], index=>"cic-format-%{+YYYY.MM.dd}", id=>"output_elasticsearch_csv", ... codec=><LogStash::Codecs::JSON id=>"json_a21bbcfa-b453-44a2-9cd9-cd6c72507134", enable_metric=>true, charset=>"UTF-8">, query=>"{ \"sort\": [ \"_doc\" ] }", size=>1000, scroll=>"1m", ... docinfo_fields=>["_index", "_type", "_id"], ssl=>false>
Error: [404] {"error":{"root_cause":[{"type":"index_not_found_exception","reason":"no such index [cic-format-%{+YYYY.MM.dd}]","resource.type":"index_or_alias","resource.id":"cic-format-%{+YYYY.MM.dd}"}],"type":"index_not_found_exception","reason":"no such index [cic-format-%{+YYYY.MM.dd}]","resource.type":"index_or_alias","resource.id":"cic-format-%{+YYYY.MM.dd}"},"status":404}
Exception: Elasticsearch::Transport::Transport::Errors::NotFound
Stack: /usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/transport/base.rb:202:in `__raise_transport_error'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/transport/base.rb:319:in `perform_request'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/transport/http/manticore.rb:67:in `perform_request'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-transport-5.0.5/lib/elasticsearch/transport/client.rb:131:in `perform_request'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/elasticsearch-api-5.0.5/lib/elasticsearch/api/actions/search.rb:183:in `search'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-elasticsearch-4.7.0/lib/logstash/inputs/elasticsearch.rb:340:in `search_request'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-elasticsearch-4.7.0/lib/logstash/inputs/elasticsearch.rb:268:in `do_run_slice'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-elasticsearch-4.7.0/lib/logstash/inputs/elasticsearch.rb:246:in `do_run'
/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-elasticsearch-4.7.0/lib/logstash/inputs/elasticsearch.rb:234:in `run'
/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:345:in `inputworker'
/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:336:in `block in start_input'
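From the 404 it looks like Elasticsearch received the literal string cic-format-%{+YYYY.MM.dd} as the index name, i.e. the date pattern was never expanded. As a sanity check I figure I could point the input at the concrete daily index from my sample document below and dump events to the console; a minimal, untested sketch:

input {
  elasticsearch {
    hosts    => "10.250.31.42:9200"
    # concrete index name copied from the sample document's _index field,
    # instead of the %{+YYYY.MM.dd} pattern that triggered the 404
    index    => "cic-format-2020.07.10"
    user     => "${user}"
    password => "${pwd}"
    docinfo  => true
  }
}
output {
  # print events to the console just to confirm documents are being read
  stdout { codec => rubydebug }
}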
My config:

input {
  # elasticsearch
  elasticsearch {
    id       => "output_elasticsearch_csv"
    hosts    => "10.250.31.42:9200"
    index    => "cic-format-%{+YYYY.MM.dd}"
    user     => "${user}"
    password => "${pwd}"
    docinfo  => true
  }
}

output {
  csv {
    # elastic field name
    fields => ["_id", "fl_dur", "tot_fw_pk", "tot_bw_pk", "fw_pkt_l_avg", "bw_pkt_l_avg", "fl_byt_s", "fl_pkt_s", "fw_iat_avg", "bw_iat_avg", "fw_pkt_s", "bw_pkt_s", "down_up_radio", "pkt_size_avg", "fw_seg_avg", "bw_seg_avg", "fw_has_fin", "fw_has_syn", "fw_has_rst", "fw_has_pst", "fw_has_ack", "fw_has_urg", "fw_has_cwe", "fw_has_ece", "bw_has_fin", "bw_has_syn", "bw_has_rst", "bw_has_pst", "bw_has_ack", "bw_has_urg", "bw_has_cwe", "bw_has_ece", "fw_dur", "bw_dur", "fw_src_addr", "fw_dst_addr", "fw_src_port", "fw_dst_port", "fw_ip_protocol", "fw_ip_tos", "fw_src_as", "fw_src_asn", "fw_dst_asn", "fw_input_snmp", "fw_output_snmp", "bw_src_addr", "bw_dst_addr", "bw_src_port", "bw_src_port", "bw_ip_protocol", "bw_ip_tos", "bw_src_as", "bw_src_asn", "bw_dst_asn", "bw_input_snmp", "bw_output_snmp"]
    path   => "/var/log/logstash/csv/cic-%{+YYYY.MM.dd}.csv"
  }
}
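As far as I understand, %{+YYYY.MM.dd} is a sprintf date pattern that Logstash resolves against an event's @timestamp. On the output side that works (each event carries a timestamp), but an input has no events yet, so the elasticsearch input apparently sends the pattern to the server literally. If that is right, a wildcard index should avoid the 404; this is the variant I plan to try (untested, and cic-format-* is just my guess at a suitable pattern):

input {
  elasticsearch {
    id       => "output_elasticsearch_csv"
    hosts    => "10.250.31.42:9200"
    # wildcard instead of a date pattern; the input takes this string literally
    index    => "cic-format-*"
    user     => "${user}"
    password => "${pwd}"
    docinfo  => true
  }
}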
My data in Elasticsearch (a sample document):
{
  "_index": "cic-format-2020.07.10",
  "_type": "_doc",
  "_id": "8D5YOHMBB49SfiIi_Iau",
  "_version": 1,
  "_score": 0,
  "_source": {
    "fl_dur": 104,
    "fw_dur": 52,
    "bw_dur": 52,
    "tot_fw_pk": 5,
    "tot_bw_pk": 5,
    "tot_l_fw_pkt": 589,
    "tot_l_bw_pkt": 589,
    "fw_pkt_l_avg": 117,
    "bw_pkt_l_avg": 117,
    "fl_byt_s": 1178000000,
    "fl_pkt_s": 10000000,
    "fw_iat_avg": 10,
    "bw_iat_avg": 10,
    "fw_pkt_s": 5000000,
    "bw_pkt_s": 5000000,
    "down_up_radio": 1,
    "pkt_size_avg": 117,
    "fw_seg_avg": 117,
    "bw_seg_avg": 117,
    "fw_src_addr": "163.29.98.82",
    "fw_dst_addr": "10.250.35.20",
    "fw_src_port": 80,
    "fw_dst_port": 63179,
    "fw_ip_protocol": "TCP",
    "fw_ip_tos": 0,
    "fw_src_as": "Data Communication Business Group (4782)",
    "fw_src_asn": 0,
    "fw_dst_asn": 0,
    "fw_input_snmp": "0",
    "fw_output_snmp": "0",
    "bw_src_addr": "163.29.98.82",
    "bw_dst_addr": "10.250.35.20",
    "bw_src_port": 80,
    "bw_ip_protocol": "TCP",
    "bw_ip_tos": 0,
    "bw_src_as": "Data Communication Business Group (4782)",
    "bw_src_asn": 0,
    "bw_dst_asn": 0,
    "bw_input_snmp": "0",
    "bw_output_snmp": "0",
    "fw_has_fin": true,
    "fw_has_syn": true,
    "fw_has_rst": false,
    "fw_has_pst": false,
    "fw_has_ack": true,
    "fw_has_urg": false,
    "fw_has_cwe": false,
    "fw_has_ece": false,
    "bw_has_fin": true,
    "bw_has_syn": true,
    "bw_has_rst": false,
    "bw_has_pst": false,
    "bw_has_ack": true,
    "bw_has_urg": false,
    "bw_has_cwe": false,
    "bw_has_ece": false
  }
}
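One more thing I noticed: _id is not part of _source, and with docinfo => true the input stores the document metadata under [@metadata], so I suspect the csv output's fields list cannot see a top-level _id at all. If so, something like this mutate filter might be needed to copy it onto the event (my assumption, untested):

filter {
  mutate {
    # copy the document id out of @metadata so the csv output can emit it
    add_field => { "_id" => "%{[@metadata][_id]}" }
  }
}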
A CSV file does get created at the path I specified, but it stays empty, presumably because the input never reads any documents after the 404 above.
Does anyone know how to solve this problem, please?
Kase