No data output to ES, with no errors in the logs

No data is being output to ES, and there are no errors in the logs.
Why hasn't my data been imported?

The logs are below:

[2018-08-18T19:42:13,315][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"index_patterns"=>"trxdetail-*", "settings"=>{"index"=>{"analysis"=>{"analyzer"=>{"default"=>{"type"=>"ik_max_word"}}}}, "index.refresh_interval"=>"-1", "number_of_replicas"=>"0", "number_of_shards"=>"5"}, "mappings"=>{"trx"=>{"dynamic_templates"=>[{"dates"=>{"match_mapping_type"=>"date", "mapping"=>{"type"=>"date"}}}, {"strings"=>{"match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"ID"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "CUSNO"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "CUSACC"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "CUSNAME"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "OPPCUSNO"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "OPPACC"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "OPPNAME"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "OPPACCFLAG"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "CDFLAG"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "AMT"=>{"type"=>"double"}, "TRXDATE"=>{"type"=>"date", "format"=>"yyyyMMdd"}, "TRXTIME"=>{"type"=>"date", "format"=>"HHmmss"}, "TRXFLOW"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "TRXFLOWNO"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "ACCCODE"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "ACCNAME"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BANKNO"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BANKNAME"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BRANCHNO"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BRANCHNAME"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "TELLER"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "TERMINAL"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "CUSWILL"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BUSICODE"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BUSINAME"=>{"type"=>"text", "analyzer"=>"ik_max_word", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BUSICMMCODE"=>{"type"=>"text", "fields"=>{"keyword"=>{"ignore_above"=>256, "type"=>"keyword"}}}, "BUSICMM"=>{"type"=>"text", "analyzer"=>"ik_max_word"}, "BUSITECHCMM"=>{"type"=>"text", "analyzer"=>"ik_max_word"}}}}}}
[2018-08-18T19:42:13,427][INFO ][logstash.outputs.elasticsearch] Installing elasticsearch template to _template/trx-template
[2018-08-18T19:42:13,524][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::Elasticsearch", :hosts=>["//192.168.137.1:9200"]}
[2018-08-18T19:42:13,575][INFO ][logstash.licensechecker.licensereader] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@192.168.137.1:9200/]}}
[2018-08-18T19:42:13,579][INFO ][logstash.licensechecker.licensereader] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elastic:xxxxxx@192.168.137.1:9200/, :path=>"/"}
[2018-08-18T19:42:13,599][WARN ][logstash.licensechecker.licensereader] Restored connection to ES instance {:url=>"http://elastic:xxxxxx@192.168.137.1:9200/"}
[2018-08-18T19:42:13,627][INFO ][logstash.licensechecker.licensereader] ES Output version determined {:es_version=>6}
[2018-08-18T19:42:13,630][WARN ][logstash.licensechecker.licensereader] Detected a 6.x and above cluster: the type event field won't be used to determine the document _type {:es_version=>6}
[2018-08-18T19:42:13,893][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>".monitoring-logstash", :thread=>"#<Thread:0x4d290636@C:/Application/logstash/logstash-6.3.0/logstash-core/lib/logstash/pipeline_action/create.rb:48 sleep>"}
[2018-08-18T19:42:14,565][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x55914dfe sleep>"}
[2018-08-18T19:42:14,702][INFO ][logstash.agent ] Pipelines running {:count=>2, :running_pipelines=>[:main, :".monitoring-logstash"], :non_running_pipelines=>[]}
[2018-08-18T19:42:14,736][INFO ][logstash.inputs.metrics ] Monitoring License OK
[2018-08-18T19:42:16,347][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}

And my configuration is:

input {
  file {
    path => "C:\Application\logstash\script\data\zibotrxnew.csv"
    #path => "C:\Application\logstash\script\data\zibotrxnewtest.csv"
    start_position => "beginning"
    #sincedb_path => "C:\Application\logstash\zibotrx.read"
    sincedb_path => "NUL"
  }
}

filter {
  csv {
    separator => ","
    skip_header => true
    columns => ["ID","CUSNO","CUSACC","CUSNAME","OPPCUSNO","OPPACC","OPPNAME","OPPACCFLAG","CDFLAG","AMT","TRXDATE","TRXTIME","TRXFLOW","TRXFLOWNO","ACCCODE","ACCNAME","BANKNO","BANKNAME","BRANCHNO","BRANCHNAME","TELLER","TERMINAL","CUSWILL","BUSICODE","BUSINAME","BUSICMMCODE","BUSICMM","BUSITECHCMM"]
    #autodetect_column_names => true
    #remove_field => ["host","path"]
  }

  # These blocks are disabled; note that the inner lines must be commented out
  # as well, or the config will not parse. (Working versions of the date
  # filters are sketched after the config below.)
  #date {
  #  match => [ "TRXDATE", "yyyyMMdd" ]
  #}

  #date {
  #  match => [ "TRXTIME", "HHmmss" ]
  #}

  #dissect {
  #  mapping => { "message" => "%{TRXDATE}-%{TRXFLOW}-%{TRXFLOWNO}" }
  #}

  mutate {
    remove_field => ["message","path","@version","@timestamp"]
  }
}

output {
  elasticsearch {
    hosts => ["192.168.137.1:9200"]
    user => "XXXXXX"
    password => "XXXXXXXX"

    index => "trxdetail-zibo"
    document_type => "trx"
    #document_id => "%{TRXDATE}-%{TRXFLOW}-%{TRXFLOWNO}"

    template => "C:\Application\logstash\script\template\trxdetail-template.json"
    template_name => "trx-template"
    template_overwrite => true
  }
}
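For reference, if the commented-out date filters above were re-enabled, a minimal sketch could look like the following. The target settings are an assumption on my part: by default the date filter writes the parsed result to @timestamp, which the mutate above removes.

  date {
    match  => [ "TRXDATE", "yyyyMMdd" ]
    target => "TRXDATE"   # assumption: overwrite the original field with the parsed date
  }
  date {
    match  => [ "TRXTIME", "HHmmss" ]
    target => "TRXTIME"
  }

Separately, note that the template shown in the logs sets "index.refresh_interval" => "-1", which disables automatic refresh: documents are indexed but do not become searchable until the index is refreshed. That alone can make freshly imported data look as if it never arrived.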

Sorry, my mistake. The data had been imported into ES after all; I was able to find it in es-head later.

I am also facing the same issue while parsing XML: the output is not displayed anywhere.

You should delete the sincedb file, which records what Logstash has already read.
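For completeness, a minimal sketch of the file input with a persistent sincedb (the path mirrors the commented-out line in the config above and is only an example):

  input {
    file {
      path => "C:\Application\logstash\script\data\zibotrxnew.csv"
      start_position => "beginning"
      # Read positions are persisted here; delete this file to force a full re-read.
      sincedb_path => "C:\Application\logstash\zibotrx.read"
    }
  }

With sincedb_path => "NUL" (as in the original config, on Windows), no read state survives a restart, so the file is re-read from the beginning each time Logstash starts.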
