Logstash Indexing in Elasticsearch Bytecode Problem

Hello friends,
I'm trying to import data with Logstash. My config file runs, but there is a problem: all of the data is imported as bytecode. How can I solve this? Am I missing something? The index is created in Elasticsearch, but the documents aren't valid.

The Logstash logs and my config file are below.

C:\Users\akumas\Desktop\logstash-6.6.0\bin>logstash -f C:\Kibana\mapping\elastic_Import2.config
Sending Logstash logs to C:/Users/akumas/Desktop/logstash-6.6.0/logs which is now configured via log4j2.properties
[2019-05-08T15:41:36,241][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2019-05-08T15:41:36,285][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.6.0"}
[2019-05-08T15:41:51,429][WARN ][logstash.outputs.elasticsearch] You are using a deprecated config setting "document_type" set in elasticsearch. Deprecated settings will continue to work, but are scheduled for removal from logstash in the future. Document types are being deprecated in Elasticsearch 6.0, and removed entirely in 7.0. You should avoid this feature. If you have any questions about this, please visit the #logstash channel on freenode irc. {:name=>"document_type", :plugin=><LogStash::Outputs::ElasticSearch index=>"alldata2", id=>"cdaa6f4e0ee9caebdfc1e852b2d9662a01e5f967fde4c9267c71dfeb7babb152", hosts=>[//localhost], document_type=>"antalya", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_c07ee33d-1917-4eb6-ab96-db4f8ae5d18e", enable_metric=>true, charset=>"UTF-8">, workers=>1, manage_template=>true, template_name=>"logstash", template_overwrite=>false, doc_as_upsert=>false, script_type=>"inline", script_lang=>"painless", script_var_name=>"event", scripted_upsert=>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1, ilm_enabled=>false, ilm_rollover_alias=>"logstash", ilm_pattern=>"{now/d}-000001", ilm_policy=>"logstash-policy", action=>"index", ssl_certificate_verification=>true, sniffing=>false, sniffing_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, resurrect_delay=>5, validate_after_inactivity=>10000, http_compression=>false>}
[2019-05-08T15:41:55,042][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2019-05-08T15:41:56,004][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2019-05-08T15:41:56,460][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2019-05-08T15:41:56,644][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2019-05-08T15:41:56,665][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2019-05-08T15:41:56,745][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost"]}
[2019-05-08T15:41:56,774][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2019-05-08T15:41:56,899][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2019-05-08T15:41:59,009][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x6f0eb94f run>"}
[2019-05-08T15:41:59,182][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2019-05-08T15:41:59,209][INFO ][filewatch.observingtail ] START, creating Discoverer, Watch with file and sincedb collections
[2019-05-08T15:42:00,353][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2019-05-08T15:42:00,664][WARN ][logstash.codecs.plain ] Received an event that has a different character encoding than you configured. {:text=>"PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b\xEE\x9Dh^\u0001\u0000\u0000\x90\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml \xA2\u0004\u0002(\xA0\u0000\u0002\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\
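
One thing I noticed: the first WARN says document_type is deprecated in Elasticsearch 6.0 and removed in 7.0. I assume I could simply drop that setting from my output block and let the 6.x default take over; a sketch of what I mean (untested, the names are from my real config):

```
output {
  elasticsearch {
    hosts => "localhost"
    index => "alldata2"
    # document_type => "antalya" removed per the deprecation warning;
    # as I understand it, a 6.x cluster then falls back to the default "doc" type.
  }
  stdout {}
}
```

But I don't think that warning explains the bytecode.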
And my config file:

```
input {
  file {
    path => "C:/Kibana/mapping/All_data2.csv"
    start_position => "beginning"
    sincedb_path => "NUL"
  }
}

filter {
  csv {
    separator => ","
    columns => [ "Yıl","Ay","Ziyaretçi_Sayısı","Otopark_Bileti","Otopark_Ücreti","Dükkan_Kirası","Elektrik_Faturası","Pompa_Elektrik_Faturası","Çalışan_Maaş","Giriş_Ücreti" ]
  }

  mutate { convert => ["Yıl","integer"] }
  mutate { convert => ["Ay","string"] }
  mutate { convert => ["Ziyaretçi_Sayısı","integer"] }
  mutate { convert => ["Otopark_Bileti","integer"] }
  mutate { convert => ["Otopark_Ücreti","integer"] }
  mutate { convert => ["Dükkan_Kirası","integer"] }
  mutate { convert => ["Pompa_Elektrik_Faturası","integer"] }
  mutate { convert => ["Çalışan_Maaş","integer"] }
  mutate { convert => ["Giriş_Ücreti","integer"] }
}

output {
  elasticsearch {
    hosts => "localhost"
    index => "alldata2"
    document_type => "antalya"
  }
  stdout {}
}
```
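
The last WARN, from logstash.codecs.plain, looks like the real problem: "Received an event that has a different character encoding than you configured." If it is just an encoding mismatch, I understand the plain codec accepts a charset option, so I could force it on the file input. A minimal sketch (Windows-1254 is only my guess for Turkish data exported from Windows; ISO-8859-9 would be my other candidate):

```
input {
  file {
    path => "C:/Kibana/mapping/All_data2.csv"
    start_position => "beginning"
    sincedb_path => "NUL"
    # Override the codec's UTF-8 default with the file's real encoding.
    # "Windows-1254" is only a guess for a Turkish Windows export.
    codec => plain { charset => "Windows-1254" }
  }
}
```

That said, the garbage in the warning starts with PK\u0003\u0004 and mentions [Content_Types].xml, which as far as I know is the signature of a ZIP/Office file. So maybe All_data2.csv is actually an Excel workbook that was only renamed to .csv? In that case I guess no charset would help, and I would have to re-save it from Excel as a real CSV.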
