Unable to create index from Logstash to Elasticsearch (beginner)

Hi team,

I'm unable to create an index in Elasticsearch.

I'm using 6.4.2 for the whole stack, and below is my config file. I believe I'm doing something wrong in the output section that sends to Elasticsearch, as this is my first config file shipping data to Elasticsearch.
Is the configuration in the output section correct? Please help me correct the config file.

My pipeline configuration:

input {
	file {
		path => "/root/apache_*.log"
		start_position => "beginning"
	}

	http {

	}
}

filter {
	if [headers][request_uri] =~ "error" or [path] =~ "errors" {
		mutate {
			replace => { type => "error" }
		}
	} else {
		mutate {
			replace => { type => "access" }
		}

		grok {
			match => { "message" => '%{HTTPD_COMMONLOG} "%{GREEDYDATA:referrer}" "%{GREEDYDATA:agent}"' }
		}

		if "_grokparsefailure" in [tags] {
			drop { }
		}

		useragent {
			source => "agent"
			target => "ua"
		}

		# Admin pages
		if [request] =~ /^\/admin\// {
			drop { }
		}

		# Static files
		if [request] =~ /^\/js\//
			or [request] =~ /^\/css\//
			or [request] in ["/robots.txt", "/favicon.ico"] {
			drop { }
		}

		# Crawlers
		if [ua][device] == "Spider" {
			drop { }
		}

		mutate {
			convert => {
				"response" => "integer"
				"bytes" => "integer"
			}
		}

		date {
			match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
			remove_field => [ "timestamp" ]
		}

		geoip {
			source => "clientip"
		}
	}

	mutate {
		remove_field => [ "headers", "@version", "host" ]
	}
}

output {
	elasticsearch {
		hosts => ["localhost:9200"]
		document_type => "default"
		http_compression => true
	}
}
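
While reading about the elasticsearch output plugin I noticed that `document_type` is deprecated on 6.x clusters (the same warning shows up in my log below), so I'm wondering whether the output should set an explicit index name instead. Would something like the following be closer to correct? The index name "apache-%{+YYYY.MM.dd}" is just an example I made up, not something from a guide:

output {
	elasticsearch {
		hosts => ["localhost:9200"]
		# explicit index name instead of the deprecated document_type;
		# "apache-%{+YYYY.MM.dd}" is only an example name I picked
		index => "apache-%{+YYYY.MM.dd}"
		http_compression => true
	}
}

Or should I just leave the index setting out and expect the default logstash-%{+YYYY.MM.dd} index to show up in _cat/indices once events are flushed?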

Below is my Logstash log output:

[2018-10-10T12:52:41,401][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"6.4.2"}
[2018-10-10T12:53:01,890][INFO ][logstash.pipeline        ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>1, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2018-10-10T12:53:03,963][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2018-10-10T12:53:03,984][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[2018-10-10T12:53:04,648][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2018-10-10T12:53:04,796][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2018-10-10T12:53:04,814][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2018-10-10T12:53:04,936][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2018-10-10T12:53:05,016][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-10-10T12:53:05,072][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-10-10T12:53:06,488][INFO ][logstash.filters.geoip   ] Using geoip database {:path=>"/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"}
[2018-10-10T12:53:07,379][INFO ][logstash.inputs.file     ] No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"/var/lib/logstash/plugins/inputs/file/.sincedb_e4b74c1a603f42c5144c54f4fc45b48e", :path=>["/home/foss/apachelogs/apache_*.log"]}
[2018-10-10T12:53:08,146][INFO ][logstash.pipeline        ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x6f033fd4 sleep>"}
[2018-10-10T12:53:08,171][INFO ][logstash.inputs.http     ] Starting http input listener {:address=>"0.0.0.0:8080", :ssl=>"false"}
[2018-10-10T12:53:08,465][INFO ][filewatch.observingtail  ] START, creating Discoverer, Watch with file and sincedb collections
[2018-10-10T12:53:08,578][INFO ][logstash.agent           ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2018-10-10T12:53:09,527][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}
