I have configured Logstash as below to read the logs generated by my Spring Boot application and push them into Elasticsearch, but the index is not being created even though Logstash starts successfully.
Please let me know how to fix this.
Conf file:
input {
  file {
    path => "C:/workspace/showmenow/logback/redditApp.log"
    codec => "json"
    type => "logback"
  }
}
output {
  if [type] == "logback" {
    elasticsearch {
      hosts => [ "localhost:9200" ]
      index => "logback-test"
    }
  }
}
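If it helps, here is a minimal variant of the same pipeline with start_position and sincedb_path set explicitly. Both settings are assumptions on my part (taken from the file input plugin options) and are not in the config above, so I am not sure whether they are the missing piece:

input {
  file {
    path => "C:/workspace/showmenow/logback/redditApp.log"
    codec => "json"
    type => "logback"
    # assumed: read the existing file from the start instead of only tailing new lines
    start_position => "beginning"
    # assumed: on Windows, "NUL" discards the sincedb so the file is re-read on every run
    sincedb_path => "NUL"
  }
}
output {
  if [type] == "logback" {
    elasticsearch {
      hosts => [ "localhost:9200" ]
      index => "logback-test"
    }
  }
}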
Here are the Logstash logs:
[2019-05-30T16:33:13,736][DEBUG][logstash.javapipeline ] Starting pipeline {:pipeline_id=>"main"}
[2019-05-30T16:33:13,817][DEBUG][logstash.outputs.elasticsearch] Normalizing http path {:path=>nil, :normalized=>nil}
[2019-05-30T16:33:14,330][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2019-05-30T16:33:14,343][DEBUG][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[2019-05-30T16:33:14,572][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2019-05-30T16:33:14,630][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>7}
[2019-05-30T16:33:14,637][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2019-05-30T16:33:14,669][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2019-05-30T16:33:14,687][INFO ][logstash.outputs.elasticsearch] Using default mapping template
[2019-05-30T16:33:14,720][INFO ][logstash.javapipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>500, :thread=>"#<Thread:0x33318691 run>"}
[2019-05-30T16:33:14,928][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2019-05-30T16:33:14,962][DEBUG][logstash.outputs.elasticsearch] Found existing Elasticsearch template. Skipping template management {:name=>"logstash"}
[2019-05-30T16:33:15,103][DEBUG][logstash.instrument.periodicpoller.cgroup] One or more required cgroup files or directories not found: /proc/self/cgroup, /sys/fs/cgroup/cpuacct, /sys/fs/cgroup/cpu
[2019-05-30T16:33:15,356][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2019-05-30T16:33:15,361][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[2019-05-30T16:33:14,887][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled conditional
[if (event.getField('[type]')=='logback')]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@16ca425f
[2019-05-30T16:33:14,884][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled conditional
[if (event.getField('[type]')=='logback')]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@16ca425f
[2019-05-30T16:33:14,883][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled conditional
[if (event.getField('[type]')=='logback')]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@16ca425f
[2019-05-30T16:33:14,888][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled conditional
[if (event.getField('[type]')=='logback')]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@16ca425f
[2019-05-30T16:33:16,136][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled output
P[output-elasticsearch{"hosts"=>["localhost:9200"], "index"=>"logback-test"}|[str]pipeline:11:10:```
elasticsearch {
hosts => [ "localhost:9200" ]
index => "logback-test"
}
```]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@3a1579c8
[2019-05-30T16:33:16,258][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled output
P[output-elasticsearch{"hosts"=>["localhost:9200"], "index"=>"logback-test"}|[str]pipeline:11:10:```
elasticsearch {
hosts => [ "localhost:9200" ]
index => "logback-test"
}
```]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@3a1579c8
[2019-05-30T16:33:16,337][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled output
P[output-elasticsearch{"hosts"=>["localhost:9200"], "index"=>"logback-test"}|[str]pipeline:11:10:```
elasticsearch {
hosts => [ "localhost:9200" ]
index => "logback-test"
}
```]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@3a1579c8
[2019-05-30T16:33:16,396][DEBUG][org.logstash.config.ir.CompiledPipeline] Compiled output
P[output-elasticsearch{"hosts"=>["localhost:9200"], "index"=>"logback-test"}|[str]pipeline:11:10:```
elasticsearch {
hosts => [ "localhost:9200" ]
index => "logback-test"
}
```]
into
org.logstash.config.ir.compiler.ComputeStepSyntaxElement@3a1579c8
[2019-05-30T16:33:17,410][TRACE][logstash.inputs.file ] Registering file input {:path=>["C:/workspace/showmenow/logback/redditApp.log"]}
[2019-05-30T16:33:17,481][INFO ][logstash.inputs.file ] No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"C:/1/tools/logstash-7.1.1/data/plugins/inputs/file/.sincedb_2294bb1aede9b3549a8e6dbb55ef7e6c", :path=>["C:/workspace/showmenow/logback/redditApp.log"]}
[2019-05-30T16:33:17,519][INFO ][logstash.javapipeline ] Pipeline started {"pipeline.id"=>"main"}
[2019-05-30T16:33:17,538][DEBUG][log