I am trying to monitor IIS with Elasticsearch and Logstash running in Docker, and I want to use a template for the elasticsearch output, but I get this error:
[2019-01-12T02:38:28,874][ERROR][logstash.agent] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, input, filter, output at line 1, column 1 (byte 1) after ", :backtrace=>[
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:41:in `compile_imperative'",
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:49:in `compile_graph'",
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:in `block in compile_sources'",
"org/jruby/RubyArray.java:2486:in `map'",
"/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:10:in `compile_sources'",
"org/logstash/execution/AbstractPipelineExt.java:149:in `initialize'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:22:in `initialize'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:90:in `initialize'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:42:in `block in execute'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:92:in `block in exclusive'",
"org/jruby/ext/thread/Mutex.java:148:in `synchronize'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:92:in `exclusive'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:38:in `execute'",
"/usr/share/logstash/logstash-core/lib/logstash/agent.rb:317:in `block in converge_state'"]}
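
For reference, the pipeline file can be syntax-checked on its own with Logstash's --config.test_and_exit flag; a minimal sketch of that check (the image tag and the mount path are assumptions):

# Sketch: syntax-check only the pipeline file (image tag 6.5.4 is an assumption)
docker run --rm \
  -v "$PWD/logstash.conf:/usr/share/logstash/pipeline/logstash.conf" \
  docker.elastic.co/logstash/logstash:6.5.4 \
  --config.test_and_exit -f /usr/share/logstash/pipeline/logstash.conf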
My logstash-template.json:
{
  "template": "logstash-*",
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "_default_": {
      "dynamic_templates": [
        {
          "string_fields": {
            "match_mapping_type": "string",
            "match": "*",
            "mapping": {
              "index": "analyzed",
              "omit_norms": true,
              "type": "string",
              "fields": {
                "raw": {
                  "index": "not_analyzed",
                  "ignore_above": 256,
                  "type": "string"
                }
              }
            }
          }
        }
      ],
      "properties": {
        "geoip": {
          "dynamic": true,
          "properties": {
            "location": {
              "type": "geo_point"
            }
          },
          "type": "object"
        },
        "logsource": {
          "index": "not_analyzed",
          "type": "string"
        },
        "pid": {
          "index": "not_analyzed",
          "type": "long"
        },
        "program": {
          "index": "not_analyzed",
          "type": "string"
        },
        "ident": {
          "index": "not_analyzed",
          "type": "string"
        },
        "httpversion": {
          "index": "not_analyzed",
          "type": "string"
        },
        "version": {
          "index": "not_analyzed",
          "type": "string"
        },
        "referer": {
          "index": "not_analyzed",
          "type": "string"
        },
        "referrer": {
          "index": "not_analyzed",
          "type": "string"
        },
        "request": {
          "index": "not_analyzed",
          "type": "string"
        },
        "responsetime": {
          "type": "long",
          "ignore_malformed": true,
          "null_value": 0
        },
        "bytes": {
          "type": "long"
        },
        "response": {
          "type": "string",
          "index": "not_analyzed"
        }
      },
      "_all": {
        "enabled": true
      }
    }
  },
  "aliases": {}
}
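
The template JSON can also be pushed to Elasticsearch by hand as a sanity check, e.g. against the legacy _template endpoint (the host is the one used in the output section below):

# Sketch: load the template manually to see whether Elasticsearch accepts the JSON
curl -X PUT "http://192.168.222.240:9200/_template/logstash" \
  -H "Content-Type: application/json" \
  -d @logstash-template.json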
My logstash.conf:
input {
  beats {
    port => 5000
    type => 'iis'
  }
}

# First filter
filter {
  # ignore log comments
  if [message] =~ "^#" {
    drop {}
  }

  grok {
    patterns_dir => "./patterns"
    match => ["message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:agent} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status} %{NUMBER:responsetime}" ]
  }

  date {
    match => [ "timestamp", "yyyy-MM-dd HH:mm:ss" ]
    locale => "en"
  }
}

# Second filter
filter {
  if "_grokparsefailure" in [tags] {
  } else {
    # on success remove the message field to save space
    mutate {
      remove_field => ["message", "timestamp"]
    }
  }
}

output {
  elasticsearch {
    hosts => ["192.168.222.240:9200"]
    index => "logstash-%{+YYYY.MM.dd}"
    template => "./logstash-template.json"
    template_name => "logstash"
    document_type => "iis"
    template_overwrite => true
    manage_template => true
  }
}
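
Logstash runs from the official Docker image with the pipeline file and the template mounted into the container, roughly like this (a sketch; the image tag and host paths are assumptions):

# Sketch: how the config and template are mounted (tag and paths are assumptions)
docker run -d --name logstash \
  -p 5000:5000 \
  -v "$PWD/logstash.conf:/usr/share/logstash/pipeline/logstash.conf" \
  -v "$PWD/logstash-template.json:/usr/share/logstash/logstash-template.json" \
  docker.elastic.co/logstash/logstash:6.5.4

The template is mounted under /usr/share/logstash so that the relative path ./logstash-template.json in the output section can resolve against Logstash's working directory.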