Error after upgrading to Logstash 7.1.1

    [ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Something is wrong with your configuration.", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/config/mixin.rb:86:in `config_init'", "/usr/share/logstash/logstash-core/lib/logstash/outputs/base.rb:60:in `initialize'", "org/logstash/config/ir/compiler/OutputStrategyExt.java:232:in `initialize'", "org/logstash/config/ir/compiler/OutputDelegatorExt.java:48:in `initialize'", "org/logstash/config/ir/compiler/OutputDelegatorExt.java:30:in `initialize'", "org/logstash/plugins/PluginFactoryExt.java:239:in `plugin'", "org/logstash/plugins/PluginFactoryExt.java:137:in `buildOutput'", "org/logstash/execution/JavaBasePipelineExt.java:50:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:23:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:36:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:325:in `block in converge_state'"]}

Here is my custom input file, thanks for any assistance in advance:

input {

file {
  # Glob syntax for filename matching is explained here:
  # https://www.elastic.co/guide/en/logstash/current/glob-support.html
  # NOTE(review): the forum's markdown swallowed the asterisks in the original
  # post, rendering "/*/*/" as "///". The two-level wildcards below restore the
  # intended globs (e.g. year/month subdirectories) — confirm against the
  # actual directory layout on disk.
  path => ["/var/log/adc/2019/*/*/adc.log",
           "/var/log/adc/2019/*/*/asdi.log",
           "/var/log/adc/2019/*/*/edct_cdm_flight_data.log",
           "/var/log/adc/2019/*/*/flightaware.log",
           "/var/log/adc/2019/*/*/flight_manager.log",
           "/var/log/adc/2019/*/*/fp.log",
           "/var/log/adc/2019/*/*/invalid_outgoing.log",
           "/var/log/adc/2019/*/*/iridium.log",
           "/var/log/adc/2019/*/*/met_error.log",
           "/var/log/adc/2019/*/*/microservice.log",
           "/var/log/adc/2019/*/*/mq_output.log",
           "/var/log/adc/2019/*/*/performance.log",
           "/var/log/adc/2019/*/*/position_data.log",
           "/var/log/adc/2019/*/*/rmqapps.log",
           "/var/log/adc/2019/*/*/sbbtraffic.log",
           "/var/log/adc/2019/*/*/schneider.log",
           "/var/log/adc/2019/*/*/skyguide_notams.log",
           "/var/log/adc/2019/*/*/sql.log",
           "/var/log/adc/2019/*/*/unparsed.log",
           "/var/log/adc/2019/*/*/wx.log",
           "/var/log/adc/2019/*/*/flightkeys.log",
           "/var/log/adc/2019/*/*/fp_formatter.log",
           "/var/log/sys/2019/*/*/net-session*",
           "/var/log/sys/2019/*/*/ad2api.log"
  ]
  # Routes these events to the "standard_adc_format" branch of the filter block.
  tags => [ "standard_adc_format" ]

  # default discover_interval is 15 sec
  discover_interval => 60

  # file where indexes into the current log file positions are stored
  sincedb_path => "/tmp/logstash-sincedb.db"

  # when a new log is first found, begin reading from the first line
  start_position => "beginning"

}

file {
  # NOTE(review): "/*/*/" was rendered as "///" by the forum's markdown in the
  # original post; restored here — confirm against the directory layout.
  path => ["/var/log/adc/2019/*/*/api.log",
           "/var/log/adc/2019/*/*/dashboard.log"
  ]
  # Routes these events to the "alt_adc_format" branch of the filter block.
  tags => [ "alt_adc_format" ]

  # default discover_interval is 15 sec
  discover_interval => 60

  # file where indexes into the current log file positions are stored
  sincedb_path => "/tmp/logstash-sincedb2.db"

  # when a new log is first found, begin reading from the first line
  start_position => "beginning"

}

file {
  # FIXME: This is no longer the correct path for mail logs. Needs updated.
  # NOTE(review): the original post's "///*maillog" reconstructs to
  # "/*/*/*maillog" once the markdown-eaten asterisks are restored — confirm.
  path => ["/var/log/sys/2019/*/*/*maillog"
  ]
  # Routes these events to the "syslog_format" branch of the filter block.
  tags => [ "syslog_format" ]

  # default discover_interval is 15 sec
  discover_interval => 60

  # file where indexes into the current log file positions are stored
  sincedb_path => "/tmp/logstash-sincedb3.db"

  # when a new log is first found, begin reading from the first line
  start_position => "beginning"

}

file {
  # NOTE(review): "/*/*/" was rendered as "///" by the forum's markdown in the
  # original post; restored here — confirm against the directory layout.
  path => ["/var/log/adc/2019/*/*/fp_formatter.log"]
  # Routes these events to the "windward_format" branch of the filter block.
  tags => [ "windward_format" ]

  # default discover_interval is 15 sec
  discover_interval => 60

  # file where indexes into the current log file positions are stored
  # FIX: this input previously reused /tmp/logstash-sincedb3.db, which the
  # syslog_format input above also uses. Two file inputs sharing one sincedb
  # overwrite each other's read offsets; each input needs its own file.
  sincedb_path => "/tmp/logstash-sincedb4.db"

  # when a new log is first found, begin reading from the first line
  start_position => "beginning"

}
}

# Parse each input family into structured fields, then set @timestamp from the
# extracted log date. Events that match no grok pattern are tagged
# "_grokparsefailure" by grok itself; the output block routes on that tag.
filter {

if "standard_adc_format" in [tags] {
    # Lines containing ".py" carry extra fields (loglevel, thread, source
    # file/line) emitted by the in-house python logging libs.
    if ".py" in [message] {
        # it's a log line from a python app with extra info
        grok {
            match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{NOTSPACE:appname}\[%{USERNAME:process_id}\]  %{NOTSPACE:serverdate} %{NOTSPACE:servertime} %{WORD:loglevel} %{NUMBER:thread_id} %{NOTSPACE:source_file} %{POSINT:source_line} %{GREEDYDATA:message}" ]

            # Replace the raw line with just the trailing message payload.
            overwrite => [ "message" ]
        }
    } else {
        # it's a standard syslog format not generated by our python logging libs
        grok {
            match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{NOTSPACE:appname}\[%{USERNAME:process_id}\] %{GREEDYDATA:message}" ]
        }
    }
    # Expand the literal "<nl>" marker into real line breaks. The replacement
    # string deliberately spans lines: it is two newline characters.
    mutate  {
        gsub => [ "message", "<nl>", "

" ]
}
}

if "alt_adc_format" in [tags] {
    # api.log / dashboard.log use a "#|date time level ..." layout.
    grok {
        match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} #\|%{NOTSPACE:date2}  %{NOTSPACE:time2} %{WORD:loglevel} %{NUMBER:thread_id} %{NOTSPACE:source_file} %{POSINT:source_line} %{GREEDYDATA:message}" ]

        overwrite => [ "message" ]
    }
    # Same "<nl>" expansion as the standard_adc_format branch.
    mutate  {
        gsub => [ "message", "<nl>", "

" ]
}
}

if "syslog_format" in [tags] {
    # Plain syslog: timestamp, facility, host, app name, free-form message.
    grok {
        match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{NOTSPACE:appname} %{GREEDYDATA:message}" ]
        overwrite => [ "message" ]
    }
}

if "windward_format" in [tags] {
    # Minimal layout: timestamp, facility, host, then the raw message.
    grok {
        match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{GREEDYDATA:message}" ]
        overwrite => [ "message" ]
    }
}

# Use the timestamp parsed out of the log line (logdate) as the event's
# @timestamp instead of the ingestion time.
# NOTE(review): pattern has no fractional-seconds variant — if the logs carry
# milliseconds, add "yyyy-MM-dd'T'HH:mm:ss.SSS" as a second pattern; confirm
# against a sample line.
date {
    match => [ "logdate", "yyyy-MM-dd'T'HH:mm:ss" ]
}

}

output {
  if "_grokparsefailure" in [tags] {
    # Write events that didn't match any grok pattern to a file for inspection.
    # FIX: the setting name must be a bare identifier — the quoted "path"
    # produced: [ERROR][logstash.outputs.file] Unknown setting '"path"' for file
    file { path => "/tmp/grok_failures.txt" }
  } else {
    elasticsearch {
      hosts => ["localhost:9200"]
      # One index per month.
      index => "logstash-%{+YYYY.MM}"
    }
  }

  # FIX: "for debugging:" was a bare prose line, which is invalid config
  # syntax — it must be a comment.
  # for debugging:
  stdout { codec => rubydebug }

}

I would expect another error message, possibly multi-line, immediately preceding this one. When I see this, it is normally a plugin detecting a problem and logging a specific error, after which Logstash logs this very generic error. We need to see that specific error :slight_smile:

So, the only entry in the log file has this info: it does a successful startup and then immediately shuts down. Is there somewhere else I need to look for other errors?

[2019-07-25T12:44:31,066][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2019-07-25T12:44:35,962][INFO ][logstash.runner ] Logstash shut down.
[2019-07-25T12:44:58,294][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2019-07-25T12:44:58,313][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.1.1"}
[2019-07-25T12:45:06,769][ERROR][logstash.outputs.file ] Unknown setting '"path"' for file
[2019-07-25T12:45:06,785][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Something is wrong with your configuration.", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/config/mixin.rb:86:in config_init'", "/usr/share/logstash/logstash-core/lib/logstash/outputs/base.rb:60:ininitialize'", "org/logstash/config/ir/compiler/OutputStrategyExt.java:232:in initialize'", "org/logstash/config/ir/compiler/OutputDelegatorExt.java:48:ininitialize'", "org/logstash/config/ir/compiler/OutputDelegatorExt.java:30:in initialize'", "org/logstash/plugins/PluginFactoryExt.java:239:inplugin'", "org/logstash/plugins/PluginFactoryExt.java:137:in buildOutput'", "org/logstash/execution/JavaBasePipelineExt.java:50:ininitialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:23:in initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:36:inexecute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:325:in `block in converge_state'"]}

That would suggest you have

input {
    file {
        "path" =>

rather than

input {
    file {
        path =>

The correct path is set...input_file

NVM, found the issue. It was the quoted "path" below that it was complaining about.

file { "path" => "/tmp/grok_failures.txt" }

thanks.