Hi,
I'm getting the following messages in my Logstash logs; my Logstash version is logstash-6.5.4-1.noarch.
Below is what I get when I start it with the following command, and then Logstash stops:
/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/ --config.reload.automatic &
[WARN ] 2019-01-28 06:50:19.508 [[main]<file] createinitial - open_file OPEN_WARN_INTERVAL is '300'
[WARN ] 2019-01-28 06:50:19.509 [[main]<file] createinitial - open_file OPEN_WARN_INTERVAL is '300'
(the same warning repeats several more times)
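I'm guessing these open_file warnings mean the file input is keeping too many files open at once, so maybe I need to tune the input itself. Something like this is what I had in mind (the max_open_files / close_older values are just guesses on my part):

file {
  path => [ "/data/rmlogs_SJ/*.txt" ]
  start_position => "beginning"
  sincedb_path => "/dev/null"
  type => "rmlog"
  max_open_files => 8192   # guess: allow more files in the plugin's open-file window
  close_older => 3600      # guess: close files not written to for an hour (seconds)
}

Is that the right direction, or is the warning about something else entirely?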
When I start it with systemctl start logstash.service instead, I get the messages below, but it keeps running:
[2019-01-28T07:54:03,219][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"syslog-2019.01.27", :_type=>"messages", :routing=>nil}, #<LogStash::Event:0x68d346c7>], :response=>{"index"=>{"_index"=>"syslog-2019.01.27", "_type"=>"messages", "_id"=>"7UgplWgB9lgozaqE-10s", "status"=>404, "error"=>{"type"=>"index_not_found_exception", "reason"=>"no such index", "index_uuid"=>"_na_", "index"=>"syslog-2019.01.27"}}}}
[2019-01-28T07:54:03,220][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>404, :action=>["index", {:_id=>nil, :_index=>"syslog-2019.01.27", :_type=>"messages", :routing=>nil}, #<LogStash::Event:0x6f6caed1>], :response=>{"index"=>{"_index"=>"syslog-2019.01.27", "_type"=>"messages", "_id"=>"70gplWgB9lgozaqE-10s", "status"=>404, "error"=>{"type"=>"index_not_found_exception", "reason"=>"no such index", "index_uuid"=>"_na_", "index"=>"syslog-2019.01.27"}}}}
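For the 404s, my plan was to check from the Logstash box whether the syslog index actually exists and whether action.auto_create_index has been restricted in the cluster settings, roughly like this (myelk01 is my Elasticsearch node):

curl -XGET 'http://myelk01:9200/_cat/indices/syslog-*?v'
curl -XGET 'http://myelk01:9200/_cluster/settings?pretty'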
A bit later in the same log I also see this warning (it appears twice):
[2019-01-28T07:55:43,499][WARN ][logstash.codecs.plain ] Received an event that has a different character encoding than you configured. {:text=>"Jan 27 14:14:35 ephjobsrv `\\xF4Y\\xC8-\\u007F[4774]: 4]: E.10 ETL (FYIFactIncrementalLoad_Quarterly_Unix_OD) succeeded#012#012E.10 ETL (FYIFactIncrementalLoad_Quarterly_Unix_OD) succeeded.#012", :expected_charset=>"UTF-8"}
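I assume the charset warnings mean that some of the files under /data/SYSTEMS/ are not valid UTF-8. Do I need to set the charset on the file input's codec, something like the sketch below? (ISO-8859-1 is only a guess; I have not confirmed the actual encoding of those files.)

file {
  path => [ "/data/SYSTEMS/*/messages.log" ]
  start_position => "beginning"
  sincedb_path => "/dev/null"
  type => "syslog"
  codec => plain { charset => "ISO-8859-1" }   # guessed encoding, needs to be verified
}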
My Logstash pipelines:
[root@myelk04 ~]# cat /etc/logstash/conf.d/rmlogs.conf
input {
  file {
    path => [ "/data/rmlogs_SJ/*.txt" ]
    start_position => "beginning"
    sincedb_path => "/dev/null"
    type => "rmlog"
  }
}
filter {
  if [type] == "rmlog" {
    grok {
      match => { "message" => "%{HOSTNAME:Hostname},%{DATE:Date},%{HOUR:Hour_since}:%{MINUTE:Mins_since},%{NUMBER}-%{WORD},%{USER:User_1},%{USER:User_2} %{NUMBER:Pid} %{NUMBER:float} %{NUMBER:float} %{NUMBER:Num_1} %{NUMBER:Num_2} %{DATA} %{HOUR:hour2}:%{MINUTE:minute2} %{HOUR:hour3}:%{MINUTE:minute3} %{GREEDYDATA:Command}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      remove_field => [ "path","minute3","minute2","host","hour2","hour3","Num_1","Num_2","message" ]
    }
  }
}
output {
  if [type] == "rmlog" {
    elasticsearch {
      hosts => ["myelk01:9200"]
      manage_template => false
      index => "rmlog-%{+YYYY.MM.dd}"
      document_type => "messages"
    }
  }
}
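If it helps with debugging, I can test the rmlog grok by hand with a throwaway pipeline like this (stdin in, rubydebug out; /tmp/rmlog-test.conf is just a scratch file name), pasting a sample line from one of the /data/rmlogs_SJ/*.txt files and checking the parsed fields:

# /tmp/rmlog-test.conf - with the filter {} block from rmlogs.conf copied in between
input  { stdin { type => "rmlog" } }
output { stdout { codec => rubydebug } }

and then run it with /usr/share/logstash/bin/logstash -f /tmp/rmlog-test.conf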
[root@myelk04 ~]# cat /etc/logstash/conf.d/syslog.conf
input {
  file {
    path => [ "/data/SYSTEMS/*/messages.log" ]
    start_position => "beginning"
    sincedb_path => "/dev/null"
    type => "syslog"
  }
}
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      remove_field => ["@version", "host", "message", "_type", "_index", "_score", "path"]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}
output {
  if [type] == "syslog" {
    elasticsearch {
      hosts => "myelk01:9200"
      manage_template => false
      index => "syslog-%{+YYYY.MM.dd}"
      document_type => "messages"
    }
  }
}
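One more thing I was considering: routing any event that fails the syslog grok to a file so I can see exactly what is coming in. Something along these lines (the output path is just an example for debugging):

output {
  if "_grokparsefailure" in [tags] {
    file {
      path => "/var/log/logstash/syslog_grok_failures.log"   # example path, debugging only
    }
  }
}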
For what it's worth, I have already raised the open-file limit for the logstash user at the system level. I'm running Red Hat Enterprise Linux 7.
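This is roughly how I set it for the CLI run (the 65536 value and the file name are just what I used, nothing official):

# /etc/security/limits.d/logstash.conf  (affects the shell/CLI run)
logstash  soft  nofile  65536
logstash  hard  nofile  65536

I'm not sure whether the systemd service also needs its own LimitNOFILE override, since systemd services don't go through limits.conf. Something like this, followed by systemctl daemon-reload and a restart?

# /etc/systemd/system/logstash.service.d/override.conf  (affects the systemd service)
[Service]
LimitNOFILE=65536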