Logstash as a service config not working

Trying to get a new install of Logstash running as a service on Ubuntu 18.04 LTS. I can start it fine as root, but it won't start via systemctl.

/etc/systemd/system/logstash.service:

[Unit]
Description=logstash

[Service]
Type=simple
User=logstash
Group=logstash

# Load env vars from /etc/default/ and /etc/sysconfig/ if they exist.
# Prefixing the path with '-' makes it try to load, but if the file doesn't
# exist, it continues onward.
EnvironmentFile=-/etc/default/logstash
EnvironmentFile=-/etc/sysconfig/logstash
ExecStart=/usr/share/logstash/bin/logstash "--path.settings=/etc/logstash/"
Restart=always
WorkingDirectory=/
Nice=19
LimitNOFILE=16384

[Install]
WantedBy=multi-user.target

Trying to start it with systemctl creates no entries in /var/log/logstash/logstash-plain.log, and status shows:

root@uh-es-01://usr/share/logstash# systemctl status logstash
● logstash.service - logstash
Loaded: loaded (/etc/systemd/system/logstash.service; enabled; vendor preset: enabled)
Active: failed (Result: exit-code) since Tue 2020-02-18 11:23:03 EST; 13min ago
Main PID: 1636 (code=exited, status=1/FAILURE)

Feb 18 11:23:03 uh-es-01 systemd[1]: logstash.service: Service hold-off time over, scheduling restart.
Feb 18 11:23:03 uh-es-01 systemd[1]: logstash.service: Scheduled restart job, restart counter is at 5.
Feb 18 11:23:03 uh-es-01 systemd[1]: Stopped logstash.
Feb 18 11:23:03 uh-es-01 systemd[1]: logstash.service: Start request repeated too quickly.
Feb 18 11:23:03 uh-es-01 systemd[1]: logstash.service: Failed with result 'exit-code'.
Feb 18 11:23:03 uh-es-01 systemd[1]: Failed to start logstash.

I'm sure this is a permissions issue, but I'm stuck as to where to look....
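Since nothing reaches logstash-plain.log before the unit dies, whatever Logstash printed on startup should be in the systemd journal instead; standard systemd tooling is enough to see it (the manual launch below assumes the stock package paths):

# show the last output systemd captured for the unit, including anything
# Logstash wrote to stdout/stderr before exiting
journalctl -u logstash --no-pager -n 100

# reproduce the failure by hand as the service user
sudo -u logstash /usr/share/logstash/bin/logstash --path.settings /etc/logstash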

Hi @sdkeslar,

sharing your Logstash conf might help. 🙂

My first suspects would be log files and lock files. These are the files the Logstash process has open on one of my nodes:

root @ logstash1.example.net ~ # lsof -p 2597 | grep REG | grep .log$
java    2597 logstash   56r      REG              253,0   8604637    408740 /var/dead_letter_queue/main/2777.log
java    2597 logstash   89w      REG              253,0   8604637    408740 /var/dead_letter_queue/main/2777.log
java    2597 logstash  342w      REG              253,0   5944830    405891 /var/log/logstash/logstash-plain.log
root @ logstash1.example.net ~ # lsof -p 2597 | grep REG | grep lock
java    2597 logstash   50wW     REG              253,0         0    394130 /var/lib/logstash/.lock
java    2597 logstash   51wW     REG              253,0         0    405876 /var/dead_letter_queue/main/.lock

So, I would check ownership of those files or other files in those folders.
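Something along these lines will show the ownership at a glance (paths are the defaults from the .deb package, adjust if yours differ):

# directory ownership
ls -ld /etc/logstash /usr/share/logstash /var/lib/logstash /var/log/logstash
# individual files (queue, lock and log files) inside the data and log dirs
ls -l /var/lib/logstash /var/log/logstash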

logstash.conf:

input {
udp {
port => 514 ## change me to whatever you set your ASA syslog port to
type => "cisco-fw"
}
}

filter {
####### Cisco FW ####
if [type] == "cisco-fw" {
grok {
match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
}

# Parse the syslog severity and facility
syslog_pri { }

# Extract fields from each of the detailed message types
# The patterns provided below are included in the core of Logstash 1.2.0.

grok {
match => [
"cisco_message", "%{CISCOFW106001}",
"cisco_message", "%{CISCOFW106006_106007_106010}",
"cisco_message", "%{CISCOFW106014}",
"cisco_message", "%{CISCOFW106015}",
"cisco_message", "%{CISCOFW106021}",
"cisco_message", "%{CISCOFW106023}",
"cisco_message", "%{CISCOFW106100}",
"cisco_message", "%{CISCOFW110002}",
"cisco_message", "%{CISCOFW302010}",
"cisco_message", "%{CISCOFW302013_302014_302015_302016}",
"cisco_message", "%{CISCOFW302020_302021}",
"cisco_message", "%{CISCOFW305011}",
"cisco_message", "%{CISCOFW313001_313004_313008}",
"cisco_message", "%{CISCOFW313005}",
"cisco_message", "%{CISCOFW402117}",
"cisco_message", "%{CISCOFW402119}",
"cisco_message", "%{CISCOFW419001}",
"cisco_message", "%{CISCOFW419002}",
"cisco_message", "%{CISCOFW500004}",
"cisco_message", "%{CISCOFW602303_602304}",
"cisco_message", "%{CISCOFW710001_710002_710003_710005_710006}",
"cisco_message", "%{CISCOFW713172}",
"cisco_message", "%{CISCOFW733100}",
"message", "%IP"
]
}

geoip {
source => "src_ip"
}

# Parse the date

date {
match => ["timestamp",
"MMM dd HH:mm:ss",
"MMM d HH:mm:ss",
"MMM dd yyyy HH:mm:ss",
"MMM d yyyy HH:mm:ss"
]
}
}

###### End of Cisco FW #######

}

output {

# stdout {
#codec => json
#}

elasticsearch {
ssl => true
hosts => ["https://10.101.100.93:9200"] # change me to the IP of your elasticsearch server
user => elastic
password => xxxxxxxxxxxxxxxxxxxxxxxx
cacert => "/etc/elasticsearch/certs/ca.crt"
}
}

I don't have /var/dead_letter_queue, but /var/log/logstash and /var/lib/logstash are both owned by logstash, and that's the user and group in the unit file:

[Unit]
Description=logstash

[Service]
Type=simple
User=logstash
Group=logstash

# Load env vars from /etc/default/ and /etc/sysconfig/ if they exist.
# Prefixing the path with '-' makes it try to load, but if the file doesn't
# exist, it continues onward.
EnvironmentFile=-/etc/default/logstash
EnvironmentFile=-/etc/sysconfig/logstash
ExecStart=/usr/share/logstash/bin/logstash "--path.settings=/etc/logstash/"
Restart=always
WorkingDirectory=/
Nice=19
LimitNOFILE=16384

[Install]
WantedBy=multi-user.target

/etc/logstash and /usr/share/logstash are both owned by logstash as well....
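Running it as root first can also leave root-owned files behind under the data and log directories, which then block the service user. A quick check for strays, assuming the default paths:

# list anything under the data and log dirs that is not owned by logstash
find /var/lib/logstash /var/log/logstash ! -user logstash -ls
# and, if anything shows up, hand it back to the service user
chown -R logstash:logstash /var/lib/logstash /var/log/logstash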

Hi @sdkeslar,

could you please use Preformatted text for the config? It's the </> button in the text formatting menu.

sorry..

[Unit]
Description=logstash

[Service]
Type=simple
User=logstash
Group=logstash
# Load env vars from /etc/default/ and /etc/sysconfig/ if they exist.
# Prefixing the path with '-' makes it try to load, but if the file doesn't
# exist, it continues onward.
EnvironmentFile=-/etc/default/logstash
EnvironmentFile=-/etc/sysconfig/logstash
ExecStart=/usr/share/logstash/bin/logstash "--path.settings" "/etc/logstash"
Restart=always
WorkingDirectory=/
Nice=19
LimitNOFILE=16384

[Install]
WantedBy=multi-user.target




input {
 udp { 
 port => 514 ## change me to whatever you set your ASA syslog port to
 type => "cisco-fw"
 }
}

filter {
 ####### Cisco FW ####
 if [type] == "cisco-fw" {
 grok {
 match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
 }
 # Parse the syslog severity and facility
 syslog_pri { }

 # Extract fields from each of the detailed message types
 # The patterns provided below are included in the core of Logstash 1.2.0.
 grok {
 match => [
 "cisco_message", "%{CISCOFW106001}",
 "cisco_message", "%{CISCOFW106006_106007_106010}",
 "cisco_message", "%{CISCOFW106014}",
 "cisco_message", "%{CISCOFW106015}",
 "cisco_message", "%{CISCOFW106021}",
 "cisco_message", "%{CISCOFW106023}",
 "cisco_message", "%{CISCOFW106100}",
 "cisco_message", "%{CISCOFW110002}",
 "cisco_message", "%{CISCOFW302010}",
 "cisco_message", "%{CISCOFW302013_302014_302015_302016}",
 "cisco_message", "%{CISCOFW302020_302021}",
 "cisco_message", "%{CISCOFW305011}",
 "cisco_message", "%{CISCOFW313001_313004_313008}",
 "cisco_message", "%{CISCOFW313005}",
 "cisco_message", "%{CISCOFW402117}",
 "cisco_message", "%{CISCOFW402119}",
 "cisco_message", "%{CISCOFW419001}",
 "cisco_message", "%{CISCOFW419002}",
 "cisco_message", "%{CISCOFW500004}",
 "cisco_message", "%{CISCOFW602303_602304}",
 "cisco_message", "%{CISCOFW710001_710002_710003_710005_710006}",
 "cisco_message", "%{CISCOFW713172}",
 "cisco_message", "%{CISCOFW733100}",
 "message", "%IP"
 ]
 }

geoip {
source => "src_ip" 
}


 # Parse the date
 date {
 match => ["timestamp",
 "MMM dd HH:mm:ss",
 "MMM d HH:mm:ss",
 "MMM dd yyyy HH:mm:ss",
 "MMM d yyyy HH:mm:ss"
 ]
 }
 }
 ###### End of Cisco FW #######
}

output {
# stdout { 
#codec => json
#}

 elasticsearch {
 ssl => true
 hosts => ["https://10.101.100.93:9200"] # change me to the IP of your elasticsearch server
 user => elastic
 password => xxxxxxxxxxxxxxxxxxxxxxxxx
 cacert => "/etc/elasticsearch/certs/ca.crt" 
 }
}
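To confirm the pipeline config itself parses, independent of the service problem, Logstash's own -t / --config.test_and_exit flag can be run as the service user (paths are the package defaults):

# parse and validate the pipeline config, then exit without starting it
sudo -u logstash /usr/share/logstash/bin/logstash --path.settings /etc/logstash -t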

AHA! Found it. I had to explicitly specify JAVA_HOME in /etc/default/logstash. Now I just need to get ufw to redirect incoming syslog from port 514 to a higher, unprivileged port, since I'm not running Logstash as root.
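For reference, both follow-ups come down to a couple of lines. The JDK path and the 5514 listener port below are placeholders for whatever the actual install uses, and since ufw has no redirect command of its own, the NAT rule goes either straight through iptables or into the *nat section of /etc/ufw/before.rules:

# /etc/default/logstash -- tell the service which JVM to use
JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64

# redirect inbound syslog from privileged 514 to an unprivileged port,
# then set port => 5514 in the udp input instead of 514
iptables -t nat -A PREROUTING -p udp --dport 514 -j REDIRECT --to-port 5514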
