Errors running logstash, bit of a newb

Trying to set logstash up properly but I'm getting the following message after running it with my config file:
Attempting to install template {:manage_template=>{"template"=>"logstash-*", "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "omit_norms"=>true}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"string", "index"=>"analyzed", "omit_norms"=>true, "fielddata"=>{"format"=>"disabled"}}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"string", "index"=>"analyzed", "omit_norms"=>true, "fielddata"=>{"format"=>"disabled"}, "fields"=>{"raw"=>{"type"=>"string", "index"=>"not_analyzed", "doc_values"=>true, "ignore_above"=>256}}}}}, {"float_fields"=>{"match"=>"*", "match_mapping_type"=>"float", "mapping"=>{"type"=>"float", "doc_values"=>true}}}, {"double_fields"=>{"match"=>"*", "match_mapping_type"=>"double", "mapping"=>{"type"=>"double", "doc_values"=>true}}}, {"byte_fields"=>{"match"=>"*", "match_mapping_type"=>"byte", "mapping"=>{"type"=>"byte", "doc_values"=>true}}}, {"short_fields"=>{"match"=>"*", "match_mapping_type"=>"short", "mapping"=>{"type"=>"short", "doc_values"=>true}}}, {"integer_fields"=>{"match"=>"*", "match_mapping_type"=>"integer", "mapping"=>{"type"=>"integer", "doc_values"=>true}}}, {"long_fields"=>{"match"=>"*", "match_mapping_type"=>"long", "mapping"=>{"type"=>"long", "doc_values"=>true}}}, {"date_fields"=>{"match"=>"*", "match_mapping_type"=>"date", "mapping"=>{"type"=>"date", "doc_values"=>true}}}, {"geo_point_fields"=>{"match"=>"*", "match_mapping_type"=>"geo_point", "mapping"=>{"type"=>"geo_point", "doc_values"=>true}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "doc_values"=>true}, "@version"=>{"type"=>"string", "index"=>"not_analyzed", "doc_values"=>true}, "geoip"=>{"type"=>"object", "dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip", "doc_values"=>true}, "location"=>{"type"=>"geo_point", "doc_values"=>true}, "latitude"=>{"type"=>"float", "doc_values"=>true}, "longitude"=>{"type"=>"float", "doc_values"=>true}}}}}}}}

I ran a config test against the file and its results came back OK.
I'm a bit of a newb with this, so please advise. Thanks.
I'm hosting Elasticsearch on a different host computer than Logstash; Logstash can connect to it over port 9200 just fine, but it can't seem to send the data.
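For reference, the config test I ran was roughly this (the config file path here is a placeholder):

# point -f at wherever your config actually lives
/opt/logstash-5.2.2/bin/logstash -f /path/to/bro.conf --config.test_and_exit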

That's not an error, it's just letting you know what it's doing.
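Specifically, it's the elasticsearch output installing its index template at startup, which it does whenever manage_template is enabled (the default). If you ever want Logstash to leave templates alone, that's just an option on the output — a minimal sketch, keeping the rest of your settings as they are:

elasticsearch {
  hosts => ["10.0.2.159:9200"]
  manage_template => false   # don't install or overwrite the index template
}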

What does your config look like?

This is part of the Logstash config file:
input {

  # Production Logs #############################
  file {
    type => "BRO_httplog"
    path => "/var/log/.bro_http_sincedb"
    sincedb_path => "/var/log/.bro_http_sincedb"
  }
  file {
    type => "BRO_known_certslog"
    path => "/var/log/.bro_known_certs_sincedb"
    sincedb_path => "/var/log/.bro_known_certs_sincedb"
  }
  file {
    type => "BRO_noticelog"
    path => "/var/log/.bro_notice_sincedb"
    sincedb_path => "/var/log/.bro_notice_sincedb"
  }
  file {
    type => "BRO_known_hostslog"
    path => "/var/log/.bro_known_hosts_sincedb"
    sincedb_path => "/var/log/.bro_known_hosts_sincedb"
  }
  file {
    type => "BRO_known_serviceslog"
    path => "/var/log/.bro_known_services_sincedb"
    sincedb_path => "/var/log/.bro_known_services_sincedb"
  }
  file {
    type => "BRO_sshlog"
    path => "/var/log/.bro_ssh_sincedb"
    sincedb_path => "/var/log/.bro_ssh_sincedb"
  }
  file {
    type => "BRO_dpdlog"
    path => "/var/log/.bro_dpd_sincedb"
    sincedb_path => "/var/log/.bro_dpd_sincedb"
  }
  file {
    type => "BRO_connlog"
    path => "/var/log/.bro_conn_sincedb"
    sincedb_path => "/var/log/.bro_conn_sincedb"
  }
  file {
    type => "BRO_weirdlog"
    path => "/var/log/.bro_weird_sincedb"
    sincedb_path => "/var/log/.bro_weird_sincedb"
  }
  file {
    type => "BRO_app_statslog"
    path => "/var/log/.bro_appstats_sincedb"
    sincedb_path => "/var/log/.bro_appstats_sincedb"
  }
  file {
    type => "BRO_dhcplog"
    path => "/var/log/.bro_dhcp_sincedb"
    sincedb_path => "/var/log/.bro_dhcp_sincedb"
  }
  file {
    type => "BRO_fileslog"
    path => "/var/log/.bro_files_sincedb"
    sincedb_path => "/var/log/.bro_files_sincedb"
  }
  file {
    type => "BRO_ssllog"
    path => "/var/log/.bro_ssl_sincedb"
    sincedb_path => "/var/log/.bro_ssl_sincedb"
  }
  file {
    type => "BRO_noticelog"
    path => "/var/log/.bro_notice_sincedb"
    sincedb_path => "/var/log/.bro_notice_sincedb"
  }
  file {
    type => "BRO_softwarelog"
    path => "/var/log/.bro_software_sincedb"
    sincedb_path => "/var/log/.bro_software_sincedb"
  }
  file {
    type => "BRO_dnslog"
    path => "/var/log/.bro_dns_sincedb"
    sincedb_path => "/var/log/.bro_dns_sincedb"
  }
  file {
    type => "BRO_intellog"
    path => "/var/log/.bro_intel_sincedb"
    sincedb_path => "/var/log/.bro_intel_sincedb"
  }
}

filter {
  if [message] =~ /^#/ {
    drop { }
  }
  else {

# BRO_app_statslog ######################
  if [type] == "BRO_app_statslog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<ts_delta>(.*?))\t(?<app>(.*?))\t(?<uniq_hosts>(.*?))\t(?<hits>(.*?))\t(?<bytes>(.*))" ]
    }
  }

# BRO_connlog ######################
  if [type] == "BRO_connlog" {
    grok {
      match => [
        "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<service>(.*?))\t(?<duration>(.*?))\t(?<orig_bytes>(.*?))\t(?<resp_bytes>(.*?))\t(?<conn_state>(.*?))\t(?<local_orig>(.*?))\t(?<missed_bytes>(.*?))\t(?<history>(.*?))\t(?<orig_pkts>(.*?))\t(?<orig_ip_bytes>(.*?))\t(?<resp_pkts>(.*?))\t(?<resp_ip_bytes>(.*?))\t(?<tunnel_parents>(.*?))\t(?<orig_cc>(.*?))\t(?<resp_cc>(.*?))\t(?<sensorname>(.*))",
        "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<service>(.*?))\t(?<duration>(.*?))\t(?<orig_bytes>(.*?))\t(?<resp_bytes>(.*?))\t(?<conn_state>(.*?))\t(?<local_orig>(.*?))\t(?<missed_bytes>(.*?))\t(?<history>(.*?))\t(?<orig_pkts>(.*?))\t(?<orig_ip_bytes>(.*?))\t(?<resp_pkts>(.*?))\t(?<resp_ip_bytes>(.*?))\t(%{NOTSPACE:tunnel_parents})"
      ]
    }
  }

# BRO_noticelog ######################
  if [type] == "BRO_noticelog" {
    grok { 
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<fuid>(.*?))\t(?<file_mime_type>(.*?))\t(?<file_desc>(.*?))\t(?<proto>(.*?))\t(?<note>(.*?))\t(?<msg>(.*?))\t(?<sub>(.*?))\t(?<src>(.*?))\t(?<dst>(.*?))\t(?<p>(.*?))\t(?<n>(.*?))\t(?<peer_descr>(.*?))\t(?<actions>(.*?))\t(?<suppress_for>(.*?))\t(?<dropped>(.*?))\t(?<remote_location.country_code>(.*?))\t(?<remote_location.region>(.*?))\t(?<remote_location.city>(.*?))\t(?<remote_location.latitude>(.*?))\t(?<remote_location.longitude>(.*))" ]
    }
  }


# BRO_dhcplog ######################
  if [type] == "BRO_dhcplog" {
    grok { 
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<mac>(.*?))\t(?<assigned_ip>(.*?))\t(?<lease_time>(.*?))\t(?<trans_id>(.*))" ]
    }
  }

# BRO_dnslog ######################
  if [type] == "BRO_dnslog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<trans_id>(.*?))\t(?<query>(.*?))\t(?<qclass>(.*?))\t(?<qclass_name>(.*?))\t(?<qtype>(.*?))\t(?<qtype_name>(.*?))\t(?<rcode>(.*?))\t(?<rcode_name>(.*?))\t(?<AA>(.*?))\t(?<TC>(.*?))\t(?<RD>(.*?))\t(?<RA>(.*?))\t(?<Z>(.*?))\t(?<answers>(.*?))\t(?<TTLs>(.*?))\t(?<rejected>(.*))" ]
    }
  }

# BRO_softwarelog ######################
  if [type] == "BRO_softwarelog" {
    grok { 
      match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*?))\t(?<host_p>(.*?))\t(?<software_type>(.*?))\t(?<name>(.*?))\t(?<version.major>(.*?))\t(?<version.minor>(.*?))\t(?<version.minor2>(.*?))\t(?<version.minor3>(.*?))\t(?<version.addl>(.*?))\t(?<unparsed_version>(.*))" ]
    }
  }

# BRO_dpdlog ######################
  if [type] == "BRO_dpdlog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<analyzer>(.*?))\t(?<failure_reason>(.*))" ]
    }
  }

# BRO_fileslog ######################
  if [type] == "BRO_fileslog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<fuid>(.*?))\t(?<tx_hosts>(.*?))\t(?<rx_hosts>(.*?))\t(?<conn_uids>(.*?))\t(?<source>(.*?))\t(?<depth>(.*?))\t(?<analyzers>(.*?))\t(?<mime_type>(.*?))\t(?<filename>(.*?))\t(?<duration>(.*?))\t(?<local_orig>(.*?))\t(?<is_orig>(.*?))\t(?<seen_bytes>(.*?))\t(?<total_bytes>(.*?))\t(?<missing_bytes>(.*?))\t(?<overflow_bytes>(.*?))\t(?<timedout>(.*?))\t(?<parent_fuid>(.*?))\t(?<md5>(.*?))\t(?<sha1>(.*?))\t(?<sha256>(.*?))\t(?<extracted>(.*))" ]
    }
  }

This is the other part of the config:

# BRO_httplog ######################

  if [type] == "BRO_httplog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<trans_depth>(.*?))\t(?<method>(.*?))\t(?<bro_host>(.*?))\t(?<uri>(.*?))\t(?<referrer>(.*?))\t(?<user_agent>(.*?))\t(?<request_body_len>(.*?))\t(?<response_body_len>(.*?))\t(?<status_code>(.*?))\t(?<status_msg>(.*?))\t(?<info_code>(.*?))\t(?<info_msg>(.*?))\t(?<filename>(.*?))\t(?<http_tags>(.*?))\t(?<username>(.*?))\t(?<password>(.*?))\t(?<proxied>(.*?))\t(?<orig_fuids>(.*?))\t(?<orig_mime_types>(.*?))\t(?<resp_fuids>(.*?))\t(?<resp_mime_types>(.*))" ]
    }
  }

# BRO_known_certslog ######################
  if [type] == "BRO_known_certslog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*?))\t(?<port_num>(.*?))\t(?<subject>(.*?))\t(?<issuer_subject>(.*?))\t(?<serial>(.*))" ]
    }
  }

# BRO_known_hostslog ######################
  if [type] == "BRO_known_hostslog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*))" ]
    }
  }

# BRO_known_serviceslog ######################
  if [type] == "BRO_known_serviceslog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*?))\t(?<port_num>(.*?))\t(?<port_proto>(.*?))\t(?<service>(.*))" ]
    }
  }

# BRO_sshlog ######################
  if [type] == "BRO_sshlog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<status>(.*?))\t(?<direction>(.*?))\t(?<client>(.*?))\t(?<server>(.*?))\t(?<remote_location.country_code>(.*?))\t(?<remote_location.region>(.*?))\t(?<remote_location.city>(.*?))\t(?<remote_location.latitude>(.*?))\t(?<remote_location.longitude>(.*))" ]
    }
  }

# BRO_ssllog ######################
  if [type] == "BRO_ssllog" {
    grok {
      match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<version>(.*?))\t(?<cipher>(.*?))\t(?<server_name>(.*?))\t(?<session_id>(.*?))\t(?<subject>(.*?))\t(?<issuer_subject>(.*?))\t(?<not_valid_before>(.*?))\t(?<not_valid_after>(.*?))\t(?<last_alert>(.*?))\t(?<client_subject>(.*?))\t(?<client_issuer_subject>(.*?))\t(?<cert_hash>(.*?))\t(?<validation_status>(.*))" ]
    }
  }

# BRO_weirdlog ######################
if [type] == "BRO_weirdlog" {
	grok {
		match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<name>(.*?))\t(?<addl>(.*?))\t(?<notice>(.*?))\t(?<peer>(.*))" ]
    	}
}
if [type]== "BRO_intellog" {
  grok {
    match => [ "message", "(?<ts>(.*?))\t%{DATA:uid}\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t%{DATA:fuid}\t%{DATA:file_mime_type}\t%{DATA:file_desc}\t(?<seen.indicator>(.*?))\t(?<seen.indicator_type>(.*?))\t(?<seen.where>(.*?))\t%{NOTSPACE:sources}" ]
 }

}
}
  # Bro writes ts as a Unix epoch timestamp, so parse it with the UNIX pattern
  date {
    match => [ "ts", "UNIX" ]
  }
}
filter {
  if [bro_host] {
    mutate {
      replace => [ "host", "%{bro_host}" ]
    }
  }
}
filter {
  if "BRO" in [type] {
    if [id.orig_h] {
      mutate {
        add_field => [ "senderbase_lookup", "http://www.senderbase.org/lookup/?search_string=%{id.orig_h}" ]
        add_field => [ "CBL_lookup", "http://cbl.abuseat.org/lookup.cgi?ip=%{id.orig_h}" ]
        add_field => [ "Spamhaus_lookup", "http://www.spamhaus.org/query/bl?ip=%{id.orig_h}" ]
      }
    }
    mutate {
      add_tag => [ "BRO" ]
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "orig_bytes", "integer" ]
      convert => [ "resp_bytes", "integer" ]
      convert => [ "missed_bytes", "integer" ]
      convert => [ "orig_pkts", "integer" ]
      convert => [ "orig_ip_bytes", "integer" ]
      convert => [ "resp_pkts", "integer" ]
      convert => [ "resp_ip_bytes", "integer" ]
    }
  }
}
filter {
  if [type] == "BRO_connlog" {
    # Use the translate filter (logstash contrib) to convert conn_state into
    # human-readable text. Saves having to look up values for packet introspection.
    translate {
      field => "conn_state"
      destination => "conn_state_full"
      dictionary => [
        "S0", "Connection attempt seen, no reply",
        "S1", "Connection established, not terminated",
        "S2", "Connection established and close attempt by originator seen (but no reply from responder)",
        "S3", "Connection established and close attempt by responder seen (but no reply from originator)",
        "SF", "Normal SYN/FIN completion",
        "REJ", "Connection attempt rejected",
        "RSTO", "Connection established, originator aborted (sent a RST)",
        "RSTR", "Established, responder aborted",
        "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder",
        "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator",
        "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)",
        "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator",
        "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
      ]
    }
  }
}

Resolve @source_host to an FQDN where possible, for log types where it's missing, using the source host IP captured above:

filter {
  if [id.orig_h] {
    if ![id.orig_h-resolved] {
      mutate {
        add_field => [ "id.orig_h-resolved", "%{id.orig_h}" ]
      }
      dns {
        reverse => [ "id.orig_h-resolved" ]
        action => "replace"
      }
    }
  }
}
filter {
  if [id.resp_h] {
    if ![id.resp_h-resolved] {
      mutate {
        add_field => [ "id.resp_h-resolved", "%{id.resp_h}" ]
      }
      dns {
        reverse => [ "id.resp_h-resolved" ]
        action => "replace"
      }
    }
  }
}

output {
  elasticsearch {
    hosts => ["10.0.2.159:9200"]
    #bind_host => "192.168.1.191"
    # sprintf syntax uses braces, and dd (day of month) rather than DD (day of year)
    index => "logstash-%{+YYYY.MM.dd}"
    #protocol => ["http"]
    #port => 9200
    template_overwrite => true
    template => "/opt/logstash-5.2.2/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/elasticsearch-template-es2x.json"
    #cluster => ["logstash-cluster"]
    flush_size => 1   # flushes every single event; fine for debugging, slow in production
  }
  stdout { codec => rubydebug }
}

Your path option points at the same file as your sincedb_path, so Logstash is tailing the sincedb files instead of the actual Bro logs. That means nothing will ever be read.
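A minimal sketch of what one of those file inputs should look like instead — the log location here is an assumption, so point path at wherever Bro actually writes its logs:

file {
  type => "BRO_httplog"
  path => "/usr/local/bro/logs/current/http.log"   # the actual log file to tail (assumed location)
  sincedb_path => "/var/log/.bro_http_sincedb"     # bookkeeping only: tracks how far Logstash has read
}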


Thank you for the help.
