Need different index for each log file path.
1> access log in access.conf file -> same server
2> csv file in task_engine.csv -> same server
Need to have task_engine.csv file in different index so that I can pull it in machine learning .
Issue: I am not getting task_engine.csv file at all, when I start manually it works but I am seeing acecess logs as well in the same index. I need it to work without manual start, more over need to have task_engine.csv in different index so I can use it in machine learning.
OS: RHEL 7
Logstash version: logstash-6.7.1-1.noarch
=====================================================================
pwd
/etc/logstash/conf.d
#cat access.conf
input {
file {
path => "/var/log/access_log"
path => "/var/log/access_log**122019"
start_position => "beginning"
}
}
filter {
if [path] =~ "access" {
mutate { replace => { "type" => "apache_access" } }
mutate { remove_field => [ "tags", "type", "_type", "_score" ] }
grok {
match => [ "message", "%{IP:client_ip} %{USER:ident} %{USER:auth} [%{HTTPDATE:apache_timestamp}] "%{WORD:method} /%{NOTSPACE:request_page} HTTP/%{NUMBER:http_version}" %{NUMBER:server_response} (?:%{NUMBER:bytes}|-)" ]
}
}
date {
match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
}
geoip {
source => "clientip"
}
}
output {
elasticsearch {
hosts => ["Same_IP:9200"]
index => "logstash-prod-accesslog-%{+YYYY.MM.dd}"
}
stdout { codec => rubydebug }
}
pwd
/etc/logstash/conf.d
cat tasks_engine.conf
input {
file {
path => "/var/log/task_engine.csv"
start_position => "beginning"
tags => [ "task" ]
}
}
filter {
csv {
separator => ";"
columns => ["process_id" , "task_id" , "status" , "created_iso" , "created" , "week" , "year" , "updated_iso" , "user" , "project" , "region" , "process" , "hostname" , "error_message" , "extended_task_name"]
}
}