Split logstash config into multiple files

I am just learning Elasticsearch and I need to know how to correctly split a Logstash configuration file into multiple files. I'm using the official Logstash Docker image with ports 9600 and 5044 bound. Originally I had a working single Logstash config file without conditionals, like so:

input {
  # Listen for incoming Beats (Filebeat/Metricbeat) connections.
  beats {
    port => "5044"
  }
}

filter {
  # Parse the Supportworks log line into structured fields, and pull
  # the application name out of the shipped file path.
  grok {
    match => {
      "message" => "%{TIMESTAMP_ISO8601:timestamp} \[(?<event_source>[\w\s]+)\]:\[(?<log_type>[\w\s]+)\]:\[(?<id>\d+)\] %{GREEDYDATA:details}"
      "source" => "%{GREEDYDATA}\\%{GREEDYDATA:app}.log"
    }
  }

  # Make the id numeric so Elasticsearch can run range queries on it.
  mutate {
    convert => { "id" => "integer" }
  }

  # Use the log's own timestamp as the event time, then drop the raw field.
  date {
    match => [ "timestamp", "ISO8601" ]
    locale => en
    remove_field => [ "timestamp" ]
  }
}


output {
  # Ship every parsed event to the single Elasticsearch node.
  elasticsearch {
    hosts => ["http://elastic:9200"]
    index => "logstash-supportworks"
  }
}

When I wanted to add metricbeat I decided to split that configuration into a new file. So I ended up with 3 files:

__input.conf

input {
  # Shared Beats listener for all pipeline files (Logstash concatenates
  # every .conf in the config directory into one pipeline).
  beats {
    port => "5044"
  }
}

metric.conf

# for testing I'm adding no filters just to see what the data looks like

# Route only Metricbeat events to their own versioned index.
# NOTE: Logstash field references must NOT be quoted — write
# [@metadata][beat], not ['@metadata']['beat']. The quoted form is
# never a valid field reference, so the original conditional never
# matched and every metricbeat event was silently dropped by this
# output. The compared value ("metricbeat") is a plain string and
# stays quoted.
output {
  if [@metadata][beat] == "metricbeat" {
    elasticsearch {
        hosts => ["http://elastic:9200"]
        index => "%{[@metadata][beat]}-%{[@metadata][version]}"
    }
  }
}

supportworks.conf

filter
{
	# Only parse events shipped from the Supportworks server log.
	# Two fixes versus the broken version:
	#  1. Field references are written bare ([source]), not quoted
	#     (["source"]) — the quoted form is not a field reference, so
	#     the conditional never fired and no event was filtered.
	#  2. The path in the events is "...\Supportworks Server\log\..."
	#     (with a trailing "s"), so the pattern must be
	#     "Supportworks Server"; /Supportwork Server/ cannot match it.
	# NOTE(review): if the Windows backslashes in [source] still trip
	# the =~ match, gsub them to forward slashes in a mutate first.
	if [source] =~ /Supportworks Server/ {
		grok{
			match => {
				"message" => "%{TIMESTAMP_ISO8601:timestamp} \[(?<event_source>[\w\s]+)\]:\[(?<log_type>[\w\s]+)\]:\[(?<id>\d+)\] %{GREEDYDATA:details}"
				"source" => "%{GREEDYDATA}\\%{GREEDYDATA:app}.log"
			}
		}
		mutate{
			convert => { "id" => "integer" }
		}
		date {
			match => [ "timestamp", "ISO8601" ]
			locale => en
			remove_field => "timestamp"
		}
	}
}


output
{
	# Index only Supportworks events here; same conditional fixes as
	# the filter: bare field reference ([source], not ["source"]) and
	# the regex must include the trailing "s" in "Supportworks" to
	# match the shipped path "...\Supportworks Server\log\swserver.log".
	if [source] =~ /Supportworks Server/ {
		elasticsearch {
			hosts => ["http://elastic:9200"]
			index => "logstash-supportworks"
		}
	}

}

Now no data is being sent to the Elasticsearch instance. I have verified that Filebeat is running and publishing messages, so I'd expect at least those events to show up in Elasticsearch. Here's a published message from my server running Filebeat:

2019-03-06T09:16:44.634-0800    DEBUG   [publish]       pipeline/processor.go:308       Publish event: {
  "@timestamp": "2019-03-06T17:16:44.634Z",
  "@metadata": {
    "beat": "filebeat",
    "type": "doc",
    "version": "6.6.1"
  },
  "source": "C:\\Program Files (x86)\\Hornbill\\Supportworks Server\\log\\swserver.log",
  "offset": 4773212,
  "log": {
    "file": {
      "path": "C:\\Program Files (x86)\\Hornbill\\Supportworks Server\\log\\swserver.log"
    }
  },
  "message": "2019-03-06 09:16:42 [COMMS]:[INFO ]:[4924] Helpdesk API (5005) Socket error while idle - 10053",
  "prospector": {
    "type": "log"
  },
  "input": {
    "type": "log"
  },
  "beat": {
    "name": "WIN-22VRRIEO8LM",
    "hostname": "WIN-22VRRIEO8LM",
    "version": "6.6.1"
  },
  "host": {
    "name": "WIN-22VRRIEO8LM",
    "architecture": "x86_64",
    "os": {
      "platform": "windows",
      "version": "6.3",
      "family": "windows",
      "name": "Windows Server 2012 R2 Standard",
      "build": "9600.0"
    },
    "id": "e5887ac2-6fbf-45ef-998d-e40437066f56"
  }
}

I managed to get this working by using the mutate plugin to replace backslashes in the path with forward slashes — the regex match in the conditional seemed to be tripping over the backslashes. I also removed the quotes from the field accessors (e.g. ["source"] became [source]).

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.