Sample log file (JSON records; one object per event):
{
"id": "AAA",
"num": "-17.3595",
"num": "-145.494",
"name": "Anaa",
"author": "Anaaauthor",
"place": "Tuamotu-Gambier",
"landmark": "French Polynesia",
"spcid": "12512819",
"target": "Pacific/Midway",
"mob": "",
"type": "plats",
"email": "",
"url": "",
"length": "4646",
"emev": "7",
"isma": "NTGA",
"move": "2",
},
{
"id": "AAA",
"num": "-17.3595",
"num": "-145.494",
"name": "Anaa",
"author": "Anaaauthor",
"place": "Tuamotu-Gambier",
"landmark": "French Polynesia",
"spcid": "12512819",
"target": "Pacific/Midway",
"mob": "",
"type": "plats",
"email": "",
"url": "",
"length": "4646",
"emev": "7",
"isma": "NTGA",
"move": "2",
},
{
"id": "AAA",
"num": "-17.3595",
"num": "-145.494",
"name": "Anaa",
"author": "Anaaauthor",
"place": "Tuamotu-Gambier",
"landmark": "French Polynesia",
"spcid": "12512819",
"target": "Pacific/Midway",
"mob": "",
"type": "plats",
"email": "",
"url": "",
"length": "4646",
"emev": "7",
"isma": "NTGA",
"move": "2",
},
… (more records follow, all with the same structure)
filebeat input
type: log
processors:
- add_tags:
tags: [jsonLogs]
- decode_json_fields:
fields: ["message"]
target: "data"
enabled: true # Change to true to enable this input configuration.
paths:
- C:\elk\jenkins\Json* # Paths that should be crawled and fetched. Glob based paths.
multiline.type: pattern
multiline.pattern: '^{'
multiline.negate: true
multiline.match: after
multiline.max_lines: 1000000
logstash input
input {
beats {
port => 5044
codec => "json"
}
}
filter {
if "jsonLogs" in [tags] {
json {
source => "message"
}
split {
field => "message"
}
output {
elasticsearch {
hosts => ["localhost:9200"]
index => "jsonlog"
}
stdout { codec => rubydebug }