Google BigQuery output plugin

Hello! I have logs with this structure:

{
  "syslog_program" : "bla bla bla",
  "message" : "<14>Dec 8 18:45:50 bla bla bla",
  "type" : "syslog",
  "syslog_message" : "bla bla bla",
  "@timestamp" : "2017-12-08T17:45:50.000Z",
  "syslog_hostname" : "MOW-NB0036.horsgroup.com",
  "port" : 51328,
  "syslog_timestamp" : "Dec 8 18:45:50",
  "@version" : "1",
  "host" : "localhost",
  "ip_address" : "0.0.0.0",
  "@metdata" : { "ip_address" : "127.0.0.1" }
}

So I need to send these logs to BigQuery. I'm using this config file:

input {
  tcp {
    host => "127.0.0.1"
    port => 5000
    type => "syslog"
  }
}

filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
    }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
    mutate { remove_field => [ "[metdata][ip_address]", "metdata", "@metdata", "[@metdata][ip_adress]" ] }
  }
}

output {
  google_bigquery {
    project_id => "etl-to-bigquery"
    dataset => "LOGSTASH"
    csv_schema => "syslog_program:STRING,message:STRING,type:STRING,syslog_message:STRING,timestamp:STRING,syslog_hostname:STRING,port:INTEGER,syslog_timestamp:STRING,version:STRING,host:STRING"
    key_path => "/var/local/ETL-TO-BIGQUERY-64f6ab42e01e.p12"
    key_password => "notasecret"
    service_account => "google-cloud-sdk@etl-to-bigquery.iam.gserviceaccount.com"
    flush_interval_secs => 2
    uploader_interval_secs => 15
    deleter_interval_secs => 60
  }
}

But I keep getting an error about the @metdata field (I tried to remove it, but that doesn't work):

"message"=>"Error while reading data, error message: JSON parsing error in row starting at position 0: No such field: metdata."}]}

How can I add a RECORD field to my csv_schema? Any help?
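If the field should be kept rather than dropped, my understanding from the plugin documentation is that there is also a json_schema option which, unlike csv_schema, takes a nested hash, so a RECORD column might be expressible there. An untested sketch of what I mean (the metdata column name is a stand-in, since BigQuery column names may not contain "@", so the event field would have to be renamed first; only a few columns shown for brevity):

output {
  google_bigquery {
    project_id => "etl-to-bigquery"
    dataset => "LOGSTASH"
    key_path => "/var/local/ETL-TO-BIGQUERY-64f6ab42e01e.p12"
    key_password => "notasecret"
    service_account => "google-cloud-sdk@etl-to-bigquery.iam.gserviceaccount.com"
    # Untested sketch: json_schema instead of csv_schema, with a nested
    # RECORD column, assuming the plugin passes this hash through to
    # BigQuery's schema format.
    json_schema => {
      fields => [
        { "name" => "syslog_hostname" "type" => "STRING" },
        { "name" => "port" "type" => "INTEGER" },
        { "name" => "metdata" "type" => "RECORD" "fields" => [
            { "name" => "ip_address" "type" => "STRING" }
        ]}
      ]
    }
  }
}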
