I'm trying to send Logstash data to BigQuery with this plugin:
The files are being written to the tmp_files folder, but no data is appearing in BigQuery, and I see no errors in the BigQuery console.
This is my logstash conf file:
# Accept events over HTTP on every interface, listening on port 31311.
input {
  http {
    host => "0.0.0.0"
    port => 31311
  }
}
filter
{
# NOTE(review): this grok matches against a field literally named "id".
# The http input does not create an "id" field by default, so unless an
# upstream sender supplies one, this grok will always fail and tag the
# event with _grokparsefailure. Presumably the intent was to extract the
# query string from [headers][request_uri] (as the kv filter below does);
# verify against the events you actually receive. Also note the trailing
# "?" sits outside the %{...} pattern and is matched literally.
grok {
match => { "id" => "%{URIPARAM:id}?" }
}
# Split the request URI's query string into individual event fields,
# treating both "&" and "?" as delimiters between key=value pairs.
kv
{
field_split => "&?"
source => "[headers][request_uri]"
}
}
output
{
# NOTE(review): this plugin uploads via batch load jobs built from the temp
# files — failures surface in the BigQuery *job history* (and the Logstash
# log), not as console errors, which matches the "files written but no data,
# no visible error" symptom. Likely cause: csv_schema declares only 6
# columns, but the events also carry @timestamp/@version, [headers], and
# whatever arbitrary keys the kv filter extracted; a load job whose rows
# contain undeclared columns is rejected wholesale. Confirm the schema
# against a rubydebug-printed event, and check the job history / Logstash
# log for the actual rejection reason. Also verify that the service_account
# email matches the abc.p12 key's account — an auth mismatch fails the same
# silent way.
google_bigquery
{
codec => "plain"
project_id => "..."
dataset => "logs"
csv_schema => "message:STRING,version:INTEGER,timestamp:TIMESTAMP,type:STRING,host:STRING,path:STRING"
key_path => "abc.p12"
service_account => "123-compute@developer.gserviceaccount.com"
temp_directory => "/tmp/logstash-bq"
temp_file_prefix => "logstash_bq"
date_pattern => "%Y-%m-%dT%H:00"
flush_interval_secs => 2
uploader_interval_secs => 15
deleter_interval_secs => 15
}
# Echo each processed event to stdout for debugging; comparing this output
# against csv_schema is the quickest way to spot the column mismatch.
stdout { codec => rubydebug }
}
What am I missing, and how can I fix it?