Hello!
I encountered an error when using Logstash to transfer Kafka data to Google BigQuery:
[ERROR][logstash.outputs.googlebigquery][main][30729410306cb4d98635a59cbf171cea3b769fd734b29b17e925785d4edac5ba] Error uploading data. {:exception=>com.google.cloud.bigquery.BigQueryException: Request payload size exceeds the limit: 10485760 bytes.}
This is my config:
output {
  if [database] == "t8891" {
    google_bigquery {
      project_id => "newcar8891"
      dataset => "logstash"
      csv_schema => "original:STRING,timestamp:TIMESTAMP,id:STRING,topic:STRING,consumer_group:STRING,partition:STRING,offset:STRING,key:STRING,database:STRING"
      json_key_file => "/home/shurui/bin/newcar8891-maxwell.json"
      error_directory => "/opt/module/bqerror"
      table_prefix => "logstash_t8891"
      date_pattern => "%Y-%m"
      batch_size => 6000
      flush_interval_secs => 10
      batch_size_bytes => 6000000
    }
  }
}