I am trying to index my data into an Elasticsearch cluster through Logstash. Currently the data lives in AWS Postgres (RDS), and I have to move it from Postgres to Elasticsearch.
Below is my Logstash config file (the option I suspect is shown on its own right after it):
input {
  jdbc {
    jdbc_connection_string => "jdbc:postgresql://dtacsqldb-ccms-postgresql-dtacqual-1a.cpzfa7z6lrue.us-east-1.rds.amazonaws.com/dtacqual"
    jdbc_user => "dtacuser"
    jdbc_password => "xyz"
    jdbc_driver_library => "/solr-index/Logstash/logstash-7.12.1/logstash-core/lib/jars/postgresql-jdbc.jar"
    jdbc_driver_class => "org.postgresql.Driver"
    schedule => "* * * * *" # cronjob schedule format (see "Helpful Links")
    #jdbc_paging_enabled => "true"
    #jdbc_page_size => "300"
    statement => "SELECT DISTINCT PYID AS "ID" FROM U90CCMWT.Marketing LIMIT 5"
  }
}
output {
  # used to output the values in the terminal (DEBUGGING)
  # once everything is working, comment out this line
  stdout { codec => "json" }
  # used to output the values into elasticsearch
  elasticsearch {
    hosts => ["http://ip_of_node:9200"]
    index => "vikings_es"
    #document_id => "{101}"
    #doc_as_upsert => true # upserts documents (e.g. if the document does not exist, creates a new record)
  }
}
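The option I suspect is causing the trouble is the statement, since the SQL column alias "ID" is wrapped in the same double quotes that delimit the config value. Here is that line on its own:

  statement => "SELECT DISTINCT PYID AS "ID" FROM U90CCMWT.Marketing LIMIT 5"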
I am getting the error below while running this Logstash config to index the data; my reading of it follows the log:
Using bundled JDK: /solr-index/Logstash/logstash-7.12.1/jdk
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
Sending Logstash logs to /solr-index/Logstash/logstash-7.12.1/logs which is now configured via log4j2.properties
[2021-05-20T06:27:41,874][INFO ][logstash.runner ] Log4j configuration path used is: /solr-index/Logstash/logstash-7.12.1/config/log4j2.properties
[2021-05-20T06:27:41,886][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.12.1", "jruby.version"=>"jruby 9.2.13.0 (2.5.7) 2020-08-03 9a89c94bcc OpenJDK 64-Bit Server VM 11.0.10+9 on 11.0.10+9 +indy +jit [linux-x86_64]"}
[2021-05-20T06:27:42,300][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2021-05-20T06:27:42,913][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2021-05-20T06:27:43,152][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:test, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \t\r\n], "#", "{", "}" at line 11, column 41 (byte 582) after input {\n jdbc {\n jdbc_connection_string => "jdbc:postgresql://dtacsqldb-ccms-postgresql-dtacqual-1a.cpzfa7z6lrue.us-east-1.rds.amazonaws.com/dtacqual"\n jdbc_user => "dtacuser"\n jdbc_password => "xyz"\n jdbc_driver_library => "/solr-index/Logstash/logstash-7.12.1/logstash-core/lib/jars/postgresql-jdbc.jar"\n jdbc_driver_class => "org.postgresql.Driver"\n schedule => "* * * * *" # cronjob schedule format (see "Helpful Links")\n\t#jdbc_paging_enabled => "true"\n\t#jdbc_page_size => "300"\n\tstatement => "SELECT DISTINCT PYID AS "", :backtrace=>["/solr-index/Logstash/logstash-7.12.1/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:184:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:69:in `initialize'", "/solr-index/Logstash/logstash-7.12.1/logstash-core/lib/logstash/java_pipeline.rb:47:in `initialize'", "/solr-index/Logstash/logstash-7.12.1/logstash-core/lib/logstash/pipeline_action/create.rb:52:in `execute'", "/solr-index/Logstash/logstash-7.12.1/logstash-core/lib/logstash/agent.rb:389:in `block in converge_state'"]}
[2021-05-20T06:27:43,257][INFO ][logstash.runner ] Logstash shut down.
[2021-05-20T06:27:43,268][FATAL][org.logstash.Logstash ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:747) ~[jruby-complete-9.2.13.0.jar:?]
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:710) ~[jruby-complete-9.2.13.0.jar:?]
at solr_minus_index.Logstash.logstash_minus_7_dot_12_dot_1.lib.bootstrap.environment.(/solr-index/Logstash/logstash-7.12.1/lib/bootstrap/environment.rb:89) ~[?:?]
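From the error message, it looks to me like the config parser treats the double quote right before ID as the end of the string, so the statement value Logstash actually sees is cut off at the point echoed in the error above:

  statement => "SELECT DISTINCT PYID AS "
  # the parser then hits the leftover ID" ... and fails with 'Expected one of [ \t\r\n], "#", "{", "}"'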
I tried to escape the special characters with the query variations below, but none of them worked (a workaround I am considering is sketched after the list):
"SELECT DISTINCT PYID AS" +""IDD""+ "FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT PYID AS ""IDD"" FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT PYID AS \""IDD\"" FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT PYID AS \"IDD\" FROM U90CCMWT.Marketing LIMIT 5
"SELECT DISTINCT PYID AS IDD FROM U90CCMWT.Marketing LIMIT 5
"SELECT DISTINCT PYID AS "IDD" FROM U90CCMWT.Marketing LIMIT 5
"SELECT DISTINCT PYID AS \t "IDD" \t FROM U90CCMWT.Marketing LIMIT 5
"SELECT DISTINCT PYID AS "ID#" FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT PYID AS "ID
" FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT PYID AS UPPER(ID)" FROM U90CCMWT.Marketing LIMIT 5"
'SELECT DISTINCT PYID AS "ID" FROM U90CCMWT.Marketing LIMIT 5'
"SELECT DISTINCT PYID AS "ID=>" FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT 'PYID' AS ID FROM U90CCMWT.Marketing LIMIT 5"
"SELECT DISTINCT PYID AS ''ID'' FROM U90CCMWT.Marketing LIMIT 5"
Please help me to resolve this issue.