Merging two indices in logstash based on the same field

hi all,

I am using following config file:

input {
jdbc {
    jdbc_driver_library => "F:\driver\sqljdbc_6.0\enu\jre8\sqljdbc42.jar"
    jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    jdbc_connection_string => "jdbc:sqlserver://local:3333;databaseName=Total"
    jdbc_user => "****"
    jdbc_password => "****"
	schedule => "0 10-23/1 * * *"
    statement => "
	DECLARE @DDate1  CHAR(10)
   select @DDate1=REPLACE(MAX(Date),'/','') from Total.dbo.Total_Control
   select [BaseDate_Accept_Setel]
      ,[Financial_Date]
      ,[mcycle_no] from dbo.vw_Total where (BaseDate_Accept_Setel=@DDate1) AND (BaseDate_Accept_Setel> :sql_last_value) 
	"
use_column_value => "true"
tracking_column => "basedate_accept_setel"
}

}
filter {
mutate {
  add_field => {
   "basedate" => "%{basedate_accept_setel}"
   
  }
  convert => {
  "basedate" => "string"
  }
}
ruby {
            code => "
            event.set('Myear', event.get('basedate')[0..3])
            event.set('Mmonth', event.get('basedate')[4..5])
            event.set('Mday', event.get('basedate')[6..7])
            "
    }
	mutate {
	add_field => {
		"mp_date" => "%{Myear}/%{Mmonth}/%{Mday}"
	}
	}
	
jdbc_streaming {
 jdbc_driver_library => "F:\driver\sqljdbc_6.0\enu\jre8\sqljdbc42.jar"
    jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    jdbc_connection_string => "jdbc:sqlserver://local:3333;databaseName=Total"
    jdbc_user => "****"
    jdbc_password => "****"
    parameters => {"mine" => "mp_date"}
    statement => "
	DECLARE @ddate2  CHAR(10)
	set @ddate2=:mine
SELECT Total_OnLine.dbo.mp2md(@ddate2) as gdate"
target => "gdate"
}
mutate { replace => { "gdate" => "%{[gdate][0][gdate]}" } }
    mutate { gsub => [ "gdate", "/", "-" ] }
elasticsearch {
hosts => ["localhost"]
index => "disp_%{gdate}"
query => '{"query" : {"match" : {"cycleno":"%{mcycle_no}"}}}'
fields => {
   "ps" => "mp"
   "cycleno" => "mycyc"
    }
    }
    }

output {
  elasticsearch { 
    hosts => ["http://192.168.170.153:9200"]
    index => "total_%{gdate}"
    user => "logstash_internal25"
    password => "x-pack-test-password"
 }
  stdout { codec => rubydebug }
}

but following error has been found:

[2019-03-06T12:53:06,641][WARN ][logstash.filters.elasticsearch] 
Failed to query elasticsearch for previous event 
{:index=>"disp_%{gdate}", :error=>"[400] {\"error\": 
{\"root_cause\": 
[{\"type\":\"query_shard_exception\",\"reason\":\"Failed to parse 
query [{\\\"query\\\" : {\\\"match\\\" : {\\\"cycleno\\\" :  "0002128604\\\"}}}]\",\"index_uuid\":\"hLKV1GtGS0iMX1ZDeYpMeQ\",\" 
index\":\"disp_2018-11- 
24\"}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all 
shards failed\",\"phase\":\"query\",\"grouped\":true,\"failed_shards\":[{\"shard\":0,\"index\":\"disp_2018-11-24\",\"node\":\"oveLWBrcQK6VQQaOFGoNSQ\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"Failed to parse query [{\\\"query\\\" : {\\\"match\\\" : {\\\"cycleno\\\" : \\\"0002128604\\\"}}}]\",\"index_uuid\":\"hLKV1GtGS0iMX1ZDeYpMeQ\",\"index\":\"disp_2018-11-24\",\"caused_by\":{\"type\":\"parse_exception\",\"reason\":\"Cannot parse '{\\\"query\\\" : {\\\"match\\\" : {\\\"cycleno\\\" : \\\"0002128604\\\"}}}': Encountered \\\" <RANGE_GOOP> \\\": \\\"\\\" at line 1, column 9.\\r\\nWas expecting:\\r\\n    \\\"TO\\\" ...\\r\\n    \",\"caused_by\":{\"type\":\"parse_exception\",\"reason\":\"Encountered \\\" <RANGE_GOOP> \\\": \\\"\\\" at line 1, column 9.\\r\\nWas expecting:\\r\\n    \\\"TO\\\" ...\\r\\n    \"}}}}]},\"status\":400}"}

actually, a field named as "cycleno" is in "disp_" index and a field named as "mcycle_no" is in jdbc input, I want to have a query so that if "cycleno=mcycle_no", add "mp" and "mycyc" fields to the index "total_" where fields "ps" and "cycleno" are in "disp_*" index. any advice will be so appreciated

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.