[2020-10-10T05:30:55,854][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2020-10-10T05:30:57,935][INFO ][org.reflections.Reflections] Reflections took 31 ms to scan 1 urls, producing 22 keys and 45 values
[2020-10-10T05:30:59,051][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2020-10-10T05:30:59,327][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2020-10-10T05:30:59,382][INFO ][logstash.outputs.elasticsearch][main] ES Output version determined {:es_version=>7}
[2020-10-10T05:30:59,387][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2020-10-10T05:30:59,445][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2020-10-10T05:30:59,564][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>7, :ecs_compatibility=>:disabled}
[2020-10-10T05:30:59,650][INFO ][logstash.outputs.elasticsearch][main] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2020-10-10T05:30:59,724][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>250, "pipeline.sources"=>["C:/elastic/logstash-7.9.2/bin/sql01.conf"], :thread=>"#<Thread:0x689874ac run>"}
[2020-10-10T05:31:00,514][INFO ][logstash.javapipeline ][main] Pipeline Java execution initialization time {"seconds"=>0.78}
[2020-10-10T05:31:00,764][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
[2020-10-10T05:31:00,920][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2020-10-10T05:31:02,025][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2020-10-10T05:31:02,421][INFO ][logstash.inputs.jdbc ][main][fd942172197591ef32314a3a90c00cd51e0d48d19f5e69854a8ca207a6fa28dc] (0.037660s) SELECT * FROM DBO.incidentes
[2020-10-10T05:31:03,700][INFO ][logstash.runner ] Logstash shut down.
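The log ends with Logstash shutting down right after the SELECT completes. That is the expected behaviour of the jdbc input when no schedule option is set: the statement is executed exactly once and the pipeline then finishes. The pipeline file referenced in the log (sql01.conf) follows; a scheduled variant is sketched after it.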
input {
  jdbc {
    # SQL Server connection for the TEST01 database
    jdbc_connection_string => "jdbc:sqlserver://pserverw:1433;databaseName=TEST01;integratedSecurity=false;"
    # Microsoft JDBC driver jar and its driver class
    jdbc_driver_library => "C:/jdbc/sqljdbc_4.2/enu/jre8/sqljdbc42.jar"
    jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    jdbc_user => "sa"
    jdbc_password => "Enfoque0103200"
    # No schedule is set, so this statement runs once at startup
    statement => "SELECT * FROM incidentes"
  }
}

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "sql01"
  }
}
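
If the intent is to keep Logstash running and re-index the table periodically, the same pipeline can be given a cron-style schedule on the jdbc input and a document_id on the elasticsearch output. This is a minimal sketch, assuming the incidentes table has a unique id column to reuse as the Elasticsearch document id; adjust the column name and the schedule to your data.

input {
  jdbc {
    jdbc_connection_string => "jdbc:sqlserver://pserverw:1433;databaseName=TEST01;integratedSecurity=false;"
    jdbc_driver_library => "C:/jdbc/sqljdbc_4.2/enu/jre8/sqljdbc42.jar"
    jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    jdbc_user => "sa"
    jdbc_password => "Enfoque0103200"
    # Cron-style schedule (rufus-scheduler syntax): run the query every minute
    schedule => "* * * * *"
    statement => "SELECT * FROM incidentes"
  }
}

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "sql01"
    # Assumption: incidentes has an "id" column; reusing it as the document id
    # makes repeated runs update existing documents instead of duplicating them
    document_id => "%{id}"
  }
}

With a schedule in place, Logstash stays up and repeats the query on each tick instead of shutting down after a single pass, which is what the "Logstash shut down" line at the end of the log above reflects.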