Thank you for the answers — I was able to generate the key and certificates, but now Logstash is failing at startup with the following error.
[2018-04-11T10:11:40,270][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@localhost:9200/]}}
[2018-04-11T10:11:40,271][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elastic:xxxxxx@localhost:9200/, :path=>"/"}
[2018-04-11T10:11:40,279][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://elastic:xxxxxx@localhost:9200/"}
[2018-04-11T10:11:40,290][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>nil}
[2018-04-11T10:11:40,291][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2018-04-11T10:11:40,291][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-04-11T10:11:40,294][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-04-11T10:11:40,304][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2018-04-11T10:11:40,600][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>500, :thread=>"#<Thread:0x13d13cf run>"}
[2018-04-11T10:11:40,605][INFO ][logstash.inputs.beats ] Beats inputs: Starting input listener {:address=>"0.0.0.0:5044"}
[2018-04-11T10:11:40,611][ERROR][logstash.pipeline ] Error registering plugin {:pipeline_id=>"main", :plugin=>"<LogStash::Inputs::Beats port=>5044, ssl=>true, ssl_certificate_authorities=>[\"C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/cert/ca.crt\"], ssl_certificate=>\"C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/cert/instance.crt\", ssl_key=>\"C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/cert/instance.key\", ssl_verify_mode=>\"force_peer\", id=>\"c7ccdd57fa15ab8f83753208a1455a3551ac4dc202e4fdc76397396bd58afdb8\", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>\"plain_518dfb31-8772-45dd-8137-8134b19b10a2\", enable_metric=>true, charset=>\"UTF-8\">, host=>\"0.0.0.0\", include_codec_tag=>true, ssl_handshake_timeout=>10000, tls_min_version=>1, tls_max_version=>1.2, cipher_suites=>[\"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384\", \"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384\", \"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256\", \"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256\", \"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384\", \"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384\", \"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256\", \"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256\"], client_inactivity_timeout=>60, executor_threads=>16>", :error=>"Cipher `TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384` is not available", :thread=>"#<Thread:0x13d13cf run>"}
[2018-04-11T10:11:41,298][ERROR][logstash.pipeline ] Pipeline aborted due to error {:pipeline_id=>"main", :exception=>#<LogStash::ConfigurationError: Cipher `TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384` is not available>, :backtrace=>["C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/vendor/bundle/jruby/2.3.0/gems/logstash-input-beats-5.0.6-java/lib/logstash/inputs/beats.rb:170:in `create_server'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/vendor/bundle/jruby/2.3.0/gems/logstash-input-beats-5.0.6-java/lib/logstash/inputs/beats.rb:158:in `register'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:343:in `register_plugin'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:354:in `block in register_plugins'", "org/jruby/RubyArray.java:1734:in `each'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:354:in `register_plugins'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:510:in `start_inputs'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:401:in `start_workers'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:288:in `run'", "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/logstash-core/lib/logstash/pipeline.rb:248:in `block in start'"], :thread=>"#<Thread:0x13d13cf run>"}
[2018-04-11T10:11:41,301][ERROR][logstash.agent ] Failed to execute action {:id=>:main, :action_type=>LogStash::ConvergeResult::FailedAction, :message=>"Could not execute action: LogStash::PipelineAction::Create/pipeline_id:main, action_result: false", :backtrace=>nil}
# Logstash pipeline input: accept Beats (e.g. Filebeat) connections over TLS
# with mutual authentication. NOTE(review): the startup failure in the log
# above ("Cipher `TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384` is not available")
# comes from the beats plugin's default cipher list, not from anything set
# here — presumably the JVM lacks the unlimited-strength JCE policy needed
# for AES-256 suites; verify the Java runtime, this config sets no ciphers.
input {
beats {
# TCP port the Beats listener binds to (matches the log: 0.0.0.0:5044).
port => "5044"
# Enable TLS for incoming Beats connections.
ssl => true
# CA certificate(s) used to validate client certificates.
ssl_certificate_authorities => ["C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/cert/ca.crt"]
# Server certificate presented to connecting Beats clients.
ssl_certificate => "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/cert/instance.crt"
# Private key matching the server certificate above.
ssl_key => "C:/Users/jordyd/Documents/selfhostedsearch/ELK/logstash-6.1.3/cert/instance.key"
# Require clients to present a certificate signed by the CA (mutual TLS);
# connections without a valid client certificate are rejected.
ssl_verify_mode => "force_peer"
}
}