Hi All
I am an Elasticsearch noob... I am trying to upload data from ServiceNow to Elasticsearch, and I am facing some issues with the data and the conf file.
I took assistance from @Gauti, who helped me with the configuration below after I was unable to get it working with the conf file I had created myself.
Execution Command
c:\ELK\logstash-7.9.1\bin>logstash -f C:\ELK\00_GIC\ServiceNow\Version1\SNOW1_2.
conf > C:\ELK\00_GIC\ServiceNow\Version1\SNOW_Output_1_2.txt
I get the warning as below:
C:/ELK/logstash-7.9.1/vendor/bundle/jruby/2.5.0/gems/rufus-scheduler-3.0.9/lib/r
ufus/scheduler/cronline.rb:77: warning: constant ::Fixnum is deprecated
SNOW.conf
input {
http_poller {
urls => {
snowinc => {
url => "https://dev53396.service-now.com/incident_list.do?JSONv2&incident_list.do?JSONv2&display_value=True&sysparm_exclude_reference_link=True&sysparm_fields=resolved_by%2Cstate%2Ccmdb_ci%2Cpriority%2Ccaller_id%2Cshort_description%2Cassignment_group%2Cassigned_to%2Cseverity%2Clocation%2Cnumber&sysparm_limit=10sysparm_view=json_view"
user => "admin"
password => "tZRJnmUac79D"
headers => {Accept => "application/json"}
}
}
request_timeout => 60
metadata_target => "http_poller_metadata"
schedule => { cron => "* * * * * UTC"}
codec => "json"
}
}
filter
{
json {source => "records" }
split{ field => ["records"] }
}
output {
elasticsearch {
hosts => ["localhost:9200"]
index => "incidentsnow"
action=>update
document_id => "%{[result][number]}"
doc_as_upsert =>true
}
stdout { codec => rubydebug }
}
If I enter the URL in the browser, I am able to see the correct set of records.
Sample
{
"records":[
{
"parent":"",
"made_sla":"true",
"caused_by":"",
"watch_list":"",
"upon_reject":"cancel",
"sys_updated_on":"2016-12-14 02:46:44",
"child_incidents":"0",
"hold_reason":"",
"task_effective_number":"INC0000060",
"approval_history":"",
"number":"INC0000060",
"resolved_by":"5137153cc611227c000bbd1bd8cd2007",
"sys_updated_by":"employee",
"opened_by":"681ccaf9c0a8016400b98a06818d57c7",
"user_input":"",
"sys_created_on":"2016-12-12 15:19:57",
"sys_domain":"global",
"state":"7",
"route_reason":"",
"sys_created_by":"employee",
"knowledge":"false",
"order":"",
"__status":"success",
"calendar_stc":"102197",
"closed_at":"2016-12-14 02:46:44",
"cmdb_ci":"109562a3c611227500a7b7ff98cc0dc7",
"delivery_plan":"",
"contract":"",
"impact":"2",
"active":"false",
"work_notes_list":"",
"business_service":"27d32778c0a8000b00db970eeaa60f16",
"priority":"3",
"sys_domain_path":"\/",
"rfc":"",
"time_worked":"",
"expected_start":"",
"opened_at":"2016-12-12 15:19:57",
"business_duration":"1970-01-01 08:00:00",
"group_list":"",
"work_end":"",
"caller_id":"681ccaf9c0a8016400b98a06818d57c7",
"reopened_time":"",
"resolved_at":"2016-12-13 21:43:14",
"approval_set":"",
"subcategory":"email",
"work_notes":"",
"universal_request":"",
"short_description":"Unable to connect to email",
"close_code":"Solved (Permanently)",
"correlation_display":"",
"delivery_task":"",
"work_start":"",
"assignment_group":"287ebd7da9fe198100f92cc8d1d2154e",
"additional_assignee_list":"",
"business_stc":"28800",
"description":"I am unable to connect to the email server. It appears to be down.",
"calendar_duration":"1970-01-02 04:23:17",
"close_notes":"This incident is resolved.",
"notify":"1",
"service_offering":"",
"sys_class_name":"incident",
"closed_by":"681ccaf9c0a8016400b98a06818d57c7",
"follow_up":"",
"parent_incident":"",
"sys_id":"1c741bd70b2322007518478d83673af3",
"contact_type":"self-service",
"reopened_by":"",
"incident_state":"7",
"urgency":"2",
"problem_id":"",
"company":"31bea3d53790200044e0bfc8bcbe5dec",
"reassignment_count":"2",
"activity_due":"2016-12-13 01:26:36",
"assigned_to":"5137153cc611227c000bbd1bd8cd2007",
"severity":"3",
"comments":"",
"approval":"not requested",
"sla_due":"",
"comments_and_work_notes":"",
"due_date":"",
"sys_mod_count":"15",
"reopen_count":"0",
"sys_tags":"",
"escalation":"0",
"upon_approval":"proceed",
"correlation_id":"",
"location":"",
"category":"inquiry"
}]
}
I see an index created, but the data that gets stored in Elasticsearch is as below:
Error/Data displayed in Kibana Discover
http_request_failure.error:Connection refused: connect http_request_failure.name:snowinc http_request_failure.backtrace: - http_request_failure.request.method:get http_request_failure.request.url:https://dev53396.service-now.com/incident_list.do?JSONv2&incident_list.do?JSONv2&display_value=True&sysparm_exclude_reference_link=True&sysparm_fields=resolved_by%2Cstate%2Ccmdb_ci%2Cpriority%2Ccaller_id%2Cshort_description%2Cassignment_group%2Cassigned_to%2Cseverity%2Clocation%2Cnumber&sysparm_limit=10sysparm_view=json_view
The error I get in the output is as below:
Error.log
Sending Logstash logs to c:/ELK/logstash-7.9.1/logs which is now configured via log4j2.properties
[2020-12-16T13:15:53,087][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.9.1", "jruby.version"=>"jruby 9.2.13.0 (2.5.7) 2020-08-03 9a89c94bcc Java HotSpot(TM) 64-Bit Server VM 25.271-b09 on 1.8.0_271-b09 +indy +jit [mswin32-x86_64]"}
[2020-12-16T13:15:53,337][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2020-12-16T13:15:55,431][INFO ][org.reflections.Reflections] Reflections took 47 ms to scan 1 urls, producing 22 keys and 45 values
[2020-12-16T13:15:58,337][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2020-12-16T13:15:58,556][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2020-12-16T13:15:58,603][INFO ][logstash.outputs.elasticsearch][main] ES Output version determined {:es_version=>7}
[2020-12-16T13:15:58,619][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2020-12-16T13:15:58,681][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2020-12-16T13:15:58,744][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>7, :ecs_compatibility=>:disabled}
[2020-12-16T13:15:58,837][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1000, "pipeline.sources"=>["C:/ELK/00_GIC/ServiceNow/Version1/SNOW1_2.conf"], :thread=>"#<Thread:0x1e99609 run>"}
[2020-12-16T13:15:58,869][INFO ][logstash.outputs.elasticsearch][main] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2020-12-16T13:15:59,728][INFO ][logstash.javapipeline ][main] Pipeline Java execution initialization time {"seconds"=>0.89}
[2020-12-16T13:15:59,759][INFO ][logstash.inputs.http_poller][main] Registering http_poller Input {:type=>nil, :schedule=>{"cron"=>"* * * * * UTC"}, :timeout=>nil}
[2020-12-16T13:15:59,790][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
[2020-12-16T13:15:59,853][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2020-12-16T13:16:00,290][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2020-12-16T13:17:01,572][WARN ][logstash.filters.split ][main][2f0609164e1dfab3ac26fbbca582fb37a8b76cc2241a7949a2160df1b390eb5c] Only String and Array types are splittable. field:result is of type = NilClass
{#### KEEPS REPEATING
"@version" => "1",
"@timestamp" => 2020-12-16T07:47:01.416Z,
"http_poller_metadata" => {
"host" => "Server1",
"request" => {
"auth" => {
"pass" => "tZRJnmUac79D",
"eager" => true,
"user" => "admin"
},
"headers" => {
"Accept" => "application/json"
},
"method" => "get",
"url" => "https://dev53396.service-now.com/incident_list.do?JSONv2&incident_list.do?JSONv2&display_value=True&sysparm_exclude_reference_link=True&sysparm_fields=resolved_by%2Cstate%2Ccmdb_ci%2Cpriority%2Ccaller_id%2Cshort_description%2Cassignment_group%2Cassigned_to%2Cseverity%2Clocation%2Cnumber&sysparm_limit=10sysparm_view=json_view"
},
"runtime_seconds" => nil,
"name" => "snowinc"
},
"tags" => [
[0] "_http_request_failure",
[1] "_split_type_failure"
],
"http_request_failure" => {
"runtime_seconds" => 1.156,
"request" => {
"auth" => {
"pass" => "tZRJnmUac79D",
"eager" => true,
"user" => "admin"
},
"headers" => {
"Accept" => "application/json"
},
"method" => "get",
"url" => "https://dev53396.service-now.com/incident_list.do?JSONv2&incident_list.do?JSONv2&display_value=True&sysparm_exclude_reference_link=True&sysparm_fields=resolved_by%2Cstate%2Ccmdb_ci%2Cpriority%2Ccaller_id%2Cshort_description%2Cassignment_group%2Cassigned_to%2Cseverity%2Clocation%2Cnumber&sysparm_limit=10sysparm_view=json_view"
},
"backtrace" => nil,
"error" => "Connection refused: connect",
"name" => "snowinc"
}
}**#### KEEPS REPEATING**
The section marked #### KEEPS REPEATING is emitted over and over on every scheduled poll.
I need some assistance here.
Thanks