It doesn't work — the output is unchanged and Logstash fails to index the event. I guess I am missing something small. This is the entire config of my Logstash.
# Accept incoming events (e.g. GitHub webhooks) over HTTP on port 8080.
input {
http {
port => "8080"
# Tag incoming events so downstream conditionals/outputs can key off them.
type => "json"
}
}
filter {
# External Ruby script that strips the listed noisy URL/metadata keys
# from the event before indexing.
ruby {
path => "/usr/share/logstash/filter/removeKeys.rb"
script_params => { keys => [ "archive_url", "assignees_url", "avatar_url", "blobs_url", "branches_url", "clone_url", "collaborators_url", "comments_url", "commits_url", "compare_url", "contents_url", "contributors_url", "deployments_url", "diff_url", "downloads_url", "events_url", "followers_url", "following_url", "forks_url", "gists_url", "git_commits_url", "git_refs_url", "git_tags_url", "git_url", "hooks_url", "html_url", "issue_comment_url", "issue_events_url", "issues_url", "issue_url", "keys_url", "labels_url", "languages_url", "members_url", "merges_url", "milestones_url", "mirror_url", "notifications_url", "organizations_url", "patch_url", "public_members_url", "pull_request_url", "pulls_url", "received_events_url", "releases_url", "repos_url", "review_comments_url", "review_comment_url", "ssh_url", "stargazers_url", "starred_url", "statuses_url", "subscribers_url", "subscriptions_url", "subscription_url", "svn_url", "tags_url", "teams_url", "trees_url", "website_url", "ldap_dn", "gravatar_id", "node_id", "spdx_id", "stargazers_count", "has_wiki", "license", "stargazers", "mirror_url", "x_github_enterprise_host", "http_version", "x_github_enterprise_version", "http_host", "request_path", "http_accept", "request_method", "content_type", "organization.description", "repository.license", "commits" ] }
}
# Normalize the various GitHub ISO8601 timestamp strings into proper
# Logstash timestamps, writing each one back to its original field.
date { match => [ "[comment][created_at]", "ISO8601" ] target => "[comment][created_at]" }
date { match => [ "[comment][updated_at]", "ISO8601" ] target => "[comment][updated_at]" }
date { match => [ "[enterprise][created_at]", "ISO8601" ] target => "[enterprise][created_at]" }
date { match => [ "[issue][closed_at]", "ISO8601" ] target => "[issue][closed_at]" }
date { match => [ "[issue][created_at]", "ISO8601" ] target => "[issue][created_at]" }
date { match => [ "[issue][updated_at]", "ISO8601" ] target => "[issue][updated_at]" }
date { match => [ "[pull_request][base][repo][created_at]", "ISO8601" ] target => "[pull_request][base][repo][created_at]" }
date { match => [ "[pull_request][base][repo][pushed_at]", "ISO8601" ] target => "[pull_request][base][repo][pushed_at]" }
date { match => [ "[pull_request][base][repo][updated_at]", "ISO8601" ] target => "[pull_request][base][repo][updated_at]" }
date { match => [ "[pull_request][closed_at]", "ISO8601" ] target => "[pull_request][closed_at]" }
date { match => [ "[pull_request][created_at]", "ISO8601" ] target => "[pull_request][created_at]" }
date { match => [ "[pull_request][updated_at]", "ISO8601" ] target => "[pull_request][updated_at]" }
date { match => [ "[pull_request][head][repo][created_at]", "ISO8601" ] target => "[pull_request][head][repo][created_at]" }
date { match => [ "[pull_request][head][repo][pushed_at]", "ISO8601" ] target => "[pull_request][head][repo][pushed_at]" }
date { match => [ "[pull_request][head][repo][updated_at]", "ISO8601" ] target => "[pull_request][head][repo][updated_at]" }
# NOTE(review): the original config declared this merged_at date filter
# twice back-to-back; the duplicate was removed as it is a no-op.
date { match => [ "[pull_request][merged_at]", "ISO8601" ] target => "[pull_request][merged_at]" }
mutate {
lowercase => [ "[repository][name]" ]
}
if "push" in [headers][x_github_event] {
ruby {
code => '
t = Time.at(event.get("@timestamp").to_f)
# FIX: the original used "%s" (seconds since the Unix epoch), which
# produced values like "2020-11-23 07:24:1606116249" and caused the
# mapper_parsing_exception in the pasted error. "%S" is the
# zero-padded seconds-of-minute directive.
event.set("[repository][created_at]", t.strftime("%Y-%m-%d %H:%M:%S"))
'
}
}
# NOTE(review): the error shows the existing index maps
# repository.created_at as type "long", so even a correctly formatted
# date STRING will be rejected by that index. You will likely need to
# reindex (or roll to a new daily index) after this fix — confirm the
# index template/mapping expects a date for this field.
}
output {
# Debug output: print each event (including @metadata) to the console.
stdout { codec => rubydebug { metadata => true } }
# Ship events as JSON to the Kafka topic "githubevent".
kafka {
codec => json
topic_id => "githubevent"
bootstrap_servers => "host:9092"
# 100 MiB cap to accommodate large webhook payloads (e.g. big pushes).
max_request_size => 104857600
}
}
The error which I received from Logstash is below.
[2020-11-23T07:24:12,198][WARN ][logstash.outputs.elasticsearch][github-kafka][githudata_elastic] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"for-testing_project_pipeline-push-2020.11.23", :routing=>nil, :_type=>"_doc"}, #<LogStash::Event:0x12cc0b06>], :response=>{"index"=>{"_index"=>"for-testing_project_pipeline-push-2020.11.23", "_type"=>"_doc", "_id"=>"QEL883UBZ9G1Tbi08uzD", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [repository.created_at] of type [long] in document with id 'QEL883UBZ9G1Tbi08uzD'. Preview of field's value: '2020-11-23 07:24:1606116249'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"For input string: \"2020-11-23 07:24:1606116249\""}}}}}
Could you please have a look at it?
Regards
TJ