Logstash filter plugin install No `java.exe' executable found on PATH

I want to get the fingerprint plugin for logstash on an airgapped windows system. For this I need to download and install it on a system, which has internet access and continue from there? Like in this shown. https://www.elastic.co/guide/en/logstash/current/offline-plugins.html

I unzipped logstash on a windows system, which has internet connection. I am want to run the command and it is getting me this result back: No java.exe executable found on path. Am I doing something wrong? Do I miss something? Or is there another way, to get this into the airgapped system?

C:\logstash-8.0.1\bin>logstash-plugin install logstash-filter-fingerprint
"Using bundled JDK: ."
No `java.exe' executable found on PATH.

From my understanding, logstash provides its own JDK?

Thanks for help

The fingerprint plugin is already bundled with logstash, there is no need to install it, did you try to use it and got any error?

Yes, i tried to use the function described in the blog for GDPR. https://www.elastic.co/de/blog/gdpr-personal-data-pseudonymization-part-1 Basically the example.

My conf is:

# The # character at the beginning of a line indicates a comment. Use
# comments to describe your configuration.
input {
  beats {
    port => 5044
  }
}
# The filter part of this file is commented out to indicate that it is
# optional.
filter {
	#mutate {
	#	remove_field => ["agent", "log", "input", "ecs", "tags"]
	#}
	# add fields with data for sourcename and sourceip from existing data host
	mutate {
		add_field => {
			"sourceName" => "%{[host][name]}"
			"sourceIP" => "%{[host][ip]}"
		}
	}
	mutate {
		remove_field => ["host"]
	}
	# normalize logs with grok and break on match
	grok {
		break_on_match => true
		match => {
			"message" => [
				"^%{DATA:processtype},%{SPACE}(?<eventtimestamp>%{YEAR}.%{MONTHNUM}.%{MONTHDAY} %{TIME}),%{SPACE}%{WORD:typeevent},(%{SPACE}%{LOGLEVEL:loglevel},)?%{SPACE}%{DATA:msg}'(?<username>%{WORD}.%{WORD})'%{GREEDYDATA:msg1}$",
				"^%{DATA:processtype},%{SPACE}(?<eventtimestamp>%{YEAR}.%{MONTHNUM}.%{MONTHDAY} %{TIME}),%{SPACE}%{WORD:typeevent},(%{SPACE}%{LOGLEVEL:loglevel},)?%{SPACE}%{GREEDYDATA:msg}$",
				"^(?<eventtimestamp>%{YEAR}.%{MONTHNUM}.%{MONTHDAY} %{TIME})%{SPACE}%{WORD:typeevent}%{SPACE}%{GREEDYDATA:msg}$",
				"^(?<eventtimestamp>%{YEAR}.%{MONTHNUM}.%{MONTHDAY} %{TIME})%{SPACE}%{GREEDYDATA:msg}$",
				"^%{GREEDYDATA:msg}$"
			]
		}
	}
	# pseudonymize username field
	if [username] {
		ruby {
        code => "event.set('identities',[])"
		}
        #fingerprint username
        fingerprint {
            method => "SHA256"
            source => ["username"]
            key => "${FINGERPRINT_KEY}"
        }
		
		#create sub document under identities field
        mutate {
			add_field => {
				'[identities][0][key]' => "%{fingerprint}"  
				'[identities][0][value]' => "%{username}" 
			} 
		}
		#overwrite username field with fingerprint
        mutate { 
			replace => { 
				"username" => "%{fingerprint}" 
			} 
		}
		#extract sub documents and yield a new document for each one into the LS pipeline
		ruby {
			code => "event.get(identities).each { |p| e=LogStash::Event.new(p); e.tag(identities); new_event_block.call(e); } "
		}
		#remove fields on original doc
		mutate { 
			remove_field => ["fingerprint","identities"] 
		}
	}
}
output {
	if "identities" in [tags] {
		elasticsearch {
			hosts => ["https://localhost:9200"]
			ssl => true
			cacert => 'C:\PATH\certs\http_ca.crt'
			index => "identities"
			document_id => "%{[key]}"
			action => "create"
			user => user
			password => pw
			failure_type_logging_whitelist => ["version_conflict_engine_exception"]
		}
	}
	if [sourceName] == "sourcename1" or [sourceName] == "sourcename2" {
		elasticsearch {
			hosts => ["https://localhost:9200"]
			ssl => true
			cacert => 'C:\PATH\certs\http_ca.crt'
			index => "name"
			user => user
			password => pw
		}
	}
	else{
	  elasticsearch {
		hosts => ["https://localhost:9200"]
		ssl => true
		cacert => 'C:\PATH\certs\http_ca.crt'
		index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
		user => user
		password => pw
	  }
  }
}

I am getting this Errors on my cmd, not knowing FINGERPRINT_KEY

C:\PATH\logstash-8.0.1>bin\logstash.bat -f probe-pipeline2.conf --config.reload.automatic
"Using bundled JDK: ."
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
Sending Logstash logs to C:/PATH/logstash-8.0.1/logs which is now configured via log4j2.properties
[2022-04-28T18:46:53,075][INFO ][logstash.runner          ] Log4j configuration path used is: C:\PATH\logstash-8.0.1\config\log4j2.properties
[2022-04-28T18:46:53,091][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"8.0.1", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.13+8 on 11.0.13+8 +indy +jit [mswin32-x86_64]"}
[2022-04-28T18:46:53,106][INFO ][logstash.runner          ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -Djruby.jit.threshold=0, -Djruby.regexp.interruptible=true, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED]
[2022-04-28T18:46:53,184][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2022-04-28T18:46:55,999][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[2022-04-28T18:46:58,331][INFO ][org.reflections.Reflections] Reflections took 78 ms to scan 1 urls, producing 120 keys and 417 values
[2022-04-28T18:46:59,882][ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"Java::JavaLang::IllegalStateException", :message=>"Unable to configure plugins: Cannot evaluate `${FINGERPRINT_KEY}`. Replacement variable `FINGERPRINT_KEY` is not defined in a Logstash secret store or an environment entry and there is no default value given.", :backtrace=>["org.logstash.config.ir.CompiledPipeline.<init>(CompiledPipeline.java:120)", "org.logstash.execution.JavaBasePipelineExt.initialize(JavaBasePipelineExt.java:85)", "org.logstash.execution.JavaBasePipelineExt$INVOKER$i$1$0$initialize.call(JavaBasePipelineExt$INVOKER$i$1$0$initialize.gen)", "org.jruby.internal.runtime.methods.JavaMethod$JavaMethodN.call(JavaMethod.java:837)", "org.jruby.ir.runtime.IRRuntimeHelpers.instanceSuper(IRRuntimeHelpers.java:1169)", "org.jruby.ir.runtime.IRRuntimeHelpers.instanceSuperSplatArgs(IRRuntimeHelpers.java:1156)", "org.jruby.ir.targets.InstanceSuperInvokeSite.invoke(InstanceSuperInvokeSite.java:39)", "C_3a_.Users.NAME.ElasticStack.logstash_minus_8_dot_0_dot_1.logstash_minus_core.lib.logstash.java_pipeline.RUBY$method$initialize$0(C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/java_pipeline.rb:47)", "org.jruby.internal.runtime.methods.CompiledIRMethod.call(CompiledIRMethod.java:80)", "org.jruby.internal.runtime.methods.MixedModeIRMethod.call(MixedModeIRMethod.java:70)", "org.jruby.runtime.callsite.CachingCallSite.cacheAndCall(CachingCallSite.java:333)", "org.jruby.runtime.callsite.CachingCallSite.call(CachingCallSite.java:87)", "org.jruby.RubyClass.newInstance(RubyClass.java:939)", "org.jruby.RubyClass$INVOKER$i$newInstance.call(RubyClass$INVOKER$i$newInstance.gen)", "org.jruby.ir.targets.InvokeSite.invoke(InvokeSite.java:207)", "C_3a_.Users.NAME.ElasticStack.logstash_minus_8_dot_0_dot_1.logstash_minus_core.lib.logstash.pipeline_action.create.RUBY$method$execute$0(C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/pipeline_action/create.rb:50)", "C_3a_.Users.NAME.ElasticStack.logstash_minus_8_dot_0_dot_1.logstash_minus_core.lib.logstash.pipeline_action.create.RUBY$method$execute$0$__VARARGS__(C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/pipeline_action/create.rb:49)", "org.jruby.internal.runtime.methods.CompiledIRMethod.call(CompiledIRMethod.java:80)", "org.jruby.internal.runtime.methods.MixedModeIRMethod.call(MixedModeIRMethod.java:70)", "org.jruby.ir.targets.InvokeSite.invoke(InvokeSite.java:207)", "C_3a_.Users.NAME.ElasticStack.logstash_minus_8_dot_0_dot_1.logstash_minus_core.lib.logstash.agent.RUBY$block$converge_state$2(C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/agent.rb:376)", "org.jruby.runtime.CompiledIRBlockBody.callDirect(CompiledIRBlockBody.java:138)", "org.jruby.runtime.IRBlockBody.call(IRBlockBody.java:58)", "org.jruby.runtime.IRBlockBody.call(IRBlockBody.java:52)", "org.jruby.runtime.Block.call(Block.java:139)", "org.jruby.RubyProc.call(RubyProc.java:318)", "org.jruby.internal.runtime.RubyRunnable.run(RubyRunnable.java:105)", "java.base/java.lang.Thread.run(Thread.java:829)"]}
warning: thread "Converge PipelineAction::Create<main>" terminated with exception (report_on_exception is true):
LogStash::Error: Don't know how to handle `Java::JavaLang::IllegalStateException` for `PipelineAction::Create<main>`
          create at org/logstash/execution/ConvergeResultExt.java:135
             add at org/logstash/execution/ConvergeResultExt.java:60
  converge_state at C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/agent.rb:389
[2022-04-28T18:46:59,913][ERROR][logstash.agent           ] An exception happened when converging configuration {:exception=>LogStash::Error, :message=>"Don't know how to handle `Java::JavaLang::IllegalStateException` for `PipelineAction::Create<main>`"}
[2022-04-28T18:47:00,070][FATAL][logstash.runner          ] An unexpected error occurred! {:error=>#<LogStash::Error: Don't know how to handle `Java::JavaLang::IllegalStateException` for `PipelineAction::Create<main>`>, :backtrace=>["org/logstash/execution/ConvergeResultExt.java:135:in `create'", "org/logstash/execution/ConvergeResultExt.java:60:in `add'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/agent.rb:389:in `block in converge_state'"]}
[2022-04-28T18:47:00,085][FATAL][org.logstash.Logstash    ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
        at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:747) ~[jruby-complete-9.2.20.1.jar:?]
        at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:710) ~[jruby-complete-9.2.20.1.jar:?]
        at C_3a_.Users.NAME.ElasticStack.logstash_minus_8_dot_0_dot_1.lib.bootstrap.environment.<main>(C:\PATH\logstash-8.0.1\lib\bootstrap\environment.rb:94) ~[?:?]

Why are you setting the key option at all? If no key is used then the fingerprint filter will create a hash of the fields. A key is only needed if you and a third-party have a shared secret and they need cryptographic authentication that it was you who created the hash (which is then called an HMAC).

Ok, if I uncomment the line with the FINGERPRINT_KEY it gives me now an Error message, that identities is not defined.

C:\PATH\logstash-8.0.1>bin\logstash.bat -f probe-pipeline2.conf --config.reload.automatic
"Using bundled JDK: ."
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
Sending Logstash logs to C:/PATH/logstash-8.0.1/logs which is now configured via log4j2.properties
[2022-04-29T12:54:42,433][INFO ][logstash.runner          ] Log4j configuration path used is: C:\PATH\logstash-8.0.1\config\log4j2.properties
[2022-04-29T12:54:42,433][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"8.0.1", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.13+8 on 11.0.13+8 +indy +jit [mswin32-x86_64]"}
[2022-04-29T12:54:42,449][INFO ][logstash.runner          ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -Djruby.jit.threshold=0, -Djruby.regexp.interruptible=true, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED]
[2022-04-29T12:54:42,527][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2022-04-29T12:54:44,339][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[2022-04-29T12:54:46,275][INFO ][org.reflections.Reflections] Reflections took 63 ms to scan 1 urls, producing 120 keys and 417 values
[2022-04-29T12:54:47,884][INFO ][logstash.javapipeline    ] Pipeline `main` is configured with `pipeline.ecs_compatibility: v8` setting. All plugins in this pipeline will default to `ecs_compatibility => v8` unless explicitly configured otherwise.
[2022-04-29T12:54:47,933][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["https://localhost:9200"]}
[2022-04-29T12:54:48,261][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[https://elastic:xxxxxx@localhost:9200/]}}
[2022-04-29T12:54:48,651][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"https://elastic:xxxxxx@localhost:9200/"}
[2022-04-29T12:54:48,667][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (8.0.1) {:es_version=>8}
[2022-04-29T12:54:48,667][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>8}
[2022-04-29T12:54:48,776][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2022-04-29T12:54:48,808][WARN ][logstash.outputs.elasticsearch][main] Elasticsearch Output configured with `ecs_compatibility => v8`, which resolved to an UNRELEASED preview of version 8.0.0 of the Elastic Common Schema. Once ECS v8 and an updated release of this plugin are publicly available, you will need to update this plugin to resolve this warning.
[2022-04-29T12:54:48,808][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>8, :ecs_compatibility=>:v8}
[2022-04-29T12:54:48,823][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["https://localhost:9200"]}
[2022-04-29T12:54:48,901][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[https://elastic:xxxxxx@localhost:9200/]}}
[2022-04-29T12:54:49,011][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"https://elastic:xxxxxx@localhost:9200/"}
[2022-04-29T12:54:49,011][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (8.0.1) {:es_version=>8}
[2022-04-29T12:54:49,026][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>8}
[2022-04-29T12:54:49,069][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2022-04-29T12:54:49,069][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2022-04-29T12:54:49,069][WARN ][logstash.outputs.elasticsearch][main] Elasticsearch Output configured with `ecs_compatibility => v8`, which resolved to an UNRELEASED preview of version 8.0.0 of the Elastic Common Schema. Once ECS v8 and an updated release of this plugin are publicly available, you will need to update this plugin to resolve this warning.
[2022-04-29T12:54:49,069][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["https://localhost:9200"]}
[2022-04-29T12:54:49,084][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>8, :ecs_compatibility=>:v8}
[2022-04-29T12:54:49,100][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[https://elastic:xxxxxx@localhost:9200/]}}
[2022-04-29T12:54:49,186][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"https://elastic:xxxxxx@localhost:9200/"}
[2022-04-29T12:54:49,200][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (8.0.1) {:es_version=>8}
[2022-04-29T12:54:49,200][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>8}
[2022-04-29T12:54:49,216][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2022-04-29T12:54:49,216][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2022-04-29T12:54:49,216][WARN ][logstash.outputs.elasticsearch][main] Elasticsearch Output configured with `ecs_compatibility => v8`, which resolved to an UNRELEASED preview of version 8.0.0 of the Elastic Common Schema. Once ECS v8 and an updated release of this plugin are publicly available, you will need to update this plugin to resolve this warning.
[2022-04-29T12:54:49,231][WARN ][logstash.filters.grok    ][main] ECS v8 support is a preview of the unreleased ECS v8, and uses the v1 patterns. When Version 8 of the Elastic Common Schema becomes available, this plugin will need to be updated
[2022-04-29T12:54:49,247][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>8, :ecs_compatibility=>:v8}
[2022-04-29T12:54:49,591][INFO ][logstash.javapipeline    ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>250, "pipeline.sources"=>["C:/PATH/logstash-8.0.1/probe-pipeline2.conf"], :thread=>"#<Thread:0x117e5c06 run>"}
[2022-04-29T12:54:50,888][INFO ][logstash.javapipeline    ][main] Pipeline Java execution initialization time {"seconds"=>1.2}
[2022-04-29T12:54:50,919][INFO ][logstash.inputs.beats    ][main] Starting input listener {:address=>"0.0.0.0:5044"}
[2022-04-29T12:54:50,934][INFO ][logstash.javapipeline    ][main] Pipeline started {"pipeline.id"=>"main"}
[2022-04-29T12:54:51,013][INFO ][org.logstash.beats.Server][main][HEXANUMBER] Starting server on port: 5044
[2022-04-29T12:54:51,059][INFO ][logstash.agent           ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2022-04-29T12:55:09,504][ERROR][logstash.filters.ruby    ][main][HEXANUMBER] Ruby exception occurred: undefined local variable or method `identities' for #<LogStash::Filters::Ruby:0x27d71f25> {:class=>"NameError", :backtrace=>["(ruby filter code):2:in `block in filter_method'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:96:in `inline_script'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:89:in `filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:159:in `do_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:178:in `block in multi_filter'", "org/jruby/RubyArray.java:1821:in `each'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:175:in `multi_filter'", "org/logstash/config/ir/compiler/AbstractFilterDelegatorExt.java:134:in `multi_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/java_pipeline.rb:299:in `block in start_workers'"]}
[2022-04-29T12:55:09,504][ERROR][logstash.filters.ruby    ][main][HEXANUMBER] Ruby exception occurred: undefined local variable or method `identities' for #<LogStash::Filters::Ruby:0x27d71f25> {:class=>"NameError", :backtrace=>["(ruby filter code):2:in `block in filter_method'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:96:in `inline_script'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:89:in `filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:159:in `do_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:178:in `block in multi_filter'", "org/jruby/RubyArray.java:1821:in `each'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:175:in `multi_filter'", "org/logstash/config/ir/compiler/AbstractFilterDelegatorExt.java:134:in `multi_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/java_pipeline.rb:299:in `block in start_workers'"]}
[2022-04-29T12:55:10,232][ERROR][logstash.filters.ruby    ][main][HEXANUMBER] Ruby exception occurred: undefined local variable or method `identities' for #<LogStash::Filters::Ruby:0x27d71f25> {:class=>"NameError", :backtrace=>["(ruby filter code):2:in `block in filter_method'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:96:in `inline_script'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:89:in `filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:159:in `do_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:178:in `block in multi_filter'", "org/jruby/RubyArray.java:1821:in `each'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:175:in `multi_filter'", "org/logstash/config/ir/compiler/AbstractFilterDelegatorExt.java:134:in `multi_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/java_pipeline.rb:299:in `block in start_workers'"]}
[2022-04-29T12:55:23,428][ERROR][logstash.filters.ruby    ][main][HEXANUMBER] Ruby exception occurred: undefined local variable or method `identities' for #<LogStash::Filters::Ruby:0x27d71f25> {:class=>"NameError", :backtrace=>["(ruby filter code):2:in `block in filter_method'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:96:in `inline_script'", "C:/PATH/logstash-8.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.8/lib/logstash/filters/ruby.rb:89:in `filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:159:in `do_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:178:in `block in multi_filter'", "org/jruby/RubyArray.java:1821:in `each'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/filters/base.rb:175:in `multi_filter'", "org/logstash/config/ir/compiler/AbstractFilterDelegatorExt.java:134:in `multi_filter'", "C:/PATH/logstash-8.0.1/logstash-core/lib/logstash/java_pipeline.rb:299:in `block in start_workers'"]}

What am I missing?

There is no variable called "identities", so that e.tag method raises that exception. Perhaps you need to quote it?

Yes with quoting no errors appear. Thanks.

Somehow the username, I want to pseudomyze, does not get a hash value. I expected a hash number for the "key" but it gives me a hardcoded text "%{fingerprint}". Also, it just wrote the once into the identities index. The other data, is written into another index, which would be in the output in my else statement.

What mistake did I do, that I am actually getting not the hash, but the hard coded word %{fingerprint} and writing the newly data into the other index?

My filter function, for pseudonymization:

if [username] {
		ruby {
			code => "event.set('identities',[])"
		}
        #fingerprint username
        fingerprint {
            method => "SHA256"
            source => ["username"]
            #key => "${FINGERPRINT_KEY}"
        }
		
		#create sub document under identities field
        mutate {
			add_field => {
				'[identities][0][key]' => "%{fingerprint}"  
				'[identities][0][value]' => "%{username}" 
			} 
		}
		#overwrite username field with fingerprint
        mutate { 
			replace => { 
				"username" => "%{fingerprint}" 
			} 
		}
		#extract sub documents and yield a new document for each one into the LS pipeline
		ruby {
			code => "event.get('identities').each { |p| e=LogStash::Event.new(p); e.tag('identities'); new_event_block.call(e); } "
		}
		#remove fields on original doc
		mutate { 
			remove_field => ["fingerprint","identities"] 
		}
	}

Output Pipeline: 1st Output for identities, 2nd for normal data based on a custom field and third (where the newly "identities" land) Basically all other identities getting placed into third output, but I want them in the first.

output {
   if "identities" in [tags] {
   	elasticsearch {
   		hosts => ["https://localhost:9200"]
   		ssl => true
   		cacert => PATH
   		index => "identities"
   		document_id => "%{[key]}"
   		action => "create"
   		user => USER
   		password => ***
   		failure_type_logging_whitelist => ["version_conflict_engine_exception"]
   	}
   }
   if [sourceName] == "SERVER01" or [sourceName] == "SERVER02" {
   	elasticsearch {
   		hosts => ["https://localhost:9200"]
   		ssl => true
   		cacert => PATH
   		index => "DATA-INDEX"
   		user => USER
   		password => ***
   	}
   }
   else{
     elasticsearch {
   	hosts => ["https://localhost:9200"]
   	ssl => true
   	cacert => PATH
   	index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
   	user => USER
   	password => ***
     }
 }
}

In Kibana I am getting in my identities Index following:

{
  "_index": "identities",
  "_id": "%{fingerprint}",
  "_version": 1,
  "_score": 1,
  "_source": {
    "@timestamp": "2022-05-02T06:39:20.725652400Z",
    "tags": [
      "identities"
    ],
    "key": "%{fingerprint}",
    "value": "VALUENAME",
    "@version": "1"
  },
  "fields": {
    "@timestamp": [
      "2022-05-02T06:39:20.725Z"
    ],
    "value.keyword": [
      "VALUENAME"
    ],
    "tags.keyword": [
      "identities"
    ],
    "@version": [
      "1"
    ],
    "key.keyword": [
      "%{fingerprint}"
    ],
    "@version.keyword": [
      "1"
    ],
    "value": [
      "VALUENAME"
    ],
    "key": [
      "%{fingerprint}"
    ],
    "tags": [
      "identities"
    ]
  }
}

However, I have just this result. The other "identities" are in the third output.

{
  "_index": "%{[@metadata][beat]}-%{[@metadata][version]}-2022.05.02",
  "_id": "DOcQhYABdOHb9VhD8uET",
  "_version": 1,
  "_score": 1,
  "_source": {
    "tags": [
      "identities"
    ],
    "@version": "1",
    "value": "VALUENAME",
    "key": "%{fingerprint}",
    "@timestamp": "2022-05-02T13:58:41.920592800Z"
  },
  "fields": {
    "@timestamp": [
      "2022-05-02T13:58:41.920Z"
    ],
    "value.keyword": [
      "VALUENAME"
    ],
    "tags.keyword": [
      "identities"
    ],
    "@version": [
      "1"
    ],
    "key.keyword": [
      "%{fingerprint}"
    ],
    "@version.keyword": [
      "1"
    ],
    "value": [
      "VALUENAME"
    ],
    "key": [
      "%{fingerprint}"
    ],
    "tags": [
      "identities"
    ]
  }
}

Any ideas? It propably is just a minor mistake, but I do not find it..

If ECS is enabled then the target for the fingerprint filter is [event][hash], not [fingerprint].

Thank you. That is working.
I figured out, that for the first Event, I am getting that into my identities index.
The same usernames, (2nd and following) getting saved into index "%{[@metadata][beat]}-%[@metadata][version]}-Date format" I know it is going into the third output. How am I preventing it? Without canceling the third Output option?

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.