Logstash import failed

Hi,

I made a successful data import into ELK (Logstash, Elasticsearch, Kibana). After several days I reran the import and got this error:

io/console not supported; tty will not be manipulated
Install the multipart-post gem.
LoadError: no such file to load -- composite_io
require at org/jruby/RubyKernel.java:1072
require at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/polyglot-0.3.5/lib/polyglot.rb:65
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/faraday-0.9.1/lib/faraday/upload_io.rb:2
require at org/jruby/RubyKernel.java:1072
require at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/polyglot-0.3.5/lib/polyglot.rb:65
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/faraday-0.9.1/lib/faraday.rb:1
each at org/jruby/RubyArray.java:1613
require_libs at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/faraday-0.9.1/lib/faraday.rb:80
require_libs at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/faraday-0.9.1/lib/faraday.rb:79
Faraday at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/faraday-0.9.1/lib/faraday.rb:240
require at org/jruby/RubyKernel.java:1072
require at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/polyglot-0.3.5/lib/polyglot.rb:65
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/faraday-0.9.1/lib/faraday.rb:16
require at org/jruby/RubyKernel.java:1072
require at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/polyglot-0.3.5/lib/polyglot.rb:65
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/elasticsearch-transport-1.0.12/lib/elasticsearch/transport.rb:1
require at org/jruby/RubyKernel.java:1072
require at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/polyglot-0.3.5/lib/polyglot.rb:65
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/elasticsearch-transport-1.0.12/lib/elasticsearch/transport.rb:5
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/elasticsearch-1.0.12/lib/elasticsearch.rb:1
map at org/jruby/RubyArray.java:2412
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/elasticsearch-1.0.12/lib/elasticsearch.rb:3
each at org/jruby/RubyArray.java:1613
(root) at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/logstash-output-elasticsearch-1.0.7-java/lib/logstash/outputs/elastic
search/protocol.rb:1
initialize at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/logstash-output-elasticsearch-1.0.7-java/lib/logstash/outputs/elastic
search/protocol.rb:56
register at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/logstash-output-elasticsearch-1.0.7-java/lib/logstash/outputs/elastic
search.rb:441
register at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/logstash-output-elasticsearch-1.0.7-java/lib/logstash/outputs/elastic
search.rb:437
call at org/jruby/RubyProc.java:271
start_outputs at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/logstash-core-1.5.4-java/lib/logstash/pipeline.rb:164
call at org/jruby/RubyProc.java:271
run at E:/Tools/ELK/logstash-1.5.4/logstash-1.5.4/vendor/bundle/jrub
y/1.9/gems/logstash-core-1.5.4-java/lib/logstash/pipeline.rb:83

I don't know what is wrong — no new applications were installed, not even a new Java version.

I'm a newbie with ELK.

Thank you for support

What does your config look like?
Can you upgrade?

Hi,

This is my config file

input {

file {
type => "dbrows"
path => "E:/PerformanceTestsResults/ct.perf/2.0.99.53967/Jmeter/LoginOWSports/Test_Scenario_50__2016_05_30--12_58__baseline/SqlReports/TableRecordcount.csv"
sincedb_path => "E:/Tools/ELK/logstash-1.5.4/.dbsince_TableRecordcount"
discover_interval => 600
start_position => "beginning"
}

}

filter {

grok {
patterns_dir => "../patterns"
match => [ "message", ""%{WORD:Schema}.%{WORD:TableName}","%{NUMBER:Rows_Before_Test}","%{NUMBER:Rows_After_Test}","%{NUMBER:Difference}""]
match => [ "path", "%{GREEDYDATA}:/%{GREEDYDATA}/%{GREEDYDATA:Environment}/%{GREEDYDATA:BOVersion}/%{GREEDYDATA:TestTool}/%{GREEDYDATA:TestScenario}/%{GREEDYDATA:TestCase}%{GREEDYDATA:Date}--%{GREEDYDATA:Time}%{GREEDYDATA:Comment}/%{GREEDYDATA}"]
}

}

output {
stdout{}
elasticsearch {
host => localhost
protocol => "http"
port => "9200"
index => "rows"
}
}

data in the file:
"TableName","Records Before Test","Records After Test","Difference"
"Uss","2681208","2681208","0"
"Pover","0","0","0"
"PCycle","11246699","11246699","0"
"PTransaction","0","0","0"
"Log","14571968","14571979","11"
"TransactionStep","3817021","3817021","0"
"Action","0","0","0"
"Process","0","0","0"
"Step","15856880","15856891","11"
"Ticket","0","0","0"
"kResult","2544676","2544676","0"

hi,

I made an upgrade to the latest version + Java 8,

then i get

The signal HUP is in use by the JVM and will not work correctly on this platform,

when running this simple script:
logstash -e 'input { stdin { } } output { elasticsearch { host => localhost } }'

I removed the upgrade, and it works fine on Logstash 1.5.4.

br,Dani

Then I get this error:
WARNING: [logstash-PERFCL002-472460-13456] failed to send ping to [[#zen_unicast
_1#][PERFCL002][inet[localhost/127.0.0.1:9200]]]
org.elasticsearch.transport.SendRequestTransportException: [][inet[localhost/127
.0.0.1:9200]][internal:discovery/zen/unicast_gte_1_4]
at org.elasticsearch.transport.TransportService.sendRequest(TransportSer
vice.java:286)
at org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing.sendPingR
equestTo14NodeWithFallback(UnicastZenPing.java:431)
at org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing.access$11
00(UnicastZenPing.java:62)
at org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing$3.run(Uni
castZenPing.java:385)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.
java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor
.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.elasticsearch.transport.NodeNotConnectedException: [][inet[localh
ost/127.0.0.1:9200]] Node not connected
at org.elasticsearch.transport.netty.NettyTransport.nodeChannel(NettyTra
nsport.java:964)
at org.elasticsearch.transport.netty.NettyTransport.sendRequest(NettyTra
nsport.java:656)
at org.elasticsearch.transport.TransportService.sendRequest(TransportSer
vice.java:276)
... 6 more

←[31mGot error to send bulk of actions: blocked by: [SERVICE_UNAVAILABLE/1/state
not recovered / initialized];[SERVICE_UNAVAILABLE/2/no master]; {:level=>:error
}←[0m
←[33mFailed to flush outgoing items {:outgoing_count=>4, :exception=>"Java::OrgE
lasticsearchClusterBlock::ClusterBlockException", :backtrace=>["org.elasticsearc
h.cluster.block.ClusterBlocks.globalBlockedException(org/elasticsearch/cluster/b
lock/ClusterBlocks.java:151)", "org.elasticsearch.cluster.block.ClusterBlocks.gl

any help?

thank you.

Br,Dani

What does your config look like?

In what location do you need the config file?

In Logstash, config files are used to read data from specific folders,

so please tell me which folder's config file I should send you.

Please understand that I have only been using ELK for a few weeks.

Well you defined a config with a file input and an ES output, right?
That is the one we want to see.

This was sent in the first answer:
input {

file {
type => "dbrows"
path =>
"E:/PerformanceTestsResults/ct.perf/2.0.99.53967/Jmeter/LoginOWSports/Test_Scenario_50__2016_05_30--12_58__baseline/SqlReports/TableRecordcount.csv"
sincedb_path => "E:/Tools/ELK/logstash-1.5.4/.dbsince_TableRecordcount"
discover_interval => 600
start_position => "beginning"
}

}

filter {

grok {
patterns_dir => "../patterns"
match =>
[ "message",
""%{WORD:Schema}.%{WORD:TableName}","%{NUMBER:Rows_Before_Test}","%{NUMBER:Rows_After_Test}","%{NUMBER:Difference}""]

match => [ "path", 

"%{GREEDYDATA}:/%{GREEDYDATA}/%{GREEDYDATA:Environment}/%{GREEDYDATA:BOVersion}/%{GREEDYDATA:TestTool}/%{GREEDYDATA:TestScenario}/%{GREEDYDATA:TestCase}%{GREEDYDATA:Date}--%{GREEDYDATA:Time}%{GREEDYDATA:Comment}/%{GREEDYDATA}"]
}

output {
stdout{}
elasticsearch {
host => localhost
protocol => "http"
port => "9200"
index => "rows"
}
}

data in the file:
"TableName","Records Before Test","Records After Test","Difference"
"Uss","2681208","2681208","0"
"Pover","0","0","0"
"PCycle","11246699","11246699","0"
"PTransaction","0","0","0"
"Log","14571968","14571979","11"
"TransactionStep","3817021","3817021","0"
"Action","0","0","0"
"Process","0","0","0"
"Step","15856880","15856891","11"
"Ticket","0","0","0"
"kResult","2544676","2544676","0"

You must not be on 2.3; the protocol => "http" option won't work with that.

Now ES returns this error when importing the file:

  at org.elasticsearch.common.netty.util.internal.DeadLockProofWorker$1.ru

(DeadLockProofWorker.java:42)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.
ava:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor
java:617)
at java.lang.Thread.run(Thread.java:745)
2016-06-08 10:47:17,906][WARN ][http.netty ] [Time Bomb] Caught e
ception while handling client http traffic, closing connection [id: 0x6ba10afe,
/127.0.0.1:56060 => /127.0.0.1:9200]
ava.lang.IllegalArgumentException: invalid version format: PERFCL002♀10.17.38.1
9
at org.elasticsearch.common.netty.handler.codec.http.HttpVersion.(
ttpVersion.java:94)
at org.elasticsearch.common.netty.handler.codec.http.HttpVersion.valueOf
HttpVersion.java:62)
at org.elasticsearch.common.netty.handler.codec.http.HttpRequestDecoder.
reateMessage(HttpRequestDecoder.java:75)
at org.elasticsearch.common.netty.handler.codec.http.HttpMessageDecoder.
ecode(HttpMessageDecoder.java:191)
at org.elasticsearch.common.netty.handler.codec.http.HttpMessageDecoder.
ecode(HttpMessageDecoder.java:102)
at org.elasticsearch.common.netty.handler.codec.replay.ReplayingDecoder.
allDecode(ReplayingDecoder.java:500)
at org.elasticsearch.common.netty.handler.codec.replay.ReplayingDecoder.
essageReceived(ReplayingDecoder.java:435)
at org.elasticsearch.common.netty.channel.SimpleChannelUpstreamHandler.h
ndleUpstream(SimpleChannelUpstreamHandler.java:70)
at org.elasticsearch.common.netty.channel.DefaultChannelPipeline.sendUps
ream(DefaultChannelPipeline.java:564)
at org.elasticsearch.common.netty.channel.DefaultChannelPipeline$Default
hannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791)
at org.elasticsearch.common.netty.OpenChannelsHandler.handleUpstream(Ope
ChannelsHandler.java:74)
at org.elasticsearch.common.netty.channel.DefaultChannelPipeline.sendUps
ream(DefaultChannelPipeline.java:564)
at org.elasticsearch.common.netty.channel.DefaultChannelPipeline.sendUps
ream(DefaultChannelPipeline.java:559)
at org.elasticsearch.common.netty.channel.Channels.fireMessageReceived(C
annels.java:268)
at org.elasticsearch.common.netty.channel.Channels.fireMessageReceived(C
annels.java:255)
at org.elasticsearch.common.netty.channel.socket.nio.NioWorker.read(NioW
rker.java:88)
at org.elasticsearch.common.netty.channel.socket.nio.AbstractNioWorker.p
ocess(AbstractNioWorker.java:108)
at org.elasticsearch.common.netty.channel.socket.nio.AbstractNioSelector
run(AbstractNioSelector.java:337)
at org.elasticsearch.common.netty.channel.socket.nio.AbstractNioWorker.r
n(AbstractNioWorker.java:89)
at org.elasticsearch.common.netty.channel.socket.nio.NioWorker.run(NioWo
ker.java:178)
at org.elasticsearch.common.netty.util.ThreadRenamingRunnable.run(Thread
enamingRunnable.java:108)
at org.elasticsearch.common.netty.util.internal.DeadLockProofWorker$1.ru
(DeadLockProofWorker.java:42)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.
ava:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor
java:617)
at java.lang.Thread.run(Thread.java:745)

This error is returned in Logstash:

←[31mGot error to send bulk of actions: blocked by: [SERVICE_UNAVAILABLE/1/state
not recovered / initialized];[SERVICE_UNAVAILABLE/2/no master]; {:level=>:error
}←[0m
←[33mFailed to flush outgoing items {:outgoing_count=>4, :exception=>"Java::OrgE
lasticsearchClusterBlock::ClusterBlockException", :backtrace=>["org.elasticsearc
h.cluster.block.ClusterBlocks.globalBlockedException(org/elasticsearch/cluster/b
lock/ClusterBlocks.java:151)", "org.elasticsearch.cluster.block.ClusterBlocks.gl
obalBlockedRaiseException(org/elasticsearch/cluster/block/ClusterBlocks.java:141
)", "org.elasticsearch.action.bulk.TransportBulkAction.executeBulk(org/elasticse
arch/action/bulk/TransportBulkAction.java:215)", "org.elasticsearch.action.bulk.
TransportBulkAction.access$000(org/elasticsearch/action/bulk/TransportBulkAction
.java:67)", "org.elasticsearch.action.bulk.TransportBulkAction$1.onFailure(org/e
lasticsearch/action/bulk/TransportBulkAction.java:153)", "org.elasticsearch.acti
on.support.TransportAction$ThreadedActionListener$2.run(org/elasticsearch/action
/support/TransportAction.java:137)", "java.util.concurrent.ThreadPoolExecutor.ru
nWorker(java/util/concurrent/ThreadPoolExecutor.java:1142)", "java.util.concurre
nt.ThreadPoolExecutor$Worker.run(java/util/concurrent/ThreadPoolExecutor.java:61
7)", "java.lang.Thread.run(java/lang/Thread.java:745)"], :level=>:warn}←[0m