Error 0.19.6 - Compress Field


(Gustavo Maia) #1

When I use the compress field, I get this problem:

[2012-06-25 14:53:31,003][DEBUG][action.search.type ] [Abomination] [209] Failed to execute fetch phase
java.lang.IndexOutOfBoundsException: Index: 1872067, Size: 22
at java.util.ArrayList.rangeCheck(ArrayList.java:571)
at java.util.ArrayList.get(ArrayList.java:349)
at org.apache.lucene.index.FieldInfos.fieldInfo(FieldInfos.java:255)
at org.apache.lucene.index.FieldsReader.doc(FieldsReader.java:238)
at org.apache.lucene.index.SegmentReader.document(SegmentReader.java:492)
at org.apache.lucene.index.DirectoryReader.document(DirectoryReader.java:568)
at org.apache.lucene.search.IndexSearcher.doc(IndexSearcher.java:264)
at org.elasticsearch.search.fetch.FetchPhase.loadDocument(FetchPhase.java:288)
at org.elasticsearch.search.fetch.FetchPhase.execute(FetchPhase.java:158)
at org.elasticsearch.search.SearchService.executeFetchPhase(SearchService.java:430)
at org.elasticsearch.search.action.SearchServiceTransportAction.sendExecuteFetch(SearchServiceTransportAction.java:344)
at org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction$AsyncAction.executeFetch(TransportSearchQueryThenFetchAction.java:149)
at org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction$AsyncAction$2.run(TransportSearchQueryThenFetchAction.java:136)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)

Note: when I disable field compression, I do not see any error.

My settings:

{
  "diarioindex" : {
    "settings" : {
      "index.analysis.analyzer.documentofacetanalyzer.tokenizer" : "whitespace",
      "index.analysis.filter.jus_stop.type" : "stop",
      "index.store.type" : "niofs",
      "index.analysis.filter.jus_stop.enable_position_increments" : "false",
      "index.analysis.filter.jus_word_delimiter.type_table_path" : "/opt/elasticsearch/config/delimiter.txt",
      "index.number_of_replicas" : "1",
      "index.analysis.filter.jus_word_delimiter.catenate_numbers" : "true",
      "index.analysis.filter.jus_word_delimiter.preserve_original" : "true",
      "index.store.compress.stored" : "true",
      "index.analysis.filter.jus_word_delimiter.catenate_words" : "true",
      "index.analysis.filter.jus_word_delimiter.split_on_numerics" : "true",
      "index.analysis.filter.jus_word_delimiter.catenate_all" : "true",
      "index.analysis.filter.jus_stop.ignore_case" : "true",
      "index.analysis.analyzer.documentochildanalyzer.tokenizer" : "whitespace",
      "index.number_of_shards" : "10",
      "index.analysis.filter.jus_stop.lang" : "brazilian",
      "index.analysis.analyzer.default.filter.3" : "lowercase",
      "index.analysis.analyzer.default.filter.4" : "jus_stop",
      "index.analysis.analyzer.default.tokenizer" : "whitespace",
      "index.analysis.filter.jus_word_delimiter.type" : "word_delimiter",
      "index.analysis.analyzer.default.filter.1" : "standard",
      "index.analysis.analyzer.default.filter.2" : "jus_word_delimiter",
      "index.analysis.filter.my_synonym.synonyms_path" : "/opt/elasticsearch/config/synonyms_atos.txt",
      "index.analysis.analyzer.default.filter.0" : "accentFilter",
      "index.analysis.filter.my_synonym.type" : "synonym",
      "index.fs.memory.enabled" : "true",
      "index.analysis.filter.jus_stop.stopwords_path" : "/opt/elasticsearch/config/stopwords.txt",
      "index.version.created" : "190699"
    }
  }
}
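
(The settings above are the output of the index settings API; roughly, assuming a node on localhost:9200:)

    curl -XGET 'http://localhost:9200/diarioindex/_settings?pretty=true'

Stored-field compression itself is the "index.store.compress.stored" : "true" line; on a new index it would be enabled at creation time with the same key, for example (hypothetical index name, everything else omitted):

    curl -XPUT 'http://localhost:9200/someindex' -d '{
      "settings" : { "index.store.compress.stored" : true }
    }'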

--
Gustavo Maia


(Shay Banon) #2

Can you gist a recreation?



(Gustavo Maia) #3

I disabled _source; could that be the problem?
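
(For context, the mapping disables _source roughly like this; the type and field names here are illustrative, not the real mapping:)

    "documento" : {
      "_source" : { "enabled" : false },
      "properties" : {
        "texto" : { "type" : "string", "store" : "yes" }
      }
    }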


--
Gustavo Maia


(Shay Banon) #4

And do you store anything else?



(Shay Banon) #5

Even if not, it wouldn't matter; you shouldn't get this failure. Is there any chance of a recreation? You probably don't need all the analysis configuration, just the basic compression and mapping: index some sample data and execute the search that causes the problem.
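
(A minimal recreation along those lines might look like this; the index, type, and field names are placeholders, and only the compression setting plus a stored field with _source disabled are kept:)

    curl -XPUT 'http://localhost:9200/testcompress' -d '{
      "settings" : {
        "index.number_of_shards" : 1,
        "index.store.compress.stored" : true
      },
      "mappings" : {
        "doc" : {
          "_source" : { "enabled" : false },
          "properties" : {
            "body" : { "type" : "string", "store" : "yes" }
          }
        }
      }
    }'

    curl -XPUT 'http://localhost:9200/testcompress/doc/1' -d '{ "body" : "some sample text" }'
    curl -XPOST 'http://localhost:9200/testcompress/_refresh'
    curl -XGET 'http://localhost:9200/testcompress/_search?q=body:sample&fields=body'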



(Gustavo Maia) #6

Yes. And I see this problem with some documents, not all of them.


--
Gustavo Maia


(Gustavo Maia) #7

OK, I will try it.

It is a bit difficult to reproduce because I index through my own API, so I will try to generate JSON files instead.
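
(If it helps, the files can be written in the bulk format, one action line followed by one document line per entry, and replayed directly; the file, type, and field names below are placeholders:)

    # docs.json -- newline-delimited bulk file
    { "index" : { "_index" : "diarioindex", "_type" : "documento", "_id" : "1" } }
    { "texto" : "conteudo do documento 1" }
    { "index" : { "_index" : "diarioindex", "_type" : "documento", "_id" : "2" } }
    { "texto" : "conteudo do documento 2" }

    curl -XPOST 'http://localhost:9200/_bulk' --data-binary @docs.json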


--
Gustavo Maia


(system) #8