Dot replace is not working

Hi Support,

I have the configuration below for my index.

PUT dottest
{
"settings":{
"index" : {
"number_of_shards" : 1,
"number_of_replicas" : 0
},
"analysis": {
"analyzer": {
"splchar_analyzer": {
"tokenizer": "standard",
"char_filter": [
"spl_char_filter"
],
"filter": [
"lowercase"
]
}
},
"char_filter": {
"spl_char_filter": {
"type": "pattern_replace",
"pattern": "\.",
"replacement": " "
}
}
}
},
"mappings": {
"properties": {
"text": {
"type": "text",
"analyzer": "splchar_analyzer"
}
}
}
}

It works when I check it using the request below.

POST dottest/_analyze
{
"analyzer": "splchar_analyzer",
"text": """This is something 152045.525"""
}

I got the tokens below, which are fine for me.

{
"tokens" : [
{
"token" : "this",
"start_offset" : 0,
"end_offset" : 4,
"type" : "",
"position" : 0
},
{
"token" : "is",
"start_offset" : 5,
"end_offset" : 7,
"type" : "",
"position" : 1
},
{
"token" : "something",
"start_offset" : 8,
"end_offset" : 17,
"type" : "",
"position" : 2
},
{
"token" : "152045",
"start_offset" : 18,
"end_offset" : 24,
"type" : "",
"position" : 3
},
{
"token" : "525",
"start_offset" : 25,
"end_offset" : 28,
"type" : "",
"position" : 4
}
]
}

However, when documents are inserted via the Logstash script, it does not replace the dot with a space.
Here is an example document from the live index:

{
"_index" : "indexname",
"_type" : "_doc",
"_id" : "1278041941",
"_version" : 2,
"found" : true,
"took" : 142,
"term_vectors" : {
"filecontent" : {
"field_statistics" : {
"sum_doc_freq" : 2848226634,
"doc_count" : 18534325,
"sum_ttf" : 6310068234
},
"terms" : {
"5961.20" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 20,
"start_offset" : 145,
"end_offset" : 152
}
]
},
"6119.27" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 23,
"start_offset" : 167,
"end_offset" : 174
}
]
},
"6153.10" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 17,
"start_offset" : 123,
"end_offset" : 130
}
]
},
"6339.60" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 14,
"start_offset" : 101,
"end_offset" : 108
}
]
},
"6819.20" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 11,
"start_offset" : 79,
"end_offset" : 86
}
]
},
"document" : {
"term_freq" : 2,
"tokens" : [
{
"position" : 1,
"start_offset" : 8,
"end_offset" : 16
},
{
"position" : 3,
"start_offset" : 22,
"end_offset" : 30
}
]
},
"for" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 5,
"start_offset" : 34,
"end_offset" : 37
}
]
},
"hrs" : {
"term_freq" : 5,
"tokens" : [
{
"position" : 10,
"start_offset" : 71,
"end_offset" : 74
},
{
"position" : 13,
"start_offset" : 93,
"end_offset" : 96
},
{
"position" : 16,
"start_offset" : 115,
"end_offset" : 118
},
{
"position" : 19,
"start_offset" : 137,
"end_offset" : 140
},
{
"position" : 22,
"start_offset" : 159,
"end_offset" : 162
}
]
},
"is" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 4,
"start_offset" : 31,
"end_offset" : 33
}
]
},
"labor" : {
"term_freq" : 5,
"tokens" : [
{
"position" : 9,
"start_offset" : 65,
"end_offset" : 70
},
{
"position" : 12,
"start_offset" : 87,
"end_offset" : 92
},
{
"position" : 15,
"start_offset" : 109,
"end_offset" : 114
},
{
"position" : 18,
"start_offset" : 131,
"end_offset" : 136
},
{
"position" : 21,
"start_offset" : 153,
"end_offset" : 158
}
]
},
"only" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 8,
"start_offset" : 54,
"end_offset" : 58
}
]
},
"purpose" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 7,
"start_offset" : 46,
"end_offset" : 53
}
]
},
"testing" : {
"term_freq" : 2,
"tokens" : [
{
"position" : 0,
"start_offset" : 0,
"end_offset" : 7
},
{
"position" : 6,
"start_offset" : 38,
"end_offset" : 45
}
]
},
"this" : {
"term_freq" : 1,
"tokens" : [
{
"position" : 2,
"start_offset" : 17,
"end_offset" : 21
}
]
}
}
}
}
}

The highlighted terms are not split on the dot.

Is there an issue in the mapping or in the Logstash configuration?
Can anyone help me with this issue?

Thanks