Synonym token filter didn't work properly with Chinese

I'm a Chinese user, and I have found that the synonym token filter sometimes does not handle Chinese correctly.
Here is my sample code. You can copy and paste it directly into your Sense console.
Please tell me why this happens and how to resolve this problem.

Elasticsearch version: 2.3.4
Kibana version: 4.5.3

# delete the index if it already exists
DELETE my_index

# new index
PUT my_index
{
  "settings": {
    "analysis": {
      "filter": {
        "synonym_filter": {
          "type": "synonym",
          "synonyms": [
            "愉悦,欢乐",
            "游轮,邮轮"
          ]
        }
      },
      "analyzer": {
        "synonym_analyzer_standard": {
          "type": "custom",
          "tokenizer": "standard",
          "filter": [
            "synonym_filter"
          ]
        }
      }
    }
  },
  "mappings": {
    "product": {
      "properties": {
        "name_standard": {
          "type": "string",
          "analyzer": "synonym_analyzer_standard"
        }
      }
    }
  }
}

# put data
PUT my_index/product/1
{
  "name_standard":"邮轮"
}
PUT my_index/product/2
{
  "name_standard":"游轮"
}
PUT my_index/product/3
{
  "name_standard":"欢乐"
}
PUT my_index/product/4
{
  "name_standard":"愉悦"
}

# With the standard tokenizer, the synonym pair "邮轮,游轮" works as expected.
# Both of these queries return the documents with ids 1 and 2.
GET my_index/product/_search
{
  "query": {
    "match": {
      "name_standard": "邮轮"
    }
  }
}
GET my_index/product/_search
{
  "query": {
    "match": {
      "name_standard": "游轮"
    }
  }
}

# With the standard tokenizer, the problem appears with the synonym pair "愉悦,欢乐", and I am puzzled.
# This query returns only the document with id 4.
GET my_index/product/_search
{
  "query": {
    "match": {
      "name_standard": "愉悦"
    }
  }
}
# This query returns only the document with id 3.
GET my_index/product/_search
{
  "query": {
    "match": {
      "name_standard": "欢乐"
    }
  }
}