Trouble using NEST

I have the following "query" I need to rewrite it using NEST and I can't seem to get it right.

PUT company
{
  "settings": {
    "index": {
      "analysis": {
        "filter": {},
        "analyzer": {
          "keyword_analyzer": {
            "filter": [
              "lowercase",
              "asciifolding",
              "trim"
            ],
            "char_filter": [],
            "type": "custom",
            "tokenizer": "keyword"
          },
          "edge_ngram_analyzer": {
            "filter": [
              "lowercase"
            ],
            "tokenizer": "edge_ngram_tokenizer"
          },
          "edge_ngram_search_analyzer": {
            "tokenizer": "lowercase"
          }
        },
        "tokenizer": {
          "edge_ngram_tokenizer": {
            "type": "edge_ngram",
            "min_gram": 2,
            "max_gram": 5,
            "token_chars": [
              "letter"
            ]
          }
        }
      }
    }
  },
  "mappings": {
    "_doc": {
      "properties": {
        "companyId": {
          "type": "keyword"
        },
        "companyName": {
          "type": "text",
          "fields": {
            "keywordstring": {
              "type": "text",
              "analyzer": "keyword_analyzer"
            },
            "edgengram": {
              "type": "text",
              "analyzer": "edge_ngram_analyzer",
              "search_analyzer": "edge_ngram_search_analyzer"
            },
            "completion": {
              "type": "completion",
              "contexts": [
                {
                  "name": "companyGroupId",
                  "type": "category",
                  "path": "companyGroupId"
                }
              ]
            }
          },
          "analyzer": "standard"
        },
        "companyTradingName": {
          "type": "text",
          "index": false
        },
        "companyGroupId": {
          "type": "keyword"
        },
        "agencyId": {
          "type": "keyword"
        }
      }
    }
  }
}

This is to where I got to, I need to STILL NEED TO ADD THE keywordstring AND edgengram to the CompanyName field. but can't seem to get it right. If i try and create the index i get the following error

Message: System.ArgumentException : Could not get field name for CompletionPropertyDescriptor1 mapping`

If i compare my NEST query with the raw query the companyName section look completly wrong.

 var response = this.Client.CreateIndex(
                this.indexName,
                index => index
                    .Mappings(
                        ms => ms.Map<CompanyDocument>(
                            m => m.Properties(
                                p => p
                                    .Keyword(t => t.Name(n => n.AgencyId))
                                    .Keyword(t => t.Name(n => n.CompanyGroupId))
                                    .Keyword(t => t.Name(n => n.CompanyId))
                                    .Text(t => t.Name(n => n.CompanyName.Suffix("completion")).Analyzer("keyword_analyzer").Analyzer("edge_ngram_analyzer").SearchAnalyzer("edge_ngram_search_analyzer")
                                        .Fields(ff =>
                                            ff.Completion(descriptor => descriptor.Contexts(contextsDescriptor => contextsDescriptor.Category(contextDescriptor => contextDescriptor.Name("companyGroupId").Path("companyGroupId"))))))
                                    .Text(t => t.Name(n => n.CompanyTradingName).Index(false)))))
                    .Settings(
                        f => f.Analysis(analysis => analysis
                                .Analyzers(analyzers => analyzers
                                            .Custom("keyword_analyzer", _ => _.Filters("lowercase", "asciifolding", "trim").CharFilters().Tokenizer("keyword"))
                                            .Custom("edge_ngram_analyzer", _ => _.Filters("lowercase").Tokenizer("edge_ngram_tokenizer")).Custom("edge_ngram_search_analyzer", _ => _.Tokenizer("lowercase")))
                                .Tokenizers(
                                    descriptor => descriptor.EdgeNGram("edge_ngram_tokenizer", t => t.MinGram(2).MaxGram(5).TokenChars(TokenChar.Letter))))));

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.