// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

  3. package com.azure.search.documents.indexes.models;

  4. import com.azure.core.annotation.Fluent;
  5. import com.fasterxml.jackson.annotation.JsonCreator;
  6. import com.fasterxml.jackson.annotation.JsonProperty;
  7. import com.fasterxml.jackson.annotation.JsonTypeInfo;
  8. import com.fasterxml.jackson.annotation.JsonTypeName;

  9. /**
  10.  * Grammar-based tokenizer that is suitable for processing most
  11.  * European-language documents. This tokenizer is implemented using Apache
  12.  * Lucene.
  13.  */
  14. @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "@odata.type")
  15. @JsonTypeName("#Microsoft.Azure.Search.ClassicTokenizer")
  16. @Fluent
  17. public final class ClassicTokenizer extends LexicalTokenizer {
  18.     /*
  19.      * The maximum token length. Default is 255. Tokens longer than the maximum
  20.      * length are split. The maximum token length that can be used is 300
  21.      * characters.
  22.      */
  23.     @JsonProperty(value = "maxTokenLength")
  24.     private Integer maxTokenLength;

  25.     /**
  26.      * Constructor of {@link ClassicTokenizer}.
  27.      *
  28.      * @param name The name of the token filter. It must only contain letters, digits,
  29.      * spaces, dashes or underscores, can only start and end with alphanumeric
  30.      * characters, and is limited to 128 characters.
  31.      */
  32.     @JsonCreator
  33.     public ClassicTokenizer(@JsonProperty(value = "name") String name) {
  34.         super(name);
  35.     }

  36.     /**
  37.      * Get the maxTokenLength property: The maximum token length. Default is
  38.      * 255. Tokens longer than the maximum length are split. The maximum token
  39.      * length that can be used is 300 characters.
  40.      *
  41.      * @return the maxTokenLength value.
  42.      */
  43.     public Integer getMaxTokenLength() {
  44.         return this.maxTokenLength;
  45.     }

  46.     /**
  47.      * Set the maxTokenLength property: The maximum token length. Default is
  48.      * 255. Tokens longer than the maximum length are split. The maximum token
  49.      * length that can be used is 300 characters.
  50.      *
  51.      * @param maxTokenLength the maxTokenLength value to set.
  52.      * @return the ClassicTokenizer object itself.
  53.      */
  54.     public ClassicTokenizer setMaxTokenLength(Integer maxTokenLength) {
  55.         this.maxTokenLength = maxTokenLength;
  56.         return this;
  57.     }
  58. }