// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

package com.azure.search.documents.indexes.implementation.models;

import com.azure.core.annotation.Fluent;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeId;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;

/** Breaks text following the Unicode Text Segmentation rules. This tokenizer is implemented using Apache Lucene. */
@JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        include = JsonTypeInfo.As.EXISTING_PROPERTY,
        property = "@odata.type",
        visible = true)
@JsonTypeName("#Microsoft.Azure.Search.StandardTokenizerV2")
@Fluent
public final class LuceneStandardTokenizerV2 extends LexicalTokenizer {
    /*
     * Identifies the concrete type of the tokenizer.
     */
    @JsonTypeId
    @JsonProperty(value = "@odata.type", required = true)
    private String odataType = "#Microsoft.Azure.Search.StandardTokenizerV2";

    /*
     * The maximum token length. Default is 255. Tokens longer than the maximum
     * length are split. The maximum token length that can be used is 300
     * characters.
     */
    @JsonProperty(value = "maxTokenLength")
    private Integer maxTokenLength;

    /**
     * Creates an instance of LuceneStandardTokenizerV2 class.
     *
     * @param name the name value to set.
     */
    @JsonCreator
    public LuceneStandardTokenizerV2(@JsonProperty(value = "name", required = true) String name) {
        super(name);
    }

    /**
     * Get the odataType property: Identifies the concrete type of the tokenizer.
     *
     * @return the odataType value.
     */
    public String getOdataType() {
        return this.odataType;
    }

    /**
     * Get the maxTokenLength property: The maximum token length. Default is 255. Tokens longer than the maximum length
     * are split. The maximum token length that can be used is 300 characters.
     *
     * @return the maxTokenLength value.
     */
    public Integer getMaxTokenLength() {
        return this.maxTokenLength;
    }

    /**
     * Set the maxTokenLength property: The maximum token length. Default is 255. Tokens longer than the maximum length
     * are split. The maximum token length that can be used is 300 characters.
     *
     * @param maxTokenLength the maxTokenLength value to set.
     * @return the LuceneStandardTokenizerV2 object itself.
     */
    public LuceneStandardTokenizerV2 setMaxTokenLength(Integer maxTokenLength) {
        this.maxTokenLength = maxTokenLength;
        return this;
    }
}
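
// Minimal usage sketch (illustrative only, not part of the generated model): constructs the
// tokenizer with its required name, configures the optional maximum token length, and reads the
// "@odata.type" discriminator. The sample class name, the tokenizer name "my_tokenizer", and the
// value 300 are assumptions for demonstration; only members defined above are used.
class LuceneStandardTokenizerV2Sample {
    public static void main(String[] args) {
        LuceneStandardTokenizerV2 tokenizer =
                new LuceneStandardTokenizerV2("my_tokenizer").setMaxTokenLength(300);

        System.out.println(tokenizer.getOdataType());      // "#Microsoft.Azure.Search.StandardTokenizerV2"
        System.out.println(tokenizer.getMaxTokenLength()); // 300
    }
}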