// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// <auto-generated/>

#nullable disable

using System;
using System.ComponentModel;

namespace Azure.Search.Documents.Indexes.Models
{
    /// <summary> Defines the names of all tokenizers supported by Azure Cognitive Search. </summary>
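    /// <example>
    /// A minimal usage sketch (illustrative; not part of the generated code). The type behaves as an
    /// extensible enum: values convert implicitly from strings and compare case-insensitively.
    /// <code>
    /// LexicalTokenizerName name = "whitespace";              // implicit conversion from string
    /// bool same = name == LexicalTokenizerName.Whitespace;   // true
    /// string raw = LexicalTokenizerName.Keyword.ToString();  // "keyword_v2"
    /// </code>
    /// </example>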
    public readonly partial struct LexicalTokenizerName : IEquatable<LexicalTokenizerName>
    {
        private readonly string _value;

        /// <summary> Initializes a new instance of <see cref="LexicalTokenizerName"/>. </summary>
        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
        public LexicalTokenizerName(string value)
        {
            _value = value ?? throw new ArgumentNullException(nameof(value));
        }

        private const string ClassicValue = "classic";
        private const string EdgeNGramValue = "edgeNGram";
        private const string KeywordValue = "keyword_v2";
        private const string LetterValue = "letter";
        private const string LowercaseValue = "lowercase";
        private const string MicrosoftLanguageTokenizerValue = "microsoft_language_tokenizer";
        private const string MicrosoftLanguageStemmingTokenizerValue = "microsoft_language_stemming_tokenizer";
        private const string NGramValue = "nGram";
        private const string PathHierarchyValue = "path_hierarchy_v2";
        private const string PatternValue = "pattern";
        private const string StandardValue = "standard_v2";
        private const string UaxUrlEmailValue = "uax_url_email";
        private const string WhitespaceValue = "whitespace";

        /// <summary> Grammar-based tokenizer that is suitable for processing most European-language documents. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/ClassicTokenizer.html. </summary>
        public static LexicalTokenizerName Classic { get; } = new LexicalTokenizerName(ClassicValue);
        /// <summary> Tokenizes the input from an edge into n-grams of the given size(s). See https://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/EdgeNGramTokenizer.html. </summary>
        public static LexicalTokenizerName EdgeNGram { get; } = new LexicalTokenizerName(EdgeNGramValue);
        /// <summary> Emits the entire input as a single token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/KeywordTokenizer.html. </summary>
        public static LexicalTokenizerName Keyword { get; } = new LexicalTokenizerName(KeywordValue);
        /// <summary> Divides text at non-letters. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LetterTokenizer.html. </summary>
        public static LexicalTokenizerName Letter { get; } = new LexicalTokenizerName(LetterValue);
        /// <summary> Divides text at non-letters and converts them to lower case. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/LowerCaseTokenizer.html. </summary>
        public static LexicalTokenizerName Lowercase { get; } = new LexicalTokenizerName(LowercaseValue);
        /// <summary> Divides text using language-specific rules. </summary>
        public static LexicalTokenizerName MicrosoftLanguageTokenizer { get; } = new LexicalTokenizerName(MicrosoftLanguageTokenizerValue);
        /// <summary> Divides text using language-specific rules and reduces words to their base forms. </summary>
        public static LexicalTokenizerName MicrosoftLanguageStemmingTokenizer { get; } = new LexicalTokenizerName(MicrosoftLanguageStemmingTokenizerValue);
        /// <summary> Tokenizes the input into n-grams of the given size(s). See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/ngram/NGramTokenizer.html. </summary>
        public static LexicalTokenizerName NGram { get; } = new LexicalTokenizerName(NGramValue);
        /// <summary> Tokenizer for path-like hierarchies. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/path/PathHierarchyTokenizer.html. </summary>
        public static LexicalTokenizerName PathHierarchy { get; } = new LexicalTokenizerName(PathHierarchyValue);
        /// <summary> Tokenizer that uses regex pattern matching to construct distinct tokens. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/pattern/PatternTokenizer.html. </summary>
        public static LexicalTokenizerName Pattern { get; } = new LexicalTokenizerName(PatternValue);
        /// <summary> Standard Lucene analyzer; composed of the standard tokenizer, lowercase filter and stop filter. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTokenizer.html. </summary>
        public static LexicalTokenizerName Standard { get; } = new LexicalTokenizerName(StandardValue);
        /// <summary> Tokenizes urls and emails as one token. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/UAX29URLEmailTokenizer.html. </summary>
        public static LexicalTokenizerName UaxUrlEmail { get; } = new LexicalTokenizerName(UaxUrlEmailValue);
        /// <summary> Divides text at whitespace. See http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/core/WhitespaceTokenizer.html. </summary>
        public static LexicalTokenizerName Whitespace { get; } = new LexicalTokenizerName(WhitespaceValue);
        /// <summary> Determines if two <see cref="LexicalTokenizerName"/> values are the same. </summary>
        public static bool operator ==(LexicalTokenizerName left, LexicalTokenizerName right) => left.Equals(right);
        /// <summary> Determines if two <see cref="LexicalTokenizerName"/> values are not the same. </summary>
        public static bool operator !=(LexicalTokenizerName left, LexicalTokenizerName right) => !left.Equals(right);
        /// <summary> Converts a string to a <see cref="LexicalTokenizerName"/>. </summary>
        public static implicit operator LexicalTokenizerName(string value) => new LexicalTokenizerName(value);

        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override bool Equals(object obj) => obj is LexicalTokenizerName other && Equals(other);
        /// <inheritdoc />
        public bool Equals(LexicalTokenizerName other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);

        /// <inheritdoc />
        [EditorBrowsable(EditorBrowsableState.Never)]
        public override int GetHashCode() => _value?.GetHashCode() ?? 0;
        /// <inheritdoc />
        public override string ToString() => _value;
    }
}