Summary

Class:Azure.Search.Documents.Indexes.Models.LuceneStandardTokenizer
Assembly:Azure.Search.Documents
File(s):C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Indexes\Models\LuceneStandardTokenizer.cs
Covered lines: 33
Uncovered lines: 0
Coverable lines: 33
Total lines: 87
Line coverage: 100% (33 of 33)
Covered branches: 12
Total branches: 12
Branch coverage: 100% (12 of 12)

Metrics

Method | Cyclomatic complexity | Line coverage | Branch coverage
.ctor(...)-100%100%
get_MaxTokenLength()-100%100%
Azure.Core.IUtf8JsonSerializable.Write(...)-100%100%
DeserializeLuceneStandardTokenizer(...)-100%100%

File(s)

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Indexes\Models\LuceneStandardTokenizer.cs

# | Line | Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4using System.Text.Json;
 5using Azure.Core;
 6
 7namespace Azure.Search.Documents.Indexes.Models
 8{
 9    [CodeGenModel("LuceneStandardTokenizerV2")]
 10    [CodeGenSuppress(nameof(LuceneStandardTokenizer), typeof(string), typeof(string), typeof(int?))]
 11    public partial class LuceneStandardTokenizer : IUtf8JsonSerializable
 12    {
 13        /// <summary>
 14        /// Initializes a new instance of LuceneStandardTokenizer.
 15        /// </summary>
 16        /// <param name="name">
 17        /// The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores,
 18        /// can only start and end with alphanumeric characters, and is limited to 128 characters.
 19        /// </param>
 320        public LuceneStandardTokenizer(string name) : base(name)
 21        {
 322            Argument.AssertNotNull(name, nameof(name));
 23
 324            ODataType = "#Microsoft.Azure.Search.StandardTokenizerV2";
 325        }
 26
 27        /// <summary>
 28        /// The maximum token length. Default is 255.
 29        /// Tokens longer than the maximum length are split.
 30        /// The maximum token length that can be used is 300 characters.
 31        /// </summary>
 832        public int? MaxTokenLength { get; set; }
 33
 34        void global::Azure.Core.IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
 35        {
 236            writer.WriteStartObject();
 237            if (MaxTokenLength != null)
 38            {
 239                writer.WritePropertyName("maxTokenLength");
 240                writer.WriteNumberValue(MaxTokenLength.Value);
 41            }
 242            writer.WritePropertyName("@odata.type");
 243            writer.WriteStringValue(ODataType);
 244            writer.WritePropertyName("name");
 245            writer.WriteStringValue(Name);
 246            writer.WriteEndObject();
 247        }
 48
 49        internal static LuceneStandardTokenizer DeserializeLuceneStandardTokenizer(JsonElement element)
 50        {
 251            int? maxTokenLength = default;
 252            string odataType = default;
 253            string name = default;
 54
 1655            foreach (var property in element.EnumerateObject())
 56            {
 657                if (property.NameEquals("@odata.type"))
 58                {
 259                    odataType = property.Value.GetString();
 260                    continue;
 61                }
 62
 463                if (property.NameEquals("name"))
 64                {
 265                    name = property.Value.GetString();
 266                    continue;
 67                }
 68
 269                if (property.NameEquals("maxTokenLength"))
 70                {
 271                    if (property.Value.ValueKind == JsonValueKind.Null)
 72                    {
 73                        continue;
 74                    }
 275                    maxTokenLength = property.Value.GetInt32();
 76                    continue;
 77                }
 78            }
 79
 280            return new LuceneStandardTokenizer(name)
 281            {
 282                ODataType = odataType,
 283                MaxTokenLength = maxTokenLength,
 284            };
 85        }
 86    }
 87}