Summary

Class:Azure.Search.Documents.Indexes.Models.ClassicTokenizer
Assembly:Azure.Search.Documents
File(s):C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\ClassicTokenizer.cs
C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\ClassicTokenizer.Serialization.cs
Covered lines: 0
Uncovered lines: 33
Coverable lines: 33
Total lines: 97
Line coverage: 0% (0 of 33)
Covered branches: 0
Total branches: 14
Branch coverage: 0% (0 of 14)

Metrics

Method | Cyclomatic complexity | Line coverage | Branch coverage
.ctor(...) | - | 0% | 0%
.ctor(...) | - | 0% | 0%
get_MaxTokenLength() | - | 0% | 100%
Azure.Core.IUtf8JsonSerializable.Write(...) | - | 0% | 0%
DeserializeClassicTokenizer(...) | - | 0% | 0%

File(s)

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\ClassicTokenizer.cs

# | Line | Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System;
 9
 10namespace Azure.Search.Documents.Indexes.Models
 11{
 12    /// <summary> Grammar-based tokenizer that is suitable for processing most European-language documents. This tokeniz
 13    public partial class ClassicTokenizer : LexicalTokenizer
 14    {
 15        /// <summary> Initializes a new instance of ClassicTokenizer. </summary>
 16        /// <param name="name"> The name of the tokenizer. It must only contain letters, digits, spaces, dashes or under
 17        /// <exception cref="ArgumentNullException"> <paramref name="name"/> is null. </exception>
 018        public ClassicTokenizer(string name) : base(name)
 19        {
 020            if (name == null)
 21            {
 022                throw new ArgumentNullException(nameof(name));
 23            }
 24
 025            ODataType = "#Microsoft.Azure.Search.ClassicTokenizer";
 026        }
 27
 28        /// <summary> Initializes a new instance of ClassicTokenizer. </summary>
 29        /// <param name="oDataType"> Identifies the concrete type of the tokenizer. </param>
 30        /// <param name="name"> The name of the tokenizer. It must only contain letters, digits, spaces, dashes or under
 31        /// <param name="maxTokenLength"> The maximum token length. Default is 255. Tokens longer than the maximum lengt
 032        internal ClassicTokenizer(string oDataType, string name, int? maxTokenLength) : base(oDataType, name)
 33        {
 034            MaxTokenLength = maxTokenLength;
 035            ODataType = oDataType ?? "#Microsoft.Azure.Search.ClassicTokenizer";
 036        }
 37
 38        /// <summary> The maximum token length. Default is 255. Tokens longer than the maximum length are split. The max
 039        public int? MaxTokenLength { get; set; }
 40    }
 41}

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\ClassicTokenizer.Serialization.cs

# | Line | Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System.Text.Json;
 9using Azure.Core;
 10
 11namespace Azure.Search.Documents.Indexes.Models
 12{
 13    public partial class ClassicTokenizer : IUtf8JsonSerializable
 14    {
 15        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
 16        {
 017            writer.WriteStartObject();
 018            if (Optional.IsDefined(MaxTokenLength))
 19            {
 020                writer.WritePropertyName("maxTokenLength");
 021                writer.WriteNumberValue(MaxTokenLength.Value);
 22            }
 023            writer.WritePropertyName("@odata.type");
 024            writer.WriteStringValue(ODataType);
 025            writer.WritePropertyName("name");
 026            writer.WriteStringValue(Name);
 027            writer.WriteEndObject();
 028        }
 29
 30        internal static ClassicTokenizer DeserializeClassicTokenizer(JsonElement element)
 31        {
 032            Optional<int> maxTokenLength = default;
 033            string odataType = default;
 034            string name = default;
 035            foreach (var property in element.EnumerateObject())
 36            {
 037                if (property.NameEquals("maxTokenLength"))
 38                {
 039                    maxTokenLength = property.Value.GetInt32();
 040                    continue;
 41                }
 042                if (property.NameEquals("@odata.type"))
 43                {
 044                    odataType = property.Value.GetString();
 045                    continue;
 46                }
 047                if (property.NameEquals("name"))
 48                {
 049                    name = property.Value.GetString();
 50                    continue;
 51                }
 52            }
 053            return new ClassicTokenizer(odataType, name, Optional.ToNullable(maxTokenLength));
 54        }
 55    }
 56}