Summary

Class: Azure.Search.Documents.Indexes.Models.LexicalTokenizer
Assembly: Azure.Search.Documents
File(s): C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\LexicalTokenizer.cs
         C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\LexicalTokenizer.Serialization.cs
         C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Indexes\Models\LexicalTokenizer.cs
Covered lines: 17
Uncovered lines: 22
Coverable lines: 39
Total lines: 111
Line coverage: 43.5% (17 of 39)
Covered branches: 25
Total branches: 64
Branch coverage: 39% (25 of 64)

Metrics

Method                                        Cyclomatic complexity   Line coverage   Branch coverage
.ctor(...)                                    -                       100%            100%
get_ODataType()                               -                       100%            100%
get_Name()                                    -                       100%            100%
Azure.Core.IUtf8JsonSerializable.Write(...)   -                       0%              100%
DeserializeLexicalTokenizer(...)              -                       34.78%          38.71%
.ctor(...)                                    -                       100%            50%

File(s)

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\LexicalTokenizer.cs

Hits  Line  Source
         1  // Copyright (c) Microsoft Corporation. All rights reserved.
         2  // Licensed under the MIT License.
         3
         4  // <auto-generated/>
         5
         6  #nullable disable
         7
         8  using System;
         9
        10  namespace Azure.Search.Documents.Indexes.Models
        11  {
        12      /// <summary> Base type for tokenizers. </summary>
        13      public partial class LexicalTokenizer
        14      {
        15
        16          /// <summary> Initializes a new instance of LexicalTokenizer. </summary>
        17          /// <param name="oDataType"> Identifies the concrete type of the tokenizer. </param>
        18          /// <param name="name"> The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. </param>
   4    19          internal LexicalTokenizer(string oDataType, string name)
        20          {
   4    21              ODataType = oDataType;
   4    22              Name = name;
   4    23          }
        24
        25          /// <summary> Identifies the concrete type of the tokenizer. </summary>
  38    26          internal string ODataType { get; set; }
        27          /// <summary> The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. </summary>
  27    28          public string Name { get; set; }
        29      }
        30  }
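
The base type above carries only the OData discriminator and the tokenizer name; everything else lives on the concrete tokenizer types. A minimal usage sketch, assuming a public concrete type such as KeywordTokenizer (one of the types dispatched to in the serialization switch below) chains through this base; ODataType is internal, so Name is the only base member visible to calling code:

    using System;
    using Azure.Search.Documents.Indexes.Models;

    class BaseTypeSketch
    {
        static void Main()
        {
            // Any concrete tokenizer can be held as the base type. Name is the
            // only public member the base exposes, and it has a public setter
            // (the 27-hit line 28 above).
            LexicalTokenizer tokenizer = new KeywordTokenizer("my-keyword-tokenizer");
            Console.WriteLine(tokenizer.Name);   // my-keyword-tokenizer

            tokenizer.Name = "renamed-tokenizer";
            Console.WriteLine(tokenizer.Name);   // renamed-tokenizer
        }
    }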

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\LexicalTokenizer.Serialization.cs

Hits  Line  Source
         1  // Copyright (c) Microsoft Corporation. All rights reserved.
         2  // Licensed under the MIT License.
         3
         4  // <auto-generated/>
         5
         6  #nullable disable
         7
         8  using System.Text.Json;
         9  using Azure.Core;
        10
        11  namespace Azure.Search.Documents.Indexes.Models
        12  {
        13      public partial class LexicalTokenizer : IUtf8JsonSerializable
        14      {
        15          void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
        16          {
   0    17              writer.WriteStartObject();
   0    18              writer.WritePropertyName("@odata.type");
   0    19              writer.WriteStringValue(ODataType);
   0    20              writer.WritePropertyName("name");
   0    21              writer.WriteStringValue(Name);
   0    22              writer.WriteEndObject();
   0    23          }
        24
        25          internal static LexicalTokenizer DeserializeLexicalTokenizer(JsonElement element)
        26          {
   8    27              if (element.TryGetProperty("@odata.type", out JsonElement discriminator))
        28              {
   8    29                  switch (discriminator.GetString())
        30                  {
   0    31                      case "#Microsoft.Azure.Search.ClassicTokenizer": return ClassicTokenizer.DeserializeClassicTokenizer(element);
   0    32                      case "#Microsoft.Azure.Search.EdgeNGramTokenizer": return EdgeNGramTokenizer.DeserializeEdgeNGramTokenizer(element);
   1    33                      case "#Microsoft.Azure.Search.KeywordTokenizer": return KeywordTokenizer.DeserializeKeywordTokenizer(element);
   1    34                      case "#Microsoft.Azure.Search.KeywordTokenizerV2": return KeywordTokenizer.DeserializeKeywordTokenizer(element);
   0    35                      case "#Microsoft.Azure.Search.MicrosoftLanguageStemmingTokenizer": return MicrosoftLanguageStemmingTokenizer.DeserializeMicrosoftLanguageStemmingTokenizer(element);
   0    36                      case "#Microsoft.Azure.Search.MicrosoftLanguageTokenizer": return MicrosoftLanguageTokenizer.DeserializeMicrosoftLanguageTokenizer(element);
   0    37                      case "#Microsoft.Azure.Search.NGramTokenizer": return NGramTokenizer.DeserializeNGramTokenizer(element);
   1    38                      case "#Microsoft.Azure.Search.PathHierarchyTokenizerV2": return PathHierarchyTokenizer.DeserializePathHierarchyTokenizer(element);
   3    39                      case "#Microsoft.Azure.Search.PatternTokenizer": return PatternTokenizer.DeserializePatternTokenizer(element);
   1    40                      case "#Microsoft.Azure.Search.StandardTokenizer": return LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(element);
   1    41                      case "#Microsoft.Azure.Search.StandardTokenizerV2": return LuceneStandardTokenizer.DeserializeLuceneStandardTokenizer(element);
   0    42                      case "#Microsoft.Azure.Search.UaxUrlEmailTokenizer": return UaxUrlEmailTokenizer.DeserializeUaxUrlEmailTokenizer(element);
        43                  }
        44              }
   0    45              string odataType = default;
   0    46              string name = default;
   0    47              foreach (var property in element.EnumerateObject())
        48              {
   0    49                  if (property.NameEquals("@odata.type"))
        50                  {
   0    51                      odataType = property.Value.GetString();
   0    52                      continue;
        53                  }
   0    54                  if (property.NameEquals("name"))
        55                  {
   0    56                      name = property.Value.GetString();
        57                      continue;
        58                  }
        59              }
   0    60              return new LexicalTokenizer(odataType, name);
        61          }
        62      }
        63  }
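
Most of the uncovered lines above are discriminator cases that no test deserializes (ClassicTokenizer, EdgeNGramTokenizer, the two Microsoft language tokenizers, NGramTokenizer, UaxUrlEmailTokenizer) plus the fallback property loop, and the explicit Write implementation is never hit at all. A minimal sketch of the dispatch step, using only System.Text.Json; the payload is a hypothetical index-definition fragment, and calling the internal DeserializeLexicalTokenizer directly would require internal access to the assembly:

    using System;
    using System.Text.Json;

    class DiscriminatorSketch
    {
        static void Main()
        {
            // Hypothetical payload shaped like a "tokenizers" entry of an index
            // definition; "@odata.type" is the discriminator the switch above reads.
            const string json = @"{
                ""@odata.type"": ""#Microsoft.Azure.Search.ClassicTokenizer"",
                ""name"": ""my-classic-tokenizer""
            }";

            using JsonDocument document = JsonDocument.Parse(json);
            JsonElement element = document.RootElement;

            // Mirrors the first step of DeserializeLexicalTokenizer: read the
            // discriminator and pick a concrete deserializer. This value would
            // take the ClassicTokenizer case, which the report shows with 0 hits.
            if (element.TryGetProperty("@odata.type", out JsonElement discriminator))
            {
                Console.WriteLine($"dispatch on {discriminator.GetString()}");
            }

            // When no known discriminator matches, the generated code falls back to
            // the property loop and builds a plain LexicalTokenizer(odataType, name).
            string name = element.GetProperty("name").GetString();
            Console.WriteLine($"name = {name}");
        }
    }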

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Indexes\Models\LexicalTokenizer.cs

Hits  Line  Source
         1  // Copyright (c) Microsoft Corporation. All rights reserved.
         2  // Licensed under the MIT License.
         3
         4  using System;
         5
         6  namespace Azure.Search.Documents.Indexes.Models
         7  {
         8      public partial class LexicalTokenizer
         9      {
        10          /// <summary> Initializes a new instance of LexicalTokenizer. </summary>
        11          /// <param name="name"> The name of the tokenizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. </param>
        12          /// <exception cref="ArgumentNullException"><paramref name="name"/> is null.</exception>
  10    13          private protected LexicalTokenizer(string name)
        14          {
  10    15              Name = name ?? throw new ArgumentNullException(nameof(name));
  10    16          }
        17      }
        18  }
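
The 50% branch coverage on this constructor means only the non-null path is exercised; nothing currently hits the ArgumentNullException branch. A minimal sketch of both branches, assuming a public concrete type such as ClassicTokenizer chains to the private protected constructor shown above (the specific derived type is an assumption; any public tokenizer from the serialization switch would do):

    using System;
    using Azure.Search.Documents.Indexes.Models;

    class NameGuardSketch
    {
        static void Main()
        {
            // Covered branch: a valid name passes the null check on line 15 above.
            var tokenizer = new ClassicTokenizer("my-classic-tokenizer");
            Console.WriteLine(tokenizer.Name);

            // Uncovered branch: a null name should throw from the base constructor.
            try
            {
                _ = new ClassicTokenizer(null);
            }
            catch (ArgumentNullException ex)
            {
                Console.WriteLine($"threw as expected for parameter '{ex.ParamName}'");
            }
        }
    }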