Summary

Class:Azure.Search.Documents.Indexes.Models.CustomAnalyzer
Assembly:Azure.Search.Documents
File(s):C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\CustomAnalyzer.cs
C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\CustomAnalyzer.Serialization.cs
C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Indexes\Models\CustomAnalyzer.cs
Covered lines:0
Uncovered lines:63
Coverable lines:63
Total lines:162
Line coverage:0% (0 of 63)
Covered branches:0
Total branches:26
Branch coverage:0% (0 of 26)

Metrics

Method  Cyclomatic complexity  Line coverage  Branch coverage
.ctor(...)-0%0%
Azure.Core.IUtf8JsonSerializable.Write(...)-0%0%
DeserializeCustomAnalyzer(...)-0%0%
.ctor(...)-0%100%
get_TokenizerName()-0%100%
get_TokenFilters()-0%100%
get_CharFilters()-0%100%

File(s)

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\CustomAnalyzer.cs

#  Line  Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System;
 9using System.Collections.Generic;
 10using Azure.Core;
 11
 12namespace Azure.Search.Documents.Indexes.Models
 13{
 14    /// <summary> Allows you to take control over the process of converting text into indexable/searchable tokens. It&apos;s a user-defined configuration consisting of a single predefined tokenizer and one or more custom filters. </summary>
 15    public partial class CustomAnalyzer : LexicalAnalyzer
 16    {
 17
 18        /// <summary> Initializes a new instance of CustomAnalyzer. </summary>
 19        /// <param name="oDataType"> Identifies the concrete type of the analyzer. </param>
 20        /// <param name="name"> The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. </param>
 21        /// <param name="tokenizerName"> The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. </param>
 22        /// <param name="tokenFilters"> A list of token filters used to filter out or modify the tokens generated by a tokenizer. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. </param>
 23        /// <param name="charFilters"> A list of character filters used to prepare input text before it is processed by the tokenizer. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. </param>
 024        internal CustomAnalyzer(string oDataType, string name, LexicalTokenizerName tokenizerName, IList<TokenFilterName> tokenFilters, IList<string> charFilters) : base(oDataType, name)
 25        {
 026            TokenizerName = tokenizerName;
 027            TokenFilters = tokenFilters;
 028            CharFilters = charFilters;
 029            ODataType = oDataType ?? "#Microsoft.Azure.Search.CustomAnalyzer";
 030        }
 31    }
 32}

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Generated\Models\CustomAnalyzer.Serialization.cs

#  Line  Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System.Collections.Generic;
 9using System.Text.Json;
 10using Azure.Core;
 11
 12namespace Azure.Search.Documents.Indexes.Models
 13{
 14    public partial class CustomAnalyzer : IUtf8JsonSerializable
 15    {
 16        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
 17        {
 018            writer.WriteStartObject();
 019            writer.WritePropertyName("tokenizer");
 020            writer.WriteStringValue(TokenizerName.ToString());
 021            if (Optional.IsCollectionDefined(TokenFilters))
 22            {
 023                writer.WritePropertyName("tokenFilters");
 024                writer.WriteStartArray();
 025                foreach (var item in TokenFilters)
 26                {
 027                    writer.WriteStringValue(item.ToString());
 28                }
 029                writer.WriteEndArray();
 30            }
 031            if (Optional.IsCollectionDefined(CharFilters))
 32            {
 033                writer.WritePropertyName("charFilters");
 034                writer.WriteStartArray();
 035                foreach (var item in CharFilters)
 36                {
 037                    writer.WriteStringValue(item);
 38                }
 039                writer.WriteEndArray();
 40            }
 041            writer.WritePropertyName("@odata.type");
 042            writer.WriteStringValue(ODataType);
 043            writer.WritePropertyName("name");
 044            writer.WriteStringValue(Name);
 045            writer.WriteEndObject();
 046        }
 47
 48        internal static CustomAnalyzer DeserializeCustomAnalyzer(JsonElement element)
 49        {
 050            LexicalTokenizerName tokenizer = default;
 051            Optional<IList<TokenFilterName>> tokenFilters = default;
 052            Optional<IList<string>> charFilters = default;
 053            string odataType = default;
 054            string name = default;
 055            foreach (var property in element.EnumerateObject())
 56            {
 057                if (property.NameEquals("tokenizer"))
 58                {
 059                    tokenizer = new LexicalTokenizerName(property.Value.GetString());
 060                    continue;
 61                }
 062                if (property.NameEquals("tokenFilters"))
 63                {
 064                    List<TokenFilterName> array = new List<TokenFilterName>();
 065                    foreach (var item in property.Value.EnumerateArray())
 66                    {
 067                        array.Add(new TokenFilterName(item.GetString()));
 68                    }
 069                    tokenFilters = array;
 070                    continue;
 71                }
 072                if (property.NameEquals("charFilters"))
 73                {
 074                    List<string> array = new List<string>();
 075                    foreach (var item in property.Value.EnumerateArray())
 76                    {
 077                        array.Add(item.GetString());
 78                    }
 079                    charFilters = array;
 080                    continue;
 81                }
 082                if (property.NameEquals("@odata.type"))
 83                {
 084                    odataType = property.Value.GetString();
 085                    continue;
 86                }
 087                if (property.NameEquals("name"))
 88                {
 089                    name = property.Value.GetString();
 90                    continue;
 91                }
 92            }
 093            return new CustomAnalyzer(odataType, name, tokenizer, Optional.ToList(tokenFilters), Optional.ToList(charFilters));
 94        }
 95    }
 96}

C:\Git\azure-sdk-for-net\sdk\search\Azure.Search.Documents\src\Indexes\Models\CustomAnalyzer.cs

#  Line  Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4using System;
 5using System.Collections.Generic;
 6using Azure.Core;
 7
 8namespace Azure.Search.Documents.Indexes.Models
 9{
 10    public partial class CustomAnalyzer
 11    {
 12        /// <summary> Initializes a new instance of CustomAnalyzer. </summary>
 13        /// <param name="name"> The name of the analyzer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. </param>
 14        /// <param name="tokenizerName"> The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. </param>
 15        /// <exception cref="ArgumentNullException"><paramref name="name"/> is null.</exception>
 016        public CustomAnalyzer(string name, LexicalTokenizerName tokenizerName) : base(name)
 17        {
 018            TokenizerName = tokenizerName;
 019            TokenFilters = new ChangeTrackingList<TokenFilterName>();
 020            CharFilters = new ChangeTrackingList<string>();
 021            ODataType = "#Microsoft.Azure.Search.CustomAnalyzer";
 022        }
 23
 24        /// <summary> The name of the tokenizer to use to divide continuous text into a sequence of tokens, such as breaking a sentence into words. </summary>
 25        [CodeGenMember("Tokenizer")]
 026        public LexicalTokenizerName TokenizerName { get; set; }
 27
 28        /// <summary> A list of token filters used to filter out or modify the tokens generated by a tokenizer. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. </summary>
 029        public IList<TokenFilterName> TokenFilters { get; }
 30
 31        /// <summary> A list of character filters used to prepare input text before it is processed by the tokenizer. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. </summary>
 032        public IList<string> CharFilters { get; }
 33    }
 34}