Summary

Class: Microsoft.Azure.Search.Models.StandardTokenizer
Assembly: Microsoft.Azure.Search.Service
File(s): C:\Git\azure-sdk-for-net\sdk\search\Microsoft.Azure.Search.Service\src\Customizations\Indexes\Models\StandardTokenizer.cs
Covered lines: 0
Uncovered lines: 7
Coverable lines: 7
Total lines: 60
Line coverage: 0% (0 of 7)
Covered branches: 0
Total branches: 0

Metrics

Method                 Cyclomatic complexity  Line coverage  Branch coverage
.ctor()                -                      0%             100%
.ctor(...)             -                      0%             100%
get_MaxTokenLength()   -                      0%             100%
Validate()             -                      0%             100%
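
None of the members listed above are currently exercised by a test. The sketch below shows one way the seven coverable lines could be reached; the xUnit framework, the test class name, and the #pragma suppression of the obsolete-usage warning (CS0618) are assumptions for illustration and are not part of this report.

// Hypothetical xUnit test sketch: exercises the members listed in the table
// above so that the seven coverable lines of StandardTokenizer.cs are hit.
// CS0618 is suppressed because StandardTokenizer is marked [Obsolete].
#pragma warning disable 618
using Microsoft.Azure.Search.Models;
using Xunit;

public class StandardTokenizerCoverageTests
{
    [Fact]
    public void ConstructorsPropertyAndValidateAreExercised()
    {
        // .ctor() - line 31 of the file below.
        var defaultTokenizer = new StandardTokenizer();
        Assert.Null(defaultTokenizer.MaxTokenLength);

        // .ctor(name, maxTokenLength) - lines 37, 39 and 40.
        var tokenizer = new StandardTokenizer("my_tokenizer", maxTokenLength: 300);

        // get_MaxTokenLength() - line 47.
        Assert.Equal((int?)300, tokenizer.MaxTokenLength);

        // Validate() - lines 57 and 58; it only delegates to the base class,
        // so it should not throw for a tokenizer constructed with a valid name.
        tokenizer.Validate();
    }
}
#pragma warning restore 618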

File(s)

C:\Git\azure-sdk-for-net\sdk\search\Microsoft.Azure.Search.Service\src\Customizations\Indexes\Models\StandardTokenizer.cs

Hits  Line  Source
         1  // Copyright (c) Microsoft Corporation. All rights reserved.
         2  // Licensed under the MIT License. See License.txt in the project root for
         3  // license information.
         4  //
         5  // Code generated by Microsoft (R) AutoRest Code Generator.
         6  // Changes may cause incorrect behavior and will be lost if the code is
         7  // regenerated.
         8
         9  namespace Microsoft.Azure.Search.Models
        10  {
        11      using System;
        12      using System.Linq;
        13      using System.Collections.Generic;
        14      using Newtonsoft.Json;
        15      using Microsoft.Rest;
        16      using Microsoft.Rest.Serialization;
        17      using Microsoft.Rest.Azure;
        18
        19      /// <summary>
        20      /// Breaks text following the Unicode Text Segmentation rules. This
        21      /// tokenizer is implemented using Apache Lucene.
        22      /// <see href="http://lucene.apache.org/core/4_10_3/analyzers-common/org/apache/lucene/analysis/standard/StandardTok
        23      /// </summary>
        24      [JsonObject("#Microsoft.Azure.Search.StandardTokenizer")]
        25      [Obsolete("This type is obsolete. Please use StandardTokenizerV2 instead.")]
        26      public partial class StandardTokenizer : Tokenizer
        27      {
        28          /// <summary>
        29          /// Initializes a new instance of the StandardTokenizer class.
        30          /// </summary>
   0    31          public StandardTokenizer() { }
        32
        33          /// <summary>
        34          /// Initializes a new instance of the StandardTokenizer class.
        35          /// </summary>
        36          public StandardTokenizer(string name, int? maxTokenLength = default(int?))
   0    37              : base(name)
        38          {
   0    39              MaxTokenLength = maxTokenLength;
   0    40          }
        41
        42          /// <summary>
        43          /// Gets or sets the maximum token length. Default is 255. Tokens
        44          /// longer than the maximum length are split
        45          /// </summary>
        46          [JsonProperty(PropertyName = "maxTokenLength")]
   0    47          public int? MaxTokenLength { get; set; }
        48
        49          /// <summary>
        50          /// Validate the object.
        51          /// </summary>
        52          /// <exception cref="ValidationException">
        53          /// Thrown if validation fails
        54          /// </exception>
        55          public override void Validate()
        56          {
   0    57              base.Validate();
   0    58          }
        59      }
        60  }
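
The [Obsolete] attribute on line 25 directs callers to StandardTokenizerV2 instead. As a rough usage sketch only, assuming the track-1 Microsoft.Azure.Search.Models.Index model exposes a Tokenizers collection of IList<Tokenizer> as in the v10 SDK, the replacement tokenizer would be registered on an index definition like this; the index and tokenizer names are placeholders.

// Sketch (not part of this report): defining a custom tokenizer with the
// non-obsolete StandardTokenizerV2. Names are placeholders; the Tokenizers
// collection on Index is assumed from the v10 (track-1) SDK surface.
using System.Collections.Generic;
using Microsoft.Azure.Search.Models;

var index = new Index
{
    Name = "example-index",
    Tokenizers = new List<Tokenizer>
    {
        // Same semantics as the obsolete type above: tokens longer than
        // MaxTokenLength (default 255) are split.
        new StandardTokenizerV2("my_standard_tokenizer") { MaxTokenLength = 255 }
    }
};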