Summary

Class: Azure.Analytics.Synapse.Artifacts.Models.AzureDatabricksLinkedService
Assembly: Azure.Analytics.Synapse.Artifacts
File(s): C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Artifacts\src\Generated\Models\AzureDatabricksLinkedService.cs
         C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Artifacts\src\Generated\Models\AzureDatabricksLinkedService.Serialization.cs
Covered lines: 0
Uncovered lines: 224
Coverable lines: 224
Total lines: 413
Line coverage: 0% (0 of 224)
Covered branches: 0
Total branches: 106
Branch coverage: 0% (0 of 106)

Metrics

Method                                        Cyclomatic complexity  Line coverage  Branch coverage
.ctor(...)                                    -                      0%             0%
.ctor(...)                                    -                      0%             0%
get_Domain()                                  -                      0%             100%
get_AccessToken()                             -                      0%             100%
get_ExistingClusterId()                       -                      0%             100%
get_InstancePoolId()                          -                      0%             100%
get_NewClusterVersion()                       -                      0%             100%
get_NewClusterNumOfWorker()                   -                      0%             100%
get_NewClusterNodeType()                      -                      0%             100%
get_NewClusterSparkConf()                     -                      0%             100%
get_NewClusterSparkEnvVars()                  -                      0%             100%
get_NewClusterCustomTags()                    -                      0%             100%
get_NewClusterDriverNodeType()                -                      0%             100%
get_NewClusterInitScripts()                   -                      0%             100%
get_NewClusterEnableElasticDisk()             -                      0%             100%
get_EncryptedCredential()                     -                      0%             100%
Azure.Core.IUtf8JsonSerializable.Write(...)   -                      0%             0%
DeserializeAzureDatabricksLinkedService(...)  -                      0%             0%
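
Every member above reports 0% line coverage, so none of the constructor validation, property accessors, or serialization paths are exercised by the current test suite. Below is a minimal test sketch that would cover the public constructor, its null checks, and the getters; the test framework (xUnit) and the use of SecureString as the concrete SecretBase implementation are assumptions, not taken from this report.

// Sketch of a unit test exercising the uncovered constructor, null checks, and getters.
// Assumptions: xUnit as the test framework, SecureString as the SecretBase implementation.
using System;
using Azure.Analytics.Synapse.Artifacts.Models;
using Xunit;

public class AzureDatabricksLinkedServiceTests
{
    [Fact]
    public void Constructor_SetsRequiredPropertiesAndDefaults()
    {
        var accessToken = new SecureString("fake-token");
        var service = new AzureDatabricksLinkedService("adb-1234.5.azuredatabricks.net", accessToken);

        Assert.Equal("adb-1234.5.azuredatabricks.net", service.Domain);
        Assert.Same(accessToken, service.AccessToken);
        // The three dictionary properties are pre-initialized by the constructor.
        Assert.Empty(service.NewClusterSparkConf);
        Assert.Empty(service.NewClusterSparkEnvVars);
        Assert.Empty(service.NewClusterCustomTags);
    }

    [Fact]
    public void Constructor_ThrowsOnNullArguments()
    {
        Assert.Throws<ArgumentNullException>(
            () => new AzureDatabricksLinkedService(null, new SecureString("token")));
        Assert.Throws<ArgumentNullException>(
            () => new AzureDatabricksLinkedService("adb-1234.5.azuredatabricks.net", null));
    }
}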

File(s)

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Artifacts\src\Generated\Models\AzureDatabricksLinkedService.cs

#   Line   Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System;
 9using System.Collections.Generic;
 10using Azure.Core;
 11
 12namespace Azure.Analytics.Synapse.Artifacts.Models
 13{
 14    /// <summary> Azure Databricks linked service. </summary>
 15    public partial class AzureDatabricksLinkedService : LinkedService
 16    {
 17        /// <summary> Initializes a new instance of AzureDatabricksLinkedService. </summary>
 18        /// <param name="domain"> &lt;REGION&gt;.azuredatabricks.net, domain name of your Databricks deployment. Type: s
 19        /// <param name="accessToken"> Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/a
 20        /// <exception cref="ArgumentNullException"> <paramref name="domain"/> or <paramref name="accessToken"/> is null
 021        public AzureDatabricksLinkedService(object domain, SecretBase accessToken)
 22        {
 023            if (domain == null)
 24            {
 025                throw new ArgumentNullException(nameof(domain));
 26            }
 027            if (accessToken == null)
 28            {
 029                throw new ArgumentNullException(nameof(accessToken));
 30            }
 31
 032            Domain = domain;
 033            AccessToken = accessToken;
 034            NewClusterSparkConf = new ChangeTrackingDictionary<string, object>();
 035            NewClusterSparkEnvVars = new ChangeTrackingDictionary<string, object>();
 036            NewClusterCustomTags = new ChangeTrackingDictionary<string, object>();
 037            Type = "AzureDatabricks";
 038        }
 39
 40        /// <summary> Initializes a new instance of AzureDatabricksLinkedService. </summary>
 41        /// <param name="type"> Type of linked service. </param>
 42        /// <param name="connectVia"> The integration runtime reference. </param>
 43        /// <param name="description"> Linked service description. </param>
 44        /// <param name="parameters"> Parameters for linked service. </param>
 45        /// <param name="annotations"> List of tags that can be used for describing the linked service. </param>
 46        /// <param name="additionalProperties"> . </param>
 47        /// <param name="domain"> &lt;REGION&gt;.azuredatabricks.net, domain name of your Databricks deployment. Type: s
 48        /// <param name="accessToken"> Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/a
 49        /// <param name="existingClusterId"> The id of an existing interactive cluster that will be used for all runs of
 50        /// <param name="instancePoolId"> The id of an existing instance pool that will be used for all runs of this act
 51        /// <param name="newClusterVersion"> If not using an existing interactive cluster, this specifies the Spark vers
 52        /// <param name="newClusterNumOfWorker"> If not using an existing interactive cluster, this specifies the number
 53        /// <param name="newClusterNodeType"> The node type of the new job cluster. This property is required if newClus
 54        /// <param name="newClusterSparkConf"> A set of optional, user-specified Spark configuration key-value pairs. </
 55        /// <param name="newClusterSparkEnvVars"> A set of optional, user-specified Spark environment variables key-valu
 56        /// <param name="newClusterCustomTags"> Additional tags for cluster resources. This property is ignored in insta
 57        /// <param name="newClusterDriverNodeType"> The driver node type for the new job cluster. This property is ignor
 58        /// <param name="newClusterInitScripts"> User-defined initialization scripts for the new cluster. Type: array of
 59        /// <param name="newClusterEnableElasticDisk"> Enable the elastic disk on the new cluster. This property is now 
 60        /// <param name="encryptedCredential"> The encrypted credential used for authentication. Credentials are encrypt
 061        internal AzureDatabricksLinkedService(string type, IntegrationRuntimeReference connectVia, string description, I
 62        {
 063            Domain = domain;
 064            AccessToken = accessToken;
 065            ExistingClusterId = existingClusterId;
 066            InstancePoolId = instancePoolId;
 067            NewClusterVersion = newClusterVersion;
 068            NewClusterNumOfWorker = newClusterNumOfWorker;
 069            NewClusterNodeType = newClusterNodeType;
 070            NewClusterSparkConf = newClusterSparkConf;
 071            NewClusterSparkEnvVars = newClusterSparkEnvVars;
 072            NewClusterCustomTags = newClusterCustomTags;
 073            NewClusterDriverNodeType = newClusterDriverNodeType;
 074            NewClusterInitScripts = newClusterInitScripts;
 075            NewClusterEnableElasticDisk = newClusterEnableElasticDisk;
 076            EncryptedCredential = encryptedCredential;
 077            Type = type ?? "AzureDatabricks";
 078        }
 79
 80        /// <summary> &lt;REGION&gt;.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Ex
 081        public object Domain { get; set; }
 82        /// <summary> Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authent
 083        public SecretBase AccessToken { get; set; }
 84        /// <summary> The id of an existing interactive cluster that will be used for all runs of this activity. Type: s
 085        public object ExistingClusterId { get; set; }
 86        /// <summary> The id of an existing instance pool that will be used for all runs of this activity. Type: string 
 087        public object InstancePoolId { get; set; }
 88        /// <summary> If not using an existing interactive cluster, this specifies the Spark version of a new job cluste
 089        public object NewClusterVersion { get; set; }
 90        /// <summary> If not using an existing interactive cluster, this specifies the number of worker nodes to use for
 091        public object NewClusterNumOfWorker { get; set; }
 92        /// <summary> The node type of the new job cluster. This property is required if newClusterVersion is specified 
 093        public object NewClusterNodeType { get; set; }
 94        /// <summary> A set of optional, user-specified Spark configuration key-value pairs. </summary>
 095        public IDictionary<string, object> NewClusterSparkConf { get; }
 96        /// <summary> A set of optional, user-specified Spark environment variables key-value pairs. </summary>
 097        public IDictionary<string, object> NewClusterSparkEnvVars { get; }
 98        /// <summary> Additional tags for cluster resources. This property is ignored in instance pool configurations. <
 099        public IDictionary<string, object> NewClusterCustomTags { get; }
 100        /// <summary> The driver node type for the new job cluster. This property is ignored in instance pool configurat
 0101        public object NewClusterDriverNodeType { get; set; }
 102        /// <summary> User-defined initialization scripts for the new cluster. Type: array of strings (or Expression wit
 0103        public object NewClusterInitScripts { get; set; }
 104        /// <summary> Enable the elastic disk on the new cluster. This property is now ignored, and takes the default el
 0105        public object NewClusterEnableElasticDisk { get; set; }
 106        /// <summary> The encrypted credential used for authentication. Credentials are encrypted using the integration 
 0107        public object EncryptedCredential { get; set; }
 108    }
 109}
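
As the listing shows, only domain and accessToken are required by the public constructor; the new-cluster settings are plain object properties, and the three dictionaries are get-only but pre-populated with empty ChangeTrackingDictionary instances. A short usage sketch under those assumptions (values are illustrative, and SecureString is assumed as the SecretBase implementation):

using Azure.Analytics.Synapse.Artifacts.Models;

var linkedService = new AzureDatabricksLinkedService(
    "adb-1234567890123456.7.azuredatabricks.net",    // illustrative workspace domain
    new SecureString("<access-token>"))              // SecureString assumed as the SecretBase type
{
    NewClusterVersion = "10.4.x-scala2.12",
    NewClusterNumOfWorker = "2:8",                   // Databricks autoscale range convention
    NewClusterNodeType = "Standard_DS3_v2"
};

// The get-only dictionaries are already initialized, so they can be filled in place.
linkedService.NewClusterSparkConf["spark.speculation"] = "true";
linkedService.NewClusterCustomTags["team"] = "analytics";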

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Artifacts\src\Generated\Models\AzureDatabricksLinkedService.Serialization.cs

#   Line   Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System.Collections.Generic;
 9using System.Text.Json;
 10using Azure.Core;
 11
 12namespace Azure.Analytics.Synapse.Artifacts.Models
 13{
 14    public partial class AzureDatabricksLinkedService : IUtf8JsonSerializable
 15    {
 16        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
 17        {
 018            writer.WriteStartObject();
 019            writer.WritePropertyName("type");
 020            writer.WriteStringValue(Type);
 021            if (Optional.IsDefined(ConnectVia))
 22            {
 023                writer.WritePropertyName("connectVia");
 024                writer.WriteObjectValue(ConnectVia);
 25            }
 026            if (Optional.IsDefined(Description))
 27            {
 028                writer.WritePropertyName("description");
 029                writer.WriteStringValue(Description);
 30            }
 031            if (Optional.IsCollectionDefined(Parameters))
 32            {
 033                writer.WritePropertyName("parameters");
 034                writer.WriteStartObject();
 035                foreach (var item in Parameters)
 36                {
 037                    writer.WritePropertyName(item.Key);
 038                    writer.WriteObjectValue(item.Value);
 39                }
 040                writer.WriteEndObject();
 41            }
 042            if (Optional.IsCollectionDefined(Annotations))
 43            {
 044                writer.WritePropertyName("annotations");
 045                writer.WriteStartArray();
 046                foreach (var item in Annotations)
 47                {
 048                    writer.WriteObjectValue(item);
 49                }
 050                writer.WriteEndArray();
 51            }
 052            writer.WritePropertyName("typeProperties");
 053            writer.WriteStartObject();
 054            writer.WritePropertyName("domain");
 055            writer.WriteObjectValue(Domain);
 056            writer.WritePropertyName("accessToken");
 057            writer.WriteObjectValue(AccessToken);
 058            if (Optional.IsDefined(ExistingClusterId))
 59            {
 060                writer.WritePropertyName("existingClusterId");
 061                writer.WriteObjectValue(ExistingClusterId);
 62            }
 063            if (Optional.IsDefined(InstancePoolId))
 64            {
 065                writer.WritePropertyName("instancePoolId");
 066                writer.WriteObjectValue(InstancePoolId);
 67            }
 068            if (Optional.IsDefined(NewClusterVersion))
 69            {
 070                writer.WritePropertyName("newClusterVersion");
 071                writer.WriteObjectValue(NewClusterVersion);
 72            }
 073            if (Optional.IsDefined(NewClusterNumOfWorker))
 74            {
 075                writer.WritePropertyName("newClusterNumOfWorker");
 076                writer.WriteObjectValue(NewClusterNumOfWorker);
 77            }
 078            if (Optional.IsDefined(NewClusterNodeType))
 79            {
 080                writer.WritePropertyName("newClusterNodeType");
 081                writer.WriteObjectValue(NewClusterNodeType);
 82            }
 083            if (Optional.IsCollectionDefined(NewClusterSparkConf))
 84            {
 085                writer.WritePropertyName("newClusterSparkConf");
 086                writer.WriteStartObject();
 087                foreach (var item in NewClusterSparkConf)
 88                {
 089                    writer.WritePropertyName(item.Key);
 090                    writer.WriteObjectValue(item.Value);
 91                }
 092                writer.WriteEndObject();
 93            }
 094            if (Optional.IsCollectionDefined(NewClusterSparkEnvVars))
 95            {
 096                writer.WritePropertyName("newClusterSparkEnvVars");
 097                writer.WriteStartObject();
 098                foreach (var item in NewClusterSparkEnvVars)
 99                {
 0100                    writer.WritePropertyName(item.Key);
 0101                    writer.WriteObjectValue(item.Value);
 102                }
 0103                writer.WriteEndObject();
 104            }
 0105            if (Optional.IsCollectionDefined(NewClusterCustomTags))
 106            {
 0107                writer.WritePropertyName("newClusterCustomTags");
 0108                writer.WriteStartObject();
 0109                foreach (var item in NewClusterCustomTags)
 110                {
 0111                    writer.WritePropertyName(item.Key);
 0112                    writer.WriteObjectValue(item.Value);
 113                }
 0114                writer.WriteEndObject();
 115            }
 0116            if (Optional.IsDefined(NewClusterDriverNodeType))
 117            {
 0118                writer.WritePropertyName("newClusterDriverNodeType");
 0119                writer.WriteObjectValue(NewClusterDriverNodeType);
 120            }
 0121            if (Optional.IsDefined(NewClusterInitScripts))
 122            {
 0123                writer.WritePropertyName("newClusterInitScripts");
 0124                writer.WriteObjectValue(NewClusterInitScripts);
 125            }
 0126            if (Optional.IsDefined(NewClusterEnableElasticDisk))
 127            {
 0128                writer.WritePropertyName("newClusterEnableElasticDisk");
 0129                writer.WriteObjectValue(NewClusterEnableElasticDisk);
 130            }
 0131            if (Optional.IsDefined(EncryptedCredential))
 132            {
 0133                writer.WritePropertyName("encryptedCredential");
 0134                writer.WriteObjectValue(EncryptedCredential);
 135            }
 0136            writer.WriteEndObject();
 0137            foreach (var item in AdditionalProperties)
 138            {
 0139                writer.WritePropertyName(item.Key);
 0140                writer.WriteObjectValue(item.Value);
 141            }
 0142            writer.WriteEndObject();
 0143        }
 144
 145        internal static AzureDatabricksLinkedService DeserializeAzureDatabricksLinkedService(JsonElement element)
 146        {
 0147            string type = default;
 0148            Optional<IntegrationRuntimeReference> connectVia = default;
 0149            Optional<string> description = default;
 0150            Optional<IDictionary<string, ParameterSpecification>> parameters = default;
 0151            Optional<IList<object>> annotations = default;
 0152            object domain = default;
 0153            SecretBase accessToken = default;
 0154            Optional<object> existingClusterId = default;
 0155            Optional<object> instancePoolId = default;
 0156            Optional<object> newClusterVersion = default;
 0157            Optional<object> newClusterNumOfWorker = default;
 0158            Optional<object> newClusterNodeType = default;
 0159            Optional<IDictionary<string, object>> newClusterSparkConf = default;
 0160            Optional<IDictionary<string, object>> newClusterSparkEnvVars = default;
 0161            Optional<IDictionary<string, object>> newClusterCustomTags = default;
 0162            Optional<object> newClusterDriverNodeType = default;
 0163            Optional<object> newClusterInitScripts = default;
 0164            Optional<object> newClusterEnableElasticDisk = default;
 0165            Optional<object> encryptedCredential = default;
 0166            IDictionary<string, object> additionalProperties = default;
 0167            Dictionary<string, object> additionalPropertiesDictionary = default;
 0168            foreach (var property in element.EnumerateObject())
 169            {
 0170                if (property.NameEquals("type"))
 171                {
 0172                    type = property.Value.GetString();
 0173                    continue;
 174                }
 0175                if (property.NameEquals("connectVia"))
 176                {
 0177                    connectVia = IntegrationRuntimeReference.DeserializeIntegrationRuntimeReference(property.Value);
 0178                    continue;
 179                }
 0180                if (property.NameEquals("description"))
 181                {
 0182                    description = property.Value.GetString();
 0183                    continue;
 184                }
 0185                if (property.NameEquals("parameters"))
 186                {
 0187                    Dictionary<string, ParameterSpecification> dictionary = new Dictionary<string, ParameterSpecificatio
 0188                    foreach (var property0 in property.Value.EnumerateObject())
 189                    {
 0190                        dictionary.Add(property0.Name, ParameterSpecification.DeserializeParameterSpecification(property
 191                    }
 0192                    parameters = dictionary;
 0193                    continue;
 194                }
 0195                if (property.NameEquals("annotations"))
 196                {
 0197                    List<object> array = new List<object>();
 0198                    foreach (var item in property.Value.EnumerateArray())
 199                    {
 0200                        array.Add(item.GetObject());
 201                    }
 0202                    annotations = array;
 0203                    continue;
 204                }
 0205                if (property.NameEquals("typeProperties"))
 206                {
 0207                    foreach (var property0 in property.Value.EnumerateObject())
 208                    {
 0209                        if (property0.NameEquals("domain"))
 210                        {
 0211                            domain = property0.Value.GetObject();
 0212                            continue;
 213                        }
 0214                        if (property0.NameEquals("accessToken"))
 215                        {
 0216                            accessToken = SecretBase.DeserializeSecretBase(property0.Value);
 0217                            continue;
 218                        }
 0219                        if (property0.NameEquals("existingClusterId"))
 220                        {
 0221                            existingClusterId = property0.Value.GetObject();
 0222                            continue;
 223                        }
 0224                        if (property0.NameEquals("instancePoolId"))
 225                        {
 0226                            instancePoolId = property0.Value.GetObject();
 0227                            continue;
 228                        }
 0229                        if (property0.NameEquals("newClusterVersion"))
 230                        {
 0231                            newClusterVersion = property0.Value.GetObject();
 0232                            continue;
 233                        }
 0234                        if (property0.NameEquals("newClusterNumOfWorker"))
 235                        {
 0236                            newClusterNumOfWorker = property0.Value.GetObject();
 0237                            continue;
 238                        }
 0239                        if (property0.NameEquals("newClusterNodeType"))
 240                        {
 0241                            newClusterNodeType = property0.Value.GetObject();
 0242                            continue;
 243                        }
 0244                        if (property0.NameEquals("newClusterSparkConf"))
 245                        {
 0246                            Dictionary<string, object> dictionary = new Dictionary<string, object>();
 0247                            foreach (var property1 in property0.Value.EnumerateObject())
 248                            {
 0249                                dictionary.Add(property1.Name, property1.Value.GetObject());
 250                            }
 0251                            newClusterSparkConf = dictionary;
 0252                            continue;
 253                        }
 0254                        if (property0.NameEquals("newClusterSparkEnvVars"))
 255                        {
 0256                            Dictionary<string, object> dictionary = new Dictionary<string, object>();
 0257                            foreach (var property1 in property0.Value.EnumerateObject())
 258                            {
 0259                                dictionary.Add(property1.Name, property1.Value.GetObject());
 260                            }
 0261                            newClusterSparkEnvVars = dictionary;
 0262                            continue;
 263                        }
 0264                        if (property0.NameEquals("newClusterCustomTags"))
 265                        {
 0266                            Dictionary<string, object> dictionary = new Dictionary<string, object>();
 0267                            foreach (var property1 in property0.Value.EnumerateObject())
 268                            {
 0269                                dictionary.Add(property1.Name, property1.Value.GetObject());
 270                            }
 0271                            newClusterCustomTags = dictionary;
 0272                            continue;
 273                        }
 0274                        if (property0.NameEquals("newClusterDriverNodeType"))
 275                        {
 0276                            newClusterDriverNodeType = property0.Value.GetObject();
 0277                            continue;
 278                        }
 0279                        if (property0.NameEquals("newClusterInitScripts"))
 280                        {
 0281                            newClusterInitScripts = property0.Value.GetObject();
 0282                            continue;
 283                        }
 0284                        if (property0.NameEquals("newClusterEnableElasticDisk"))
 285                        {
 0286                            newClusterEnableElasticDisk = property0.Value.GetObject();
 0287                            continue;
 288                        }
 0289                        if (property0.NameEquals("encryptedCredential"))
 290                        {
 0291                            encryptedCredential = property0.Value.GetObject();
 292                            continue;
 293                        }
 294                    }
 295                    continue;
 296                }
 0297                additionalPropertiesDictionary ??= new Dictionary<string, object>();
 0298                additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject());
 299            }
 0300            additionalProperties = additionalPropertiesDictionary;
 0301            return new AzureDatabricksLinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(par
 302        }
 303    }
 304}
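
The Write and DeserializeAzureDatabricksLinkedService members above are not called directly by user code; they run when a linked service is pushed to or read back from the workspace. A hedged sketch of that round trip through the public client follows; LinkedServiceClient, LinkedServiceResource, and the specific method names are assumptions about the package's public surface rather than anything shown in this report.

// Round-trip sketch that would drive the serialization paths above.
// Assumptions: LinkedServiceClient / LinkedServiceResource and the method names below.
using System;
using Azure.Identity;
using Azure.Analytics.Synapse.Artifacts;
using Azure.Analytics.Synapse.Artifacts.Models;

var client = new LinkedServiceClient(
    new Uri("https://<workspace-name>.dev.azuresynapse.net"),
    new DefaultAzureCredential());

var databricks = new AzureDatabricksLinkedService(
    "adb-1234567890123456.7.azuredatabricks.net",
    new SecureString("<access-token>"));

// Creating the resource serializes the model via IUtf8JsonSerializable.Write.
var operation = await client.StartCreateOrUpdateLinkedServiceAsync(
    "MyDatabricksLinkedService", new LinkedServiceResource(databricks));
await operation.WaitForCompletionAsync();

// Reading it back goes through DeserializeAzureDatabricksLinkedService.
LinkedServiceResource fetched = (await client.GetLinkedServiceAsync("MyDatabricksLinkedService")).Value;
var roundTripped = (AzureDatabricksLinkedService)fetched.Properties;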