Summary

Class: Azure.Analytics.Synapse.Spark.Models.SparkBatchJob
Assembly: Azure.Analytics.Synapse.Spark
File(s): C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\Models\SparkBatchJob.cs
         C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\Models\SparkBatchJob.Serialization.cs
Covered lines: 87
Uncovered lines: 52
Coverable lines: 139
Total lines: 271
Line coverage: 62.5% (87 of 139)
Covered branches: 35
Total branches: 52
Branch coverage: 67.3% (35 of 52)
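
For reference, the two quotas follow directly from the counts above; the reported figures appear to be truncated (not rounded) to one decimal place:

    Line coverage:   87 / 139 = 0.6259 -> 62.5%
    Branch coverage: 35 / 52  = 0.6731 -> 67.3%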

Metrics

Method                           Cyclomatic complexity   Line coverage   Branch coverage
.ctor(...)                       -                       0%              100%
.ctor(...)                       -                       100%            100%
get_LivyInfo()                   -                       0%              100%
get_Name()                       -                       100%            100%
get_WorkspaceName()              -                       0%              100%
get_SparkPoolName()              -                       0%              100%
get_SubmitterName()              -                       0%              100%
get_SubmitterId()                -                       100%            100%
get_ArtifactId()                 -                       100%            100%
get_JobType()                    -                       0%              100%
get_Result()                     -                       0%              100%
get_Scheduler()                  -                       0%              100%
get_Plugin()                     -                       0%              100%
get_Errors()                     -                       0%              100%
get_Tags()                       -                       0%              100%
get_Id()                         -                       100%            100%
get_AppId()                      -                       100%            100%
get_AppInfo()                    -                       0%              100%
get_State()                      -                       0%              100%
get_LogLines()                   -                       0%              100%
DeserializeSparkBatchJob(...)    -                       65.96%          67.31%
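
Every getter at 0% above is a read-only property that the current tests never read, and DeserializeSparkBatchJob misses the branches for JSON properties that never occur in the recorded payloads. A hedged sketch of a test that would lift both figures follows; the fixture name and JSON values are invented, NUnit is assumed (as in other Azure SDK test projects), and the direct call to the internal DeserializeSparkBatchJob assumes the test can reach internals (for example via an InternalsVisibleTo attribute, or by fetching the job through the public client instead).

    using System.Text.Json;
    using Azure.Analytics.Synapse.Spark.Models;
    using NUnit.Framework;

    public class SparkBatchJobCoverageTests
    {
        [Test]
        public void ReadsAllDeserializedProperties()
        {
            // Illustrative payload: property names come from the deserializer below;
            // the values are made up, and nested objects (livyInfo, schedulerInfo,
            // pluginInfo, errorInfo items) are omitted because their required shape
            // belongs to other models not shown in this report.
            const string json = @"{
                ""name"": ""job"",
                ""workspaceName"": ""myWorkspace"",
                ""sparkPoolName"": ""myPool"",
                ""submitterName"": ""user@contoso.com"",
                ""submitterId"": ""00000000-0000-0000-0000-000000000000"",
                ""artifactId"": ""artifact"",
                ""jobType"": ""SparkBatch"",
                ""result"": ""Succeeded"",
                ""errorInfo"": [],
                ""tags"": { ""env"": ""test"" },
                ""id"": 1,
                ""appId"": ""application_123"",
                ""appInfo"": { ""driverLogUrl"": ""https://example"" },
                ""state"": ""success"",
                ""log"": [ ""line1"" ]
            }";

            using JsonDocument document = JsonDocument.Parse(json);
            SparkBatchJob job = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);

            // Reading the getters is what moves their 0% line coverage.
            Assert.IsNull(job.LivyInfo);
            Assert.AreEqual("myWorkspace", job.WorkspaceName);
            Assert.AreEqual("myPool", job.SparkPoolName);
            Assert.AreEqual("user@contoso.com", job.SubmitterName);
            Assert.IsNotNull(job.JobType);
            Assert.IsNotNull(job.Result);
            Assert.IsNull(job.Scheduler);
            Assert.IsNull(job.Plugin);
            Assert.IsEmpty(job.Errors);
            Assert.AreEqual("test", job.Tags["env"]);
            Assert.AreEqual("success", job.State);
            Assert.IsNotEmpty(job.AppInfo);
            Assert.IsNotEmpty(job.LogLines);
        }
    }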

File(s)

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\Models\SparkBatchJob.cs

  Hits  Line  Source
           1  // Copyright (c) Microsoft Corporation. All rights reserved.
           2  // Licensed under the MIT License.
           3
           4  // <auto-generated/>
           5
           6  #nullable disable
           7
           8  using System.Collections.Generic;
           9  using Azure.Core;
          10
          11  namespace Azure.Analytics.Synapse.Spark.Models
          12  {
          13      /// <summary> The SparkBatchJob. </summary>
          14      public partial class SparkBatchJob
          15      {
          16          /// <summary> Initializes a new instance of SparkBatchJob. </summary>
          17          /// <param name="id"> The session Id. </param>
     0    18          internal SparkBatchJob(int id)
          19          {
     0    20              Errors = new ChangeTrackingList<SparkServiceError>();
     0    21              Tags = new ChangeTrackingDictionary<string, string>();
     0    22              Id = id;
     0    23              AppInfo = new ChangeTrackingDictionary<string, string>();
     0    24              LogLines = new ChangeTrackingList<string>();
     0    25          }
          26
          27          /// <summary> Initializes a new instance of SparkBatchJob. </summary>
          28          /// <param name="livyInfo"> . </param>
          29          /// <param name="name"> The batch name. </param>
          30          /// <param name="workspaceName"> The workspace name. </param>
          31          /// <param name="sparkPoolName"> The Spark pool name. </param>
          32          /// <param name="submitterName"> The submitter name. </param>
          33          /// <param name="submitterId"> The submitter identifier. </param>
          34          /// <param name="artifactId"> The artifact identifier. </param>
          35          /// <param name="jobType"> The job type. </param>
          36          /// <param name="result"> The Spark batch job result. </param>
          37          /// <param name="scheduler"> The scheduler information. </param>
          38          /// <param name="plugin"> The plugin information. </param>
          39          /// <param name="errors"> The error information. </param>
          40          /// <param name="tags"> The tags. </param>
          41          /// <param name="id"> The session Id. </param>
          42          /// <param name="appId"> The application id of this session. </param>
          43          /// <param name="appInfo"> The detailed application info. </param>
          44          /// <param name="state"> The batch state. </param>
          45          /// <param name="logLines"> The log lines. </param>
   160    46          internal SparkBatchJob(SparkBatchJobState livyInfo, string name, string workspaceName, string sparkPoolName, string submitterName, string submitterId, string artifactId, SparkJobType? jobType, SparkBatchJobResultType? result, SparkScheduler scheduler, SparkServicePlugin plugin, IReadOnlyList<SparkServiceError> errors, IReadOnlyDictionary<string, string> tags, int id, string appId, IReadOnlyDictionary<string, string> appInfo, string state, IReadOnlyList<string> logLines)
          47          {
   160    48              LivyInfo = livyInfo;
   160    49              Name = name;
   160    50              WorkspaceName = workspaceName;
   160    51              SparkPoolName = sparkPoolName;
   160    52              SubmitterName = submitterName;
   160    53              SubmitterId = submitterId;
   160    54              ArtifactId = artifactId;
   160    55              JobType = jobType;
   160    56              Result = result;
   160    57              Scheduler = scheduler;
   160    58              Plugin = plugin;
   160    59              Errors = errors;
   160    60              Tags = tags;
   160    61              Id = id;
   160    62              AppId = appId;
   160    63              AppInfo = appInfo;
   160    64              State = state;
   160    65              LogLines = logLines;
   160    66          }
          67
     0    68          public SparkBatchJobState LivyInfo { get; }
          69          /// <summary> The batch name. </summary>
   160    70          public string Name { get; }
          71          /// <summary> The workspace name. </summary>
     0    72          public string WorkspaceName { get; }
          73          /// <summary> The Spark pool name. </summary>
     0    74          public string SparkPoolName { get; }
          75          /// <summary> The submitter name. </summary>
     0    76          public string SubmitterName { get; }
          77          /// <summary> The submitter identifier. </summary>
   160    78          public string SubmitterId { get; }
          79          /// <summary> The artifact identifier. </summary>
   160    80          public string ArtifactId { get; }
          81          /// <summary> The job type. </summary>
     0    82          public SparkJobType? JobType { get; }
          83          /// <summary> The Spark batch job result. </summary>
     0    84          public SparkBatchJobResultType? Result { get; }
          85          /// <summary> The scheduler information. </summary>
     0    86          public SparkScheduler Scheduler { get; }
          87          /// <summary> The plugin information. </summary>
     0    88          public SparkServicePlugin Plugin { get; }
          89          /// <summary> The error information. </summary>
     0    90          public IReadOnlyList<SparkServiceError> Errors { get; }
          91          /// <summary> The tags. </summary>
     0    92          public IReadOnlyDictionary<string, string> Tags { get; }
          93          /// <summary> The session Id. </summary>
   240    94          public int Id { get; }
          95          /// <summary> The application id of this session. </summary>
   160    96          public string AppId { get; }
          97          /// <summary> The detailed application info. </summary>
     0    98          public IReadOnlyDictionary<string, string> AppInfo { get; }
          99          /// <summary> The batch state. </summary>
     0   100          public string State { get; }
         101          /// <summary> The log lines. </summary>
     0   102          public IReadOnlyList<string> LogLines { get; }
         103      }
         104  }
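
All of these properties are get-only and populated solely by the constructors, so their hit counts record which getters the calling code actually reads. A minimal consumption sketch, assuming the SparkBatchClient surface described in the package README (the endpoint, pool name, and batch id are placeholders):

    using System;
    using Azure.Analytics.Synapse.Spark;
    using Azure.Analytics.Synapse.Spark.Models;
    using Azure.Identity;

    class ReadSparkBatchJobSketch
    {
        static void Main()
        {
            // Placeholder endpoint and pool name; constructor shape assumed from the README.
            var client = new SparkBatchClient(
                new Uri("https://myworkspace.dev.azuresynapse.net"),
                "mySparkPool",
                new DefaultAzureCredential());

            // The model is read-only, so getter coverage depends entirely on what callers read here.
            SparkBatchJob job = client.GetSparkBatchJob(1).Value;
            Console.WriteLine($"{job.Id} {job.Name} {job.State} {job.AppId}");
        }
    }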

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\Models\SparkBatchJob.Serialization.cs

  Hits  Line  Source
           1  // Copyright (c) Microsoft Corporation. All rights reserved.
           2  // Licensed under the MIT License.
           3
           4  // <auto-generated/>
           5
           6  #nullable disable
           7
           8  using System.Collections.Generic;
           9  using System.Text.Json;
          10  using Azure.Core;
          11
          12  namespace Azure.Analytics.Synapse.Spark.Models
          13  {
          14      public partial class SparkBatchJob
          15      {
          16          internal static SparkBatchJob DeserializeSparkBatchJob(JsonElement element)
          17          {
   160    18              Optional<SparkBatchJobState> livyInfo = default;
   160    19              Optional<string> name = default;
   160    20              Optional<string> workspaceName = default;
   160    21              Optional<string> sparkPoolName = default;
   160    22              Optional<string> submitterName = default;
   160    23              Optional<string> submitterId = default;
   160    24              Optional<string> artifactId = default;
   160    25              Optional<SparkJobType> jobType = default;
   160    26              Optional<SparkBatchJobResultType> result = default;
   160    27              Optional<SparkScheduler> schedulerInfo = default;
   160    28              Optional<SparkServicePlugin> pluginInfo = default;
   160    29              Optional<IReadOnlyList<SparkServiceError>> errorInfo = default;
   160    30              Optional<IReadOnlyDictionary<string, string>> tags = default;
   160    31              int id = default;
   160    32              Optional<string> appId = default;
   160    33              Optional<IReadOnlyDictionary<string, string>> appInfo = default;
   160    34              Optional<string> state = default;
   160    35              Optional<IReadOnlyList<string>> log = default;
  1920    36              foreach (var property in element.EnumerateObject())
          37              {
   800    38                  if (property.NameEquals("livyInfo"))
          39                  {
     0    40                      livyInfo = SparkBatchJobState.DeserializeSparkBatchJobState(property.Value);
     0    41                      continue;
          42                  }
   800    43                  if (property.NameEquals("name"))
          44                  {
     0    45                      name = property.Value.GetString();
     0    46                      continue;
          47                  }
   800    48                  if (property.NameEquals("workspaceName"))
          49                  {
     0    50                      workspaceName = property.Value.GetString();
     0    51                      continue;
          52                  }
   800    53                  if (property.NameEquals("sparkPoolName"))
          54                  {
     0    55                      sparkPoolName = property.Value.GetString();
     0    56                      continue;
          57                  }
   800    58                  if (property.NameEquals("submitterName"))
          59                  {
     0    60                      submitterName = property.Value.GetString();
     0    61                      continue;
          62                  }
   800    63                  if (property.NameEquals("submitterId"))
          64                  {
     0    65                      submitterId = property.Value.GetString();
     0    66                      continue;
          67                  }
   800    68                  if (property.NameEquals("artifactId"))
          69                  {
     0    70                      artifactId = property.Value.GetString();
     0    71                      continue;
          72                  }
   800    73                  if (property.NameEquals("jobType"))
          74                  {
     0    75                      jobType = new SparkJobType(property.Value.GetString());
     0    76                      continue;
          77                  }
   800    78                  if (property.NameEquals("result"))
          79                  {
     0    80                      result = new SparkBatchJobResultType(property.Value.GetString());
     0    81                      continue;
          82                  }
   800    83                  if (property.NameEquals("schedulerInfo"))
          84                  {
     0    85                      schedulerInfo = SparkScheduler.DeserializeSparkScheduler(property.Value);
     0    86                      continue;
          87                  }
   800    88                  if (property.NameEquals("pluginInfo"))
          89                  {
     0    90                      pluginInfo = SparkServicePlugin.DeserializeSparkServicePlugin(property.Value);
     0    91                      continue;
          92                  }
   800    93                  if (property.NameEquals("errorInfo"))
          94                  {
     0    95                      List<SparkServiceError> array = new List<SparkServiceError>();
     0    96                      foreach (var item in property.Value.EnumerateArray())
          97                      {
     0    98                          array.Add(SparkServiceError.DeserializeSparkServiceError(item));
          99                      }
     0   100                      errorInfo = array;
     0   101                      continue;
         102                  }
   800   103                  if (property.NameEquals("tags"))
         104                  {
     0   105                      Dictionary<string, string> dictionary = new Dictionary<string, string>();
     0   106                      foreach (var property0 in property.Value.EnumerateObject())
         107                      {
     0   108                          dictionary.Add(property0.Name, property0.Value.GetString());
         109                      }
     0   110                      tags = dictionary;
     0   111                      continue;
         112                  }
   800   113                  if (property.NameEquals("id"))
         114                  {
   160   115                      id = property.Value.GetInt32();
   160   116                      continue;
         117                  }
   640   118                  if (property.NameEquals("appId"))
         119                  {
   160   120                      if (property.Value.ValueKind == JsonValueKind.Null)
         121                      {
    32   122                          appId = null;
    32   123                          continue;
         124                      }
   128   125                      appId = property.Value.GetString();
   128   126                      continue;
         127                  }
   480   128                  if (property.NameEquals("appInfo"))
         129                  {
   160   130                      if (property.Value.ValueKind == JsonValueKind.Null)
         131                      {
    16   132                          appInfo = null;
    16   133                          continue;
         134                      }
   144   135                      Dictionary<string, string> dictionary = new Dictionary<string, string>();
   864   136                      foreach (var property0 in property.Value.EnumerateObject())
         137                      {
   288   138                          dictionary.Add(property0.Name, property0.Value.GetString());
         139                      }
   144   140                      appInfo = dictionary;
   144   141                      continue;
         142                  }
   320   143                  if (property.NameEquals("state"))
         144                  {
   160   145                      state = property.Value.GetString();
   160   146                      continue;
         147                  }
   160   148                  if (property.NameEquals("log"))
         149                  {
   160   150                      if (property.Value.ValueKind == JsonValueKind.Null)
         151                      {
    16   152                          log = null;
    16   153                          continue;
         154                      }
   144   155                      List<string> array = new List<string>();
  3056   156                      foreach (var item in property.Value.EnumerateArray())
         157                      {
  1384   158                          array.Add(item.GetString());
         159                      }
   144   160                      log = array;
         161                      continue;
         162                  }
         163              }
   160   164              return new SparkBatchJob(livyInfo.Value, name.Value, workspaceName.Value, sparkPoolName.Value, submitterName.Value, submitterId.Value, artifactId.Value, Optional.ToNullable(jobType), Optional.ToNullable(result), schedulerInfo.Value, pluginInfo.Value, Optional.ToList(errorInfo), Optional.ToDictionary(tags), id, appId.Value, Optional.ToDictionary(appInfo), state.Value, Optional.ToList(log));
         165          }
         166      }
         167  }
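
A quick cross-check of the branch data inside DeserializeSparkBatchJob: each covered null-check splits its 160 matching iterations cleanly between its two arms, while the arms that never execute (the livyInfo through tags handlers, lines 40-111) correspond to JSON properties absent from the recorded payloads and are where the uncovered branches sit.

    appId   (line 120): 32 null + 128 non-null = 160
    appInfo (line 130): 16 null + 144 non-null = 160
    log     (line 150): 16 null + 144 non-null = 160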