Summary

Class: Azure.Analytics.Synapse.Spark.SparkBatchClient
Assembly: Azure.Analytics.Synapse.Spark
File(s): C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Customization\SparkBatchClient.cs
         C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\SparkBatchClient.cs
Covered lines: 34
Uncovered lines: 44
Coverable lines: 78
Total lines: 232
Line coverage: 43.5% (34 of 78)
Covered branches: 0
Total branches: 0

Metrics

Method                        Cyclomatic complexity   Line coverage   Branch coverage
.ctor(...)                    -                       0%              100%
.ctor(...)                    -                       100%            100%
.ctor(...)                    -                       0%              100%
get_RestClient()              -                       100%            100%
.ctor()                       -                       100%            100%
.ctor(...)                    -                       100%            100%
.ctor(...)                    -                       100%            100%
GetSparkBatchJobsAsync()      -                       57.14%          100%
GetSparkBatchJobs(...)        -                       57.14%          100%
CreateSparkBatchJobAsync()    -                       0%              100%
CreateSparkBatchJob(...)      -                       0%              100%
GetSparkBatchJobAsync()       -                       57.14%          100%
GetSparkBatchJob(...)         -                       57.14%          100%
CancelSparkBatchJobAsync()    -                       0%              100%
CancelSparkBatchJob(...)      -                       0%              100%
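
The rows at 0% line coverage cluster around job creation and cancellation: CreateSparkBatchJobAsync, CreateSparkBatchJob, CancelSparkBatchJobAsync, and CancelSparkBatchJob are never exercised. A minimal sketch of a test or sample that would drive the two async paths is shown below; the workspace URL, pool name, and job payload are placeholders invented for illustration, not values taken from this report.

// Sketch only: all names and values below (workspace URL, pool, job file, class) are hypothetical.
using System;
using System.Threading.Tasks;
using Azure.Analytics.Synapse.Spark;
using Azure.Analytics.Synapse.Spark.Models;
using Azure.Identity;

internal static class CreateAndCancelSmokeTest
{
    public static async Task RunAsync()
    {
        var client = new SparkBatchClient(
            new Uri("https://myworkspace.dev.azuresynapse.net"),
            "mysparkpool",
            new DefaultAzureCredential());

        // Livy-compatible batch request payload; adjust sizes to the pool.
        var jobOptions = new SparkBatchJobOptions(
            "coverage-smoke-job",
            "abfss://jobs@myaccount.dfs.core.windows.net/wordcount.jar")
        {
            ClassName = "WordCount",
            DriverMemory = "4g",
            DriverCores = 2,
            ExecutorMemory = "4g",
            ExecutorCores = 2,
            ExecutorCount = 2,
        };

        // Exercises CreateSparkBatchJobAsync (currently 0%).
        SparkBatchJob job = await client.CreateSparkBatchJobAsync(jobOptions);

        // Exercises CancelSparkBatchJobAsync (currently 0%).
        await client.CancelSparkBatchJobAsync(job.Id);
    }
}

Calling the synchronous CreateSparkBatchJob/CancelSparkBatchJob pair the same way would cover the remaining two 0% rows.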

File(s)

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Customization\SparkBatchClient.cs

#    Line    Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4using System;
 5using Azure.Core;
 6using Azure.Core.Pipeline;
 7
 8namespace Azure.Analytics.Synapse.Spark
 9{
 10    public partial class SparkBatchClient
 11    {
 12        /// <summary>
 13        /// Initializes a new instance of the <see cref="SparkBatchClient"/>.
 14        /// </summary>
 15        public SparkBatchClient(Uri endpoint, string sparkPoolName, TokenCredential credential)
 016            : this(endpoint, sparkPoolName, credential, SparkClientOptions.Default)
 17        {
 018        }
 19
 20        /// <summary>
 21        /// Initializes a new instance of the <see cref="SparkBatchClient"/>.
 22        /// </summary>
 23        public SparkBatchClient(Uri endpoint, string sparkPoolName, TokenCredential credential, SparkClientOptions options)
 424            : this(new ClientDiagnostics(options),
 425                  SynapseClientPipeline.Build(options, credential),
 426                  endpoint.ToString(),
 427                  sparkPoolName,
 428                  options.VersionString)
 29        {
 430        }
 31    }
 32}
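
The listing above matches the metrics table: the three-argument constructor has no hits, while the overload that also takes SparkClientOptions is the one the existing tests reach. A hedged sketch of what invoking each overload looks like, with a placeholder endpoint and pool name:

using System;
using Azure.Analytics.Synapse.Spark;
using Azure.Identity;

var endpoint = new Uri("https://myworkspace.dev.azuresynapse.net"); // placeholder workspace
var credential = new DefaultAzureCredential();

// Three-argument overload (uncovered above): chains to the options overload using SparkClientOptions.Default.
var clientWithDefaults = new SparkBatchClient(endpoint, "mysparkpool", credential);

// Options overload (covered above): the caller supplies the client options explicitly.
var clientWithOptions = new SparkBatchClient(endpoint, "mysparkpool", credential, new SparkClientOptions());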

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\SparkBatchClient.cs

#    Line    Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System;
 9using System.Threading;
 10using System.Threading.Tasks;
 11using Azure;
 12using Azure.Analytics.Synapse.Spark.Models;
 13using Azure.Core.Pipeline;
 14
 15namespace Azure.Analytics.Synapse.Spark
 016{
 17    /// <summary> The SparkBatch service client. </summary>
 018    public partial class SparkBatchClient
 19    {
 20        private readonly ClientDiagnostics _clientDiagnostics;
 21        private readonly HttpPipeline _pipeline;
 8422        internal SparkBatchRestClient RestClient { get; }
 23        /// <summary> Initializes a new instance of SparkBatchClient for mocking. </summary>
 1224        protected SparkBatchClient()
 425        {
 1226        }
 427        /// <summary> Initializes a new instance of SparkBatchClient. </summary>
 428        /// <param name="clientDiagnostics"> The handler for diagnostic messaging in the client. </param>
 29        /// <param name="pipeline"> The HTTP pipeline for sending and receiving REST requests and responses. </param>
 430        /// <param name="endpoint"> The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. </param>
 31        /// <param name="sparkPoolName"> Name of the spark pool. </param>
 32        /// <param name="livyApiVersion"> Valid api-version for the request. </param>
 833        internal SparkBatchClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, string sparkPoolName, string livyApiVersion)
 34        {
 835            RestClient = new SparkBatchRestClient(clientDiagnostics, pipeline, endpoint, sparkPoolName, livyApiVersion);
 836            _clientDiagnostics = clientDiagnostics;
 837            _pipeline = pipeline;
 838        }
 39
 40        /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
 41        /// <param name="from"> Optional param specifying which index the list should begin from. </param>
 42        /// <param name="size">
 43        /// Optional param specifying the size of the returned list.
 44        ///
 45        ///             By default it is 20 and that is the maximum.
 46        /// </param>
 47        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 48        /// <param name="cancellationToken"> The cancellation token to use. </param>
 49        public virtual async Task<Response<SparkBatchJobCollection>> GetSparkBatchJobsAsync(int? @from = null, int? size = null, bool? detailed = null, CancellationToken cancellationToken = default)
 50        {
 251            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJobs");
 252            scope.Start();
 53            try
 54            {
 255                return await RestClient.GetSparkBatchJobsAsync(@from, size, detailed, cancellationToken).ConfigureAwait(false);
 56            }
 057            catch (Exception e)
 58            {
 059                scope.Failed(e);
 060                throw;
 61            }
 262        }
 63
 64        /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
 65        /// <param name="from"> Optional param specifying which index the list should begin from. </param>
 66        /// <param name="size">
 67        /// Optional param specifying the size of the returned list.
 68        ///
 69        ///             By default it is 20 and that is the maximum.
 70        /// </param>
 71        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 72        /// <param name="cancellationToken"> The cancellation token to use. </param>
 73        public virtual Response<SparkBatchJobCollection> GetSparkBatchJobs(int? @from = null, int? size = null, bool? detailed = null, CancellationToken cancellationToken = default)
 74        {
 275            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJobs");
 276            scope.Start();
 77            try
 78            {
 279                return RestClient.GetSparkBatchJobs(@from, size, detailed, cancellationToken);
 80            }
 081            catch (Exception e)
 82            {
 083                scope.Failed(e);
 084                throw;
 85            }
 286        }
 87
 88        /// <summary> Create new spark batch job. </summary>
 89        /// <param name="sparkBatchJobOptions"> Livy compatible batch job request payload. </param>
 90        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 91        /// <param name="cancellationToken"> The cancellation token to use. </param>
 92        public virtual async Task<Response<SparkBatchJob>> CreateSparkBatchJobAsync(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = null, CancellationToken cancellationToken = default)
 93        {
 094            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.CreateSparkBatchJob");
 095            scope.Start();
 96            try
 97            {
 098                return await RestClient.CreateSparkBatchJobAsync(sparkBatchJobOptions, detailed, cancellationToken).ConfigureAwait(false);
 99            }
 0100            catch (Exception e)
 101            {
 0102                scope.Failed(e);
 0103                throw;
 104            }
 0105        }
 106
 107        /// <summary> Create new spark batch job. </summary>
 108        /// <param name="sparkBatchJobOptions"> Livy compatible batch job request payload. </param>
 109        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 110        /// <param name="cancellationToken"> The cancellation token to use. </param>
 111        public virtual Response<SparkBatchJob> CreateSparkBatchJob(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = null, CancellationToken cancellationToken = default)
 112        {
 0113            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.CreateSparkBatchJob");
 0114            scope.Start();
 115            try
 116            {
 0117                return RestClient.CreateSparkBatchJob(sparkBatchJobOptions, detailed, cancellationToken);
 118            }
 0119            catch (Exception e)
 120            {
 0121                scope.Failed(e);
 0122                throw;
 123            }
 0124        }
 125
 126        /// <summary> Gets a single spark batch job. </summary>
 127        /// <param name="batchId"> Identifier for the batch job. </param>
 128        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 129        /// <param name="cancellationToken"> The cancellation token to use. </param>
 130        public virtual async Task<Response<SparkBatchJob>> GetSparkBatchJobAsync(int batchId, bool? detailed = null, CancellationToken cancellationToken = default)
 131        {
 40132            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJob");
 40133            scope.Start();
 134            try
 135            {
 40136                return await RestClient.GetSparkBatchJobAsync(batchId, detailed, cancellationToken).ConfigureAwait(false);
 137            }
 0138            catch (Exception e)
 139            {
 0140                scope.Failed(e);
 0141                throw;
 142            }
 40143        }
 144
 145        /// <summary> Gets a single spark batch job. </summary>
 146        /// <param name="batchId"> Identifier for the batch job. </param>
 147        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 148        /// <param name="cancellationToken"> The cancellation token to use. </param>
 149        public virtual Response<SparkBatchJob> GetSparkBatchJob(int batchId, bool? detailed = null, CancellationToken cancellationToken = default)
 150        {
 40151            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJob");
 40152            scope.Start();
 153            try
 154            {
 40155                return RestClient.GetSparkBatchJob(batchId, detailed, cancellationToken);
 156            }
 0157            catch (Exception e)
 158            {
 0159                scope.Failed(e);
 0160                throw;
 161            }
 40162        }
 163
 164        /// <summary> Cancels a running spark batch job. </summary>
 165        /// <param name="batchId"> Identifier for the batch job. </param>
 166        /// <param name="cancellationToken"> The cancellation token to use. </param>
 167        public virtual async Task<Response> CancelSparkBatchJobAsync(int batchId, CancellationToken cancellationToken = default)
 168        {
 0169            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.CancelSparkBatchJob");
 0170            scope.Start();
 171            try
 172            {
 0173                return await RestClient.CancelSparkBatchJobAsync(batchId, cancellationToken).ConfigureAwait(false);
 174            }
 0175            catch (Exception e)
 176            {
 0177                scope.Failed(e);
 0178                throw;
 179            }
 0180        }
 181
 182        /// <summary> Cancels a running spark batch job. </summary>
 183        /// <param name="batchId"> Identifier for the batch job. </param>
 184        /// <param name="cancellationToken"> The cancellation token to use. </param>
 185        public virtual Response CancelSparkBatchJob(int batchId, CancellationToken cancellationToken = default)
 186        {
 0187            using var scope = _clientDiagnostics.CreateScope("SparkBatchClient.CancelSparkBatchJob");
 0188            scope.Start();
 189            try
 190            {
 0191                return RestClient.CancelSparkBatchJob(batchId, cancellationToken);
 192            }
 0193            catch (Exception e)
 194            {
 0195                scope.Failed(e);
 0196                throw;
 197            }
 0198        }
 199    }
 200}
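
One reason the generated client keeps a parameterless protected constructor and declares every service method virtual is to allow test doubles that never touch the service. A minimal sketch of that pattern, using only members visible in the listing above; the RecordingSparkBatchClient type and its recorded property are hypothetical:

using System.Threading;
using Azure;
using Azure.Analytics.Synapse.Spark;

// Test double built on the protected mocking constructor plus a virtual override.
internal class RecordingSparkBatchClient : SparkBatchClient
{
    public int? CancelledBatchId { get; private set; }

    public override Response CancelSparkBatchJob(int batchId, CancellationToken cancellationToken = default)
    {
        // Record the interaction instead of issuing an HTTP request.
        CancelledBatchId = batchId;
        return null; // sufficient for a pure interaction test
    }
}

A unit test can construct RecordingSparkBatchClient directly, hand it to the code under test, and assert on CancelledBatchId; mocking frameworks such as Moq rely on the same protected constructor and virtual methods.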