Summary

Class:Azure.Analytics.Synapse.Spark.SparkBatchRestClient
Assembly:Azure.Analytics.Synapse.Spark
File(s):C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\SparkBatchRestClient.cs
Covered lines:67
Uncovered lines:74
Coverable lines:141
Total lines:332
Line coverage:47.5% (67 of 141)
Covered branches:11
Total branches:36
Branch coverage:30.5% (11 of 36)

Metrics

Method  Cyclomatic complexity  Line coverage  Branch coverage
.ctor(...)-76.92%50%
CreateGetSparkBatchJobsRequest(...)-83.33%50%
GetSparkBatchJobsAsync()-87.5%50%
GetSparkBatchJobs(...)-87.5%50%
CreateCreateSparkBatchJobRequest(...)-0%0%
CreateSparkBatchJobAsync()-0%0%
CreateSparkBatchJob(...)-0%0%
CreateGetSparkBatchJobRequest(...)-93.33%50%
GetSparkBatchJobAsync()-87.5%50%
GetSparkBatchJob(...)-87.5%50%
CreateCancelSparkBatchJobRequest(...)-0%100%
CancelSparkBatchJobAsync()-0%0%
CancelSparkBatchJob(...)-0%0%

File(s)

C:\Git\azure-sdk-for-net\sdk\synapse\Azure.Analytics.Synapse.Spark\src\Generated\SparkBatchRestClient.cs

# Line  Line coverage
 1// Copyright (c) Microsoft Corporation. All rights reserved.
 2// Licensed under the MIT License.
 3
 4// <auto-generated/>
 5
 6#nullable disable
 7
 8using System;
 9using System.Text.Json;
 10using System.Threading;
 11using System.Threading.Tasks;
 12using Azure;
 13using Azure.Analytics.Synapse.Spark.Models;
 14using Azure.Core;
 15using Azure.Core.Pipeline;
 16
 17namespace Azure.Analytics.Synapse.Spark
 18{
 19    internal partial class SparkBatchRestClient
 20    {
 21        private string endpoint;
 22        private string sparkPoolName;
 23        private string livyApiVersion;
 24        private ClientDiagnostics _clientDiagnostics;
 25        private HttpPipeline _pipeline;
 26
 27        /// <summary> Initializes a new instance of SparkBatchRestClient. </summary>
 28        /// <param name="clientDiagnostics"> The handler for diagnostic messaging in the client. </param>
 29        /// <param name="pipeline"> The HTTP pipeline for sending and receiving REST requests and responses. </param>
 30        /// <param name="endpoint"> The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. </param>
 31        /// <param name="sparkPoolName"> Name of the spark pool. </param>
 32        /// <param name="livyApiVersion"> Valid api-version for the request. </param>
 33        /// <exception cref="ArgumentNullException"> <paramref name="endpoint"/>, <paramref name="sparkPoolName"/>, or <paramref name="livyApiVersion"/> is null. </exception>
 834        public SparkBatchRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, string sparkPoolName, string livyApiVersion)
 35        {
 836            if (endpoint == null)
 37            {
 038                throw new ArgumentNullException(nameof(endpoint));
 39            }
 840            if (sparkPoolName == null)
 41            {
 042                throw new ArgumentNullException(nameof(sparkPoolName));
 43            }
 844            if (livyApiVersion == null)
 45            {
 046                throw new ArgumentNullException(nameof(livyApiVersion));
 47            }
 48
 849            this.endpoint = endpoint;
 850            this.sparkPoolName = sparkPoolName;
 851            this.livyApiVersion = livyApiVersion;
 852            _clientDiagnostics = clientDiagnostics;
 853            _pipeline = pipeline;
 854        }
 55
 56        internal HttpMessage CreateGetSparkBatchJobsRequest(int? @from, int? size, bool? detailed)
 57        {
 458            var message = _pipeline.CreateMessage();
 459            var request = message.Request;
 460            request.Method = RequestMethod.Get;
 461            var uri = new RawRequestUriBuilder();
 462            uri.AppendRaw(endpoint, false);
 463            uri.AppendRaw("/livyApi/versions/", false);
 464            uri.AppendRaw(livyApiVersion, false);
 465            uri.AppendRaw("/sparkPools/", false);
 466            uri.AppendRaw(sparkPoolName, false);
 467            uri.AppendPath("/batches", false);
 468            if (@from != null)
 69            {
 070                uri.AppendQuery("from", @from.Value, true);
 71            }
 472            if (size != null)
 73            {
 074                uri.AppendQuery("size", size.Value, true);
 75            }
 476            if (detailed != null)
 77            {
 078                uri.AppendQuery("detailed", detailed.Value, true);
 79            }
 480            request.Uri = uri;
 481            return message;
 82        }
 83
 84        /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
 85        /// <param name="from"> Optional param specifying which index the list should begin from. </param>
 86        /// <param name="size">
 87        /// Optional param specifying the size of the returned list.
 88        ///
 89        ///             By default it is 20 and that is the maximum.
 90        /// </param>
 91        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 92        /// <param name="cancellationToken"> The cancellation token to use. </param>
 93        public async Task<Response<SparkBatchJobCollection>> GetSparkBatchJobsAsync(int? @from = null, int? size = null, bool? detailed = null, CancellationToken cancellationToken = default)
 94        {
 295            using var message = CreateGetSparkBatchJobsRequest(@from, size, detailed);
 296            await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
 297            switch (message.Response.Status)
 98            {
 99                case 200:
 100                    {
 101                        SparkBatchJobCollection value = default;
 2102                        using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false);
 2103                        value = SparkBatchJobCollection.DeserializeSparkBatchJobCollection(document.RootElement);
 2104                        return Response.FromValue(value, message.Response);
 105                    }
 106                default:
 0107                    throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(false);
 108            }
 2109        }
 110
 111        /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
 112        /// <param name="from"> Optional param specifying which index the list should begin from. </param>
 113        /// <param name="size">
 114        /// Optional param specifying the size of the returned list.
 115        ///
 116        ///             By default it is 20 and that is the maximum.
 117        /// </param>
 118        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 119        /// <param name="cancellationToken"> The cancellation token to use. </param>
 120        public Response<SparkBatchJobCollection> GetSparkBatchJobs(int? @from = null, int? size = null, bool? detailed = null, CancellationToken cancellationToken = default)
 121        {
 2122            using var message = CreateGetSparkBatchJobsRequest(@from, size, detailed);
 2123            _pipeline.Send(message, cancellationToken);
 2124            switch (message.Response.Status)
 125            {
 126                case 200:
 127                    {
 128                        SparkBatchJobCollection value = default;
 2129                        using var document = JsonDocument.Parse(message.Response.ContentStream);
 2130                        value = SparkBatchJobCollection.DeserializeSparkBatchJobCollection(document.RootElement);
 2131                        return Response.FromValue(value, message.Response);
 132                    }
 133                default:
 0134                    throw _clientDiagnostics.CreateRequestFailedException(message.Response);
 135            }
 2136        }
 137
 138        internal HttpMessage CreateCreateSparkBatchJobRequest(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed)
 139        {
 0140            var message = _pipeline.CreateMessage();
 0141            var request = message.Request;
 0142            request.Method = RequestMethod.Post;
 0143            var uri = new RawRequestUriBuilder();
 0144            uri.AppendRaw(endpoint, false);
 0145            uri.AppendRaw("/livyApi/versions/", false);
 0146            uri.AppendRaw(livyApiVersion, false);
 0147            uri.AppendRaw("/sparkPools/", false);
 0148            uri.AppendRaw(sparkPoolName, false);
 0149            uri.AppendPath("/batches", false);
 0150            if (detailed != null)
 151            {
 0152                uri.AppendQuery("detailed", detailed.Value, true);
 153            }
 0154            request.Uri = uri;
 0155            request.Headers.Add("Content-Type", "application/json");
 0156            var content = new Utf8JsonRequestContent();
 0157            content.JsonWriter.WriteObjectValue(sparkBatchJobOptions);
 0158            request.Content = content;
 0159            return message;
 160        }
 161
 162        /// <summary> Create new spark batch job. </summary>
 163        /// <param name="sparkBatchJobOptions"> Livy compatible batch job request payload. </param>
 164        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 165        /// <param name="cancellationToken"> The cancellation token to use. </param>
 166        /// <exception cref="ArgumentNullException"> <paramref name="sparkBatchJobOptions"/> is null. </exception>
 167        public async Task<Response<SparkBatchJob>> CreateSparkBatchJobAsync(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = null, CancellationToken cancellationToken = default)
 168        {
 0169            if (sparkBatchJobOptions == null)
 170            {
 0171                throw new ArgumentNullException(nameof(sparkBatchJobOptions));
 172            }
 173
 0174            using var message = CreateCreateSparkBatchJobRequest(sparkBatchJobOptions, detailed);
 0175            await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
 0176            switch (message.Response.Status)
 177            {
 178                case 200:
 179                    {
 180                        SparkBatchJob value = default;
 0181                        using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false);
 0182                        value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
 0183                        return Response.FromValue(value, message.Response);
 184                    }
 185                default:
 0186                    throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(false);
 187            }
 0188        }
 189
 190        /// <summary> Create new spark batch job. </summary>
 191        /// <param name="sparkBatchJobOptions"> Livy compatible batch job request payload. </param>
 192        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 193        /// <param name="cancellationToken"> The cancellation token to use. </param>
 194        /// <exception cref="ArgumentNullException"> <paramref name="sparkBatchJobOptions"/> is null. </exception>
 195        public Response<SparkBatchJob> CreateSparkBatchJob(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = null, CancellationToken cancellationToken = default)
 196        {
 0197            if (sparkBatchJobOptions == null)
 198            {
 0199                throw new ArgumentNullException(nameof(sparkBatchJobOptions));
 200            }
 201
 0202            using var message = CreateCreateSparkBatchJobRequest(sparkBatchJobOptions, detailed);
 0203            _pipeline.Send(message, cancellationToken);
 0204            switch (message.Response.Status)
 205            {
 206                case 200:
 207                    {
 208                        SparkBatchJob value = default;
 0209                        using var document = JsonDocument.Parse(message.Response.ContentStream);
 0210                        value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
 0211                        return Response.FromValue(value, message.Response);
 212                    }
 213                default:
 0214                    throw _clientDiagnostics.CreateRequestFailedException(message.Response);
 215            }
 0216        }
 217
 218        internal HttpMessage CreateGetSparkBatchJobRequest(int batchId, bool? detailed)
 219        {
 80220            var message = _pipeline.CreateMessage();
 80221            var request = message.Request;
 80222            request.Method = RequestMethod.Get;
 80223            var uri = new RawRequestUriBuilder();
 80224            uri.AppendRaw(endpoint, false);
 80225            uri.AppendRaw("/livyApi/versions/", false);
 80226            uri.AppendRaw(livyApiVersion, false);
 80227            uri.AppendRaw("/sparkPools/", false);
 80228            uri.AppendRaw(sparkPoolName, false);
 80229            uri.AppendPath("/batches/", false);
 80230            uri.AppendPath(batchId, true);
 80231            if (detailed != null)
 232            {
 0233                uri.AppendQuery("detailed", detailed.Value, true);
 234            }
 80235            request.Uri = uri;
 80236            return message;
 237        }
 238
 239        /// <summary> Gets a single spark batch job. </summary>
 240        /// <param name="batchId"> Identifier for the batch job. </param>
 241        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 242        /// <param name="cancellationToken"> The cancellation token to use. </param>
 243        public async Task<Response<SparkBatchJob>> GetSparkBatchJobAsync(int batchId, bool? detailed = null, CancellationToken cancellationToken = default)
 244        {
 40245            using var message = CreateGetSparkBatchJobRequest(batchId, detailed);
 40246            await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
 40247            switch (message.Response.Status)
 248            {
 249                case 200:
 250                    {
 251                        SparkBatchJob value = default;
 40252                        using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false);
 40253                        value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
 40254                        return Response.FromValue(value, message.Response);
 255                    }
 256                default:
 0257                    throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(false);
 258            }
 40259        }
 260
 261        /// <summary> Gets a single spark batch job. </summary>
 262        /// <param name="batchId"> Identifier for the batch job. </param>
 263        /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
 264        /// <param name="cancellationToken"> The cancellation token to use. </param>
 265        public Response<SparkBatchJob> GetSparkBatchJob(int batchId, bool? detailed = null, CancellationToken cancellationToken = default)
 266        {
 40267            using var message = CreateGetSparkBatchJobRequest(batchId, detailed);
 40268            _pipeline.Send(message, cancellationToken);
 40269            switch (message.Response.Status)
 270            {
 271                case 200:
 272                    {
 273                        SparkBatchJob value = default;
 40274                        using var document = JsonDocument.Parse(message.Response.ContentStream);
 40275                        value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
 40276                        return Response.FromValue(value, message.Response);
 277                    }
 278                default:
 0279                    throw _clientDiagnostics.CreateRequestFailedException(message.Response);
 280            }
 40281        }
 282
 283        internal HttpMessage CreateCancelSparkBatchJobRequest(int batchId)
 284        {
 0285            var message = _pipeline.CreateMessage();
 0286            var request = message.Request;
 0287            request.Method = RequestMethod.Delete;
 0288            var uri = new RawRequestUriBuilder();
 0289            uri.AppendRaw(endpoint, false);
 0290            uri.AppendRaw("/livyApi/versions/", false);
 0291            uri.AppendRaw(livyApiVersion, false);
 0292            uri.AppendRaw("/sparkPools/", false);
 0293            uri.AppendRaw(sparkPoolName, false);
 0294            uri.AppendPath("/batches/", false);
 0295            uri.AppendPath(batchId, true);
 0296            request.Uri = uri;
 0297            return message;
 298        }
 299
 300        /// <summary> Cancels a running spark batch job. </summary>
 301        /// <param name="batchId"> Identifier for the batch job. </param>
 302        /// <param name="cancellationToken"> The cancellation token to use. </param>
 303        public async Task<Response> CancelSparkBatchJobAsync(int batchId, CancellationToken cancellationToken = default)
 304        {
 0305            using var message = CreateCancelSparkBatchJobRequest(batchId);
 0306            await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
 0307            switch (message.Response.Status)
 308            {
 309                case 200:
 0310                    return message.Response;
 311                default:
 0312                    throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(false);
 313            }
 0314        }
 315
 316        /// <summary> Cancels a running spark batch job. </summary>
 317        /// <param name="batchId"> Identifier for the batch job. </param>
 318        /// <param name="cancellationToken"> The cancellation token to use. </param>
 319        public Response CancelSparkBatchJob(int batchId, CancellationToken cancellationToken = default)
 320        {
 0321            using var message = CreateCancelSparkBatchJobRequest(batchId);
 0322            _pipeline.Send(message, cancellationToken);
 0323            switch (message.Response.Status)
 324            {
 325                case 200:
 0326                    return message.Response;
 327                default:
 0328                    throw _clientDiagnostics.CreateRequestFailedException(message.Response);
 329            }
 0330        }
 331    }
 332}