| | 1 | | // Copyright (c) Microsoft Corporation. All rights reserved. |
| | 2 | | // Licensed under the MIT License. |
| | 3 | |
|
| | 4 | | // <auto-generated/> |
| | 5 | |
|
| | 6 | | #nullable disable |
| | 7 | |
|
| | 8 | | using System; |
| | 9 | | using System.Text.Json; |
| | 10 | | using System.Threading; |
| | 11 | | using System.Threading.Tasks; |
| | 12 | | using Azure; |
| | 13 | | using Azure.Analytics.Synapse.Spark.Models; |
| | 14 | | using Azure.Core; |
| | 15 | | using Azure.Core.Pipeline; |
| | 16 | |
|
| | 17 | | namespace Azure.Analytics.Synapse.Spark |
| | 18 | | { |
| | 19 | | internal partial class SparkBatchRestClient |
| | 20 | | { |
| | 21 | | private string endpoint; |
| | 22 | | private string sparkPoolName; |
| | 23 | | private string livyApiVersion; |
| | 24 | | private ClientDiagnostics _clientDiagnostics; |
| | 25 | | private HttpPipeline _pipeline; |
| | 26 | |
|
| | 27 | | /// <summary> Initializes a new instance of SparkBatchRestClient. </summary>
| | 28 | | /// <param name="clientDiagnostics"> The handler for diagnostic messaging in the client. </param>
| | 29 | | /// <param name="pipeline"> The HTTP pipeline for sending and receiving REST requests and responses. </param>
| | 30 | | /// <param name="endpoint"> The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. </param>
| | 31 | | /// <param name="sparkPoolName"> Name of the spark pool. </param>
| | 32 | | /// <param name="livyApiVersion"> Valid api-version for the request. </param>
| | 33 | | /// <exception cref="ArgumentNullException"> <paramref name="endpoint"/>, <paramref name="sparkPoolName"/>, or <paramref name="livyApiVersion"/> is null. </exception>
| 8 | 34 | | public SparkBatchRestClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, string endpoint, string
| | 35 | | {
| | | | // Guard the three string arguments that are later concatenated into request URIs.
| 8 | 36 | | if (endpoint == null)
| | 37 | | {
| 0 | 38 | | throw new ArgumentNullException(nameof(endpoint)); |
| | 39 | | }
| 8 | 40 | | if (sparkPoolName == null)
| | 41 | | {
| 0 | 42 | | throw new ArgumentNullException(nameof(sparkPoolName)); |
| | 43 | | }
| 8 | 44 | | if (livyApiVersion == null)
| | 45 | | {
| 0 | 46 | | throw new ArgumentNullException(nameof(livyApiVersion)); |
| | 47 | | }
| | 48 | |
|
| | | | // NOTE(review): clientDiagnostics and pipeline are stored without null checks,
| | | | // presumably because callers are generated code that always supplies them — confirm.
| 8 | 49 | | this.endpoint = endpoint;
| 8 | 50 | | this.sparkPoolName = sparkPoolName;
| 8 | 51 | | this.livyApiVersion = livyApiVersion;
| 8 | 52 | | _clientDiagnostics = clientDiagnostics;
| 8 | 53 | | _pipeline = pipeline;
| 8 | 54 | | }
| | 55 | |
|
| | | | /// <summary> Builds the GET request that lists spark batch jobs under the configured pool:
| | | | /// {endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches. </summary>
| | | | /// <remarks> NOTE(review): the bool passed to AppendRaw/AppendPath/AppendQuery presumably controls
| | | | /// URL escaping (raw segments unescaped, query values escaped) — confirm against Azure.Core. </remarks>
| | 56 | | internal HttpMessage CreateGetSparkBatchJobsRequest(int? @from, int? size, bool? detailed)
| | 57 | | {
| 4 | 58 | | var message = _pipeline.CreateMessage();
| 4 | 59 | | var request = message.Request;
| 4 | 60 | | request.Method = RequestMethod.Get;
| 4 | 61 | | var uri = new RawRequestUriBuilder();
| 4 | 62 | | uri.AppendRaw(endpoint, false);
| 4 | 63 | | uri.AppendRaw("/livyApi/versions/", false);
| 4 | 64 | | uri.AppendRaw(livyApiVersion, false);
| 4 | 65 | | uri.AppendRaw("/sparkPools/", false);
| 4 | 66 | | uri.AppendRaw(sparkPoolName, false);
| 4 | 67 | | uri.AppendPath("/batches", false);
| | | | // Optional paging/detail parameters are added as query strings only when supplied.
| 4 | 68 | | if (@from != null)
| | 69 | | {
| 0 | 70 | | uri.AppendQuery("from", @from.Value, true);
| | 71 | | }
| 4 | 72 | | if (size != null)
| | 73 | | {
| 0 | 74 | | uri.AppendQuery("size", size.Value, true);
| | 75 | | }
| 4 | 76 | | if (detailed != null)
| | 77 | | {
| 0 | 78 | | uri.AppendQuery("detailed", detailed.Value, true);
| | 79 | | }
| 4 | 80 | | request.Uri = uri;
| 4 | 81 | | return message;
| | 82 | | }
| | 83 | |
|
| | 84 | | /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
| | 85 | | /// <param name="from"> Optional param specifying which index the list should begin from. </param>
| | 86 | | /// <param name="size">
| | 87 | | /// Optional param specifying the size of the returned list.
| | 88 | | ///
| | 89 | | /// By default it is 20 and that is the maximum.
| | 90 | | /// </param>
| | 91 | | /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
| | 92 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 93 | | public async Task<Response<SparkBatchJobCollection>> GetSparkBatchJobsAsync(int? @from = null, int? size = null,
| | 94 | | {
| 2 | 95 | | using var message = CreateGetSparkBatchJobsRequest(@from, size, detailed);
| 2 | 96 | | await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
| | | | // 200: deserialize the JSON payload into the model; any other status becomes a RequestFailedException.
| 2 | 97 | | switch (message.Response.Status)
| | 98 | | {
| | 99 | | case 200:
| | 100 | | {
| | 101 | | SparkBatchJobCollection value = default;
| 2 | 102 | | using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, canc
| 2 | 103 | | value = SparkBatchJobCollection.DeserializeSparkBatchJobCollection(document.RootElement);
| 2 | 104 | | return Response.FromValue(value, message.Response);
| | 105 | | }
| | 106 | | default:
| 0 | 107 | | throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(fa
| | 108 | | }
| 2 | 109 | | }
| | 110 | |
|
| | 111 | | /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
| | 112 | | /// <param name="from"> Optional param specifying which index the list should begin from. </param>
| | 113 | | /// <param name="size">
| | 114 | | /// Optional param specifying the size of the returned list.
| | 115 | | ///
| | 116 | | /// By default it is 20 and that is the maximum.
| | 117 | | /// </param>
| | 118 | | /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
| | 119 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 120 | | public Response<SparkBatchJobCollection> GetSparkBatchJobs(int? @from = null, int? size = null, bool? detailed =
| | 121 | | {
| 2 | 122 | | using var message = CreateGetSparkBatchJobsRequest(@from, size, detailed);
| 2 | 123 | | _pipeline.Send(message, cancellationToken);
| | | | // Synchronous twin of GetSparkBatchJobsAsync: 200 deserializes, anything else throws.
| 2 | 124 | | switch (message.Response.Status)
| | 125 | | {
| | 126 | | case 200:
| | 127 | | {
| | 128 | | SparkBatchJobCollection value = default;
| 2 | 129 | | using var document = JsonDocument.Parse(message.Response.ContentStream);
| 2 | 130 | | value = SparkBatchJobCollection.DeserializeSparkBatchJobCollection(document.RootElement);
| 2 | 131 | | return Response.FromValue(value, message.Response);
| | 132 | | }
| | 133 | | default:
| 0 | 134 | | throw _clientDiagnostics.CreateRequestFailedException(message.Response);
| | 135 | | }
| 2 | 136 | | }
| | 137 | |
|
| | | | /// <summary> Builds the POST request that submits a new spark batch job: the options payload is
| | | | /// serialized as the JSON request body and sent to .../sparkPools/{sparkPoolName}/batches. </summary>
| | 138 | | internal HttpMessage CreateCreateSparkBatchJobRequest(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed)
| | 139 | | {
| 0 | 140 | | var message = _pipeline.CreateMessage();
| 0 | 141 | | var request = message.Request;
| 0 | 142 | | request.Method = RequestMethod.Post;
| 0 | 143 | | var uri = new RawRequestUriBuilder();
| 0 | 144 | | uri.AppendRaw(endpoint, false);
| 0 | 145 | | uri.AppendRaw("/livyApi/versions/", false);
| 0 | 146 | | uri.AppendRaw(livyApiVersion, false);
| 0 | 147 | | uri.AppendRaw("/sparkPools/", false);
| 0 | 148 | | uri.AppendRaw(sparkPoolName, false);
| 0 | 149 | | uri.AppendPath("/batches", false);
| 0 | 150 | | if (detailed != null)
| | 151 | | {
| 0 | 152 | | uri.AppendQuery("detailed", detailed.Value, true);
| | 153 | | }
| 0 | 154 | | request.Uri = uri;
| | | | // JSON body: the options model is written via its generated WriteObjectValue serializer.
| 0 | 155 | | request.Headers.Add("Content-Type", "application/json");
| 0 | 156 | | var content = new Utf8JsonRequestContent();
| 0 | 157 | | content.JsonWriter.WriteObjectValue(sparkBatchJobOptions);
| 0 | 158 | | request.Content = content;
| 0 | 159 | | return message;
| | 160 | | }
| | 161 | |
|
| | 162 | | /// <summary> Create new spark batch job. </summary>
| | 163 | | /// <param name="sparkBatchJobOptions"> Livy compatible batch job request payload. </param>
| | 164 | | /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
| | 165 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 166 | | /// <exception cref="ArgumentNullException"> <paramref name="sparkBatchJobOptions"/> is null. </exception>
| | 167 | | public async Task<Response<SparkBatchJob>> CreateSparkBatchJobAsync(SparkBatchJobOptions sparkBatchJobOptions, b
| | 168 | | {
| 0 | 169 | | if (sparkBatchJobOptions == null)
| | 170 | | {
| 0 | 171 | | throw new ArgumentNullException(nameof(sparkBatchJobOptions));
| | 172 | | }
| | 173 | |
|
| 0 | 174 | | using var message = CreateCreateSparkBatchJobRequest(sparkBatchJobOptions, detailed);
| 0 | 175 | | await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
| | | | // 200: deserialize the created job from the response body; any other status throws.
| 0 | 176 | | switch (message.Response.Status)
| | 177 | | {
| | 178 | | case 200:
| | 179 | | {
| | 180 | | SparkBatchJob value = default;
| 0 | 181 | | using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, canc
| 0 | 182 | | value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
| 0 | 183 | | return Response.FromValue(value, message.Response);
| | 184 | | }
| | 185 | | default:
| 0 | 186 | | throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(fa
| | 187 | | }
| 0 | 188 | | }
| | 189 | |
|
| | 190 | | /// <summary> Create new spark batch job. </summary>
| | 191 | | /// <param name="sparkBatchJobOptions"> Livy compatible batch job request payload. </param>
| | 192 | | /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
| | 193 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 194 | | /// <exception cref="ArgumentNullException"> <paramref name="sparkBatchJobOptions"/> is null. </exception>
| | 195 | | public Response<SparkBatchJob> CreateSparkBatchJob(SparkBatchJobOptions sparkBatchJobOptions, bool? detailed = n
| | 196 | | {
| 0 | 197 | | if (sparkBatchJobOptions == null)
| | 198 | | {
| 0 | 199 | | throw new ArgumentNullException(nameof(sparkBatchJobOptions));
| | 200 | | }
| | 201 | |
|
| 0 | 202 | | using var message = CreateCreateSparkBatchJobRequest(sparkBatchJobOptions, detailed);
| 0 | 203 | | _pipeline.Send(message, cancellationToken);
| | | | // Synchronous twin of CreateSparkBatchJobAsync: 200 deserializes, anything else throws.
| 0 | 204 | | switch (message.Response.Status)
| | 205 | | {
| | 206 | | case 200:
| | 207 | | {
| | 208 | | SparkBatchJob value = default;
| 0 | 209 | | using var document = JsonDocument.Parse(message.Response.ContentStream);
| 0 | 210 | | value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
| 0 | 211 | | return Response.FromValue(value, message.Response);
| | 212 | | }
| | 213 | | default:
| 0 | 214 | | throw _clientDiagnostics.CreateRequestFailedException(message.Response);
| | 215 | | }
| 0 | 216 | | }
| | 217 | |
|
| | | | /// <summary> Builds the GET request for a single spark batch job:
| | | | /// .../sparkPools/{sparkPoolName}/batches/{batchId}, with an optional "detailed" query param. </summary>
| | 218 | | internal HttpMessage CreateGetSparkBatchJobRequest(int batchId, bool? detailed)
| | 219 | | {
| 80 | 220 | | var message = _pipeline.CreateMessage();
| 80 | 221 | | var request = message.Request;
| 80 | 222 | | request.Method = RequestMethod.Get;
| 80 | 223 | | var uri = new RawRequestUriBuilder();
| 80 | 224 | | uri.AppendRaw(endpoint, false);
| 80 | 225 | | uri.AppendRaw("/livyApi/versions/", false);
| 80 | 226 | | uri.AppendRaw(livyApiVersion, false);
| 80 | 227 | | uri.AppendRaw("/sparkPools/", false);
| 80 | 228 | | uri.AppendRaw(sparkPoolName, false);
| 80 | 229 | | uri.AppendPath("/batches/", false);
| 80 | 230 | | uri.AppendPath(batchId, true);
| 80 | 231 | | if (detailed != null)
| | 232 | | {
| 0 | 233 | | uri.AppendQuery("detailed", detailed.Value, true);
| | 234 | | }
| 80 | 235 | | request.Uri = uri;
| 80 | 236 | | return message;
| | 237 | | }
| | 238 | |
|
| | 239 | | /// <summary> Gets a single spark batch job. </summary>
| | 240 | | /// <param name="batchId"> Identifier for the batch job. </param>
| | 241 | | /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
| | 242 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 243 | | public async Task<Response<SparkBatchJob>> GetSparkBatchJobAsync(int batchId, bool? detailed = null, Cancellatio
| | 244 | | {
| 40 | 245 | | using var message = CreateGetSparkBatchJobRequest(batchId, detailed);
| 40 | 246 | | await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
| | | | // 200: deserialize the job from the response body; any other status throws.
| 40 | 247 | | switch (message.Response.Status)
| | 248 | | {
| | 249 | | case 200:
| | 250 | | {
| | 251 | | SparkBatchJob value = default;
| 40 | 252 | | using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, canc
| 40 | 253 | | value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
| 40 | 254 | | return Response.FromValue(value, message.Response);
| | 255 | | }
| | 256 | | default:
| 0 | 257 | | throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(fa
| | 258 | | }
| 40 | 259 | | }
| | 260 | |
|
| | 261 | | /// <summary> Gets a single spark batch job. </summary>
| | 262 | | /// <param name="batchId"> Identifier for the batch job. </param>
| | 263 | | /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
| | 264 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 265 | | public Response<SparkBatchJob> GetSparkBatchJob(int batchId, bool? detailed = null, CancellationToken cancellati
| | 266 | | {
| 40 | 267 | | using var message = CreateGetSparkBatchJobRequest(batchId, detailed);
| 40 | 268 | | _pipeline.Send(message, cancellationToken);
| | | | // Synchronous twin of GetSparkBatchJobAsync: 200 deserializes, anything else throws.
| 40 | 269 | | switch (message.Response.Status)
| | 270 | | {
| | 271 | | case 200:
| | 272 | | {
| | 273 | | SparkBatchJob value = default;
| 40 | 274 | | using var document = JsonDocument.Parse(message.Response.ContentStream);
| 40 | 275 | | value = SparkBatchJob.DeserializeSparkBatchJob(document.RootElement);
| 40 | 276 | | return Response.FromValue(value, message.Response);
| | 277 | | }
| | 278 | | default:
| 0 | 279 | | throw _clientDiagnostics.CreateRequestFailedException(message.Response);
| | 280 | | }
| 40 | 281 | | }
| | 282 | |
|
| | | | /// <summary> Builds the DELETE request that cancels a batch job:
| | | | /// .../sparkPools/{sparkPoolName}/batches/{batchId}. </summary>
| | 283 | | internal HttpMessage CreateCancelSparkBatchJobRequest(int batchId)
| | 284 | | {
| 0 | 285 | | var message = _pipeline.CreateMessage();
| 0 | 286 | | var request = message.Request;
| 0 | 287 | | request.Method = RequestMethod.Delete;
| 0 | 288 | | var uri = new RawRequestUriBuilder();
| 0 | 289 | | uri.AppendRaw(endpoint, false);
| 0 | 290 | | uri.AppendRaw("/livyApi/versions/", false);
| 0 | 291 | | uri.AppendRaw(livyApiVersion, false);
| 0 | 292 | | uri.AppendRaw("/sparkPools/", false);
| 0 | 293 | | uri.AppendRaw(sparkPoolName, false);
| 0 | 294 | | uri.AppendPath("/batches/", false);
| 0 | 295 | | uri.AppendPath(batchId, true);
| 0 | 296 | | request.Uri = uri;
| 0 | 297 | | return message;
| | 298 | | }
| | 299 | |
|
| | 300 | | /// <summary> Cancels a running spark batch job. </summary>
| | 301 | | /// <param name="batchId"> Identifier for the batch job. </param>
| | 302 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 303 | | public async Task<Response> CancelSparkBatchJobAsync(int batchId, CancellationToken cancellationToken = default)
| | 304 | | {
| 0 | 305 | | using var message = CreateCancelSparkBatchJobRequest(batchId);
| 0 | 306 | | await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
| | | | // No response body to parse: 200 returns the raw response, anything else throws.
| 0 | 307 | | switch (message.Response.Status)
| | 308 | | {
| | 309 | | case 200:
| 0 | 310 | | return message.Response;
| | 311 | | default:
| 0 | 312 | | throw await _clientDiagnostics.CreateRequestFailedExceptionAsync(message.Response).ConfigureAwait(fa
| | 313 | | }
| 0 | 314 | | }
| | 315 | |
|
| | 316 | | /// <summary> Cancels a running spark batch job. </summary>
| | 317 | | /// <param name="batchId"> Identifier for the batch job. </param>
| | 318 | | /// <param name="cancellationToken"> The cancellation token to use. </param>
| | 319 | | public Response CancelSparkBatchJob(int batchId, CancellationToken cancellationToken = default)
| | 320 | | {
| 0 | 321 | | using var message = CreateCancelSparkBatchJobRequest(batchId);
| 0 | 322 | | _pipeline.Send(message, cancellationToken);
| | | | // Synchronous twin of CancelSparkBatchJobAsync: 200 returns the raw response, anything else throws.
| 0 | 323 | | switch (message.Response.Status)
| | 324 | | {
| | 325 | | case 200:
| 0 | 326 | | return message.Response;
| | 327 | | default:
| 0 | 328 | | throw _clientDiagnostics.CreateRequestFailedException(message.Response);
| | 329 | | }
| 0 | 330 | | }
| | 331 | | } |
| | 332 | | } |