Source code for azure.synapse.spark.aio.operations._spark_session_operations

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings

from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest

from ... import models as _models

T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
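
# A callback passed as the ``cls`` keyword must match ClsType: it receives the
# raw pipeline response, the deserialized model, and the response headers, and
# its return value replaces the method's normal result. A minimal sketch
# (illustrative only; ``keep_raw`` is not part of this module):
#
#     def keep_raw(pipeline_response, deserialized, response_headers):
#         # Hand back the model together with the underlying HTTP response.
#         return deserialized, pipeline_response.http_response
#
#     # sessions, raw_response = await ops.get_spark_sessions(cls=keep_raw)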

class SparkSessionOperations:
    """SparkSessionOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.synapse.spark.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def get_spark_sessions(
        self,
        from_parameter: Optional[int] = None,
        size: Optional[int] = None,
        detailed: Optional[bool] = None,
        **kwargs
    ) -> "_models.SparkSessionCollection":
        """List all spark sessions which are running under a particular spark pool.

        :param from_parameter: Optional param specifying which index the list should begin from.
        :type from_parameter: int
        :param size: Optional param specifying the size of the returned list. By default it is 20 and
         that is the maximum.
        :type size: int
        :param detailed: Optional query param specifying whether detailed response is returned beyond
         plain livy.
        :type detailed: bool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkSessionCollection, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkSessionCollection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkSessionCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_spark_sessions.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if from_parameter is not None:
            query_parameters['from'] = self._serialize.query("from_parameter", from_parameter, 'int')
        if size is not None:
            query_parameters['size'] = self._serialize.query("size", size, 'int')
        if detailed is not None:
            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkSessionCollection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_spark_sessions.metadata = {'url': '/sessions'}  # type: ignore
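
    # Usage sketch (assumptions: an ``azure.synapse.spark.aio.SparkClient``
    # exposes this operation group as ``spark_session``, and the collection's
    # items live on ``sessions``; the setup below is illustrative, not part of
    # this module):
    #
    #     from azure.identity.aio import DefaultAzureCredential
    #     from azure.synapse.spark.aio import SparkClient
    #
    #     async def list_sessions(endpoint: str, pool_name: str) -> None:
    #         credential = DefaultAzureCredential()
    #         async with SparkClient(credential, endpoint, pool_name) as client:
    #             collection = await client.spark_session.get_spark_sessions(from_parameter=0, size=20)
    #             for session in collection.sessions or []:
    #                 print(session.id, session.state)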

    async def create_spark_session(
        self,
        spark_session_options: "_models.SparkSessionOptions",
        detailed: Optional[bool] = None,
        **kwargs
    ) -> "_models.SparkSession":
        """Create a new spark session.

        :param spark_session_options: Livy compatible session request payload.
        :type spark_session_options: ~azure.synapse.spark.models.SparkSessionOptions
        :param detailed: Optional query param specifying whether detailed response is returned beyond
         plain livy.
        :type detailed: bool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkSession, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkSession
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkSession"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_spark_session.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if detailed is not None:
            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(spark_session_options, 'SparkSessionOptions')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkSession', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_spark_session.metadata = {'url': '/sessions'}  # type: ignore
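
    # Usage sketch (hedged: the ``SparkSessionOptions`` field names below are
    # Livy-style assumptions; check ``azure.synapse.spark.models`` for the
    # authoritative set). ``client`` is a configured SparkClient as above:
    #
    #     from azure.synapse.spark.models import SparkSessionOptions
    #
    #     async def start_session(client) -> None:
    #         options = SparkSessionOptions(
    #             name="example-session",  # session name (assumed required)
    #             driver_memory="4g",
    #             driver_cores=2,
    #             executor_memory="4g",
    #             executor_cores=2,
    #             executor_count=2,
    #         )
    #         session = await client.spark_session.create_spark_session(options, detailed=True)
    #         print(session.id, session.state)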

    async def get_spark_session(
        self,
        session_id: int,
        detailed: Optional[bool] = None,
        **kwargs
    ) -> "_models.SparkSession":
        """Gets a single spark session.

        :param session_id: Identifier for the session.
        :type session_id: int
        :param detailed: Optional query param specifying whether detailed response is returned beyond
         plain livy.
        :type detailed: bool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkSession, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkSession
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkSession"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_spark_session.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if detailed is not None:
            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkSession', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_spark_session.metadata = {'url': '/sessions/{sessionId}'}  # type: ignore
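
    # Usage sketch: polling a session until it leaves its startup states
    # (illustrative; the state strings follow Livy conventions and are
    # assumptions, not values asserted by this module):
    #
    #     import asyncio
    #
    #     async def wait_until_ready(client, session_id: int):
    #         while True:
    #             session = await client.spark_session.get_spark_session(session_id, detailed=True)
    #             if session.state not in ("not_started", "starting"):
    #                 return session
    #             await asyncio.sleep(10)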

    async def cancel_spark_session(
        self,
        session_id: int,
        **kwargs
    ) -> None:
        """Cancels a running spark session.

        :param session_id: Identifier for the session.
        :type session_id: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Construct URL
        url = self.cancel_spark_session.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        if cls:
            return cls(pipeline_response, None, {})

    cancel_spark_session.metadata = {'url': '/sessions/{sessionId}'}  # type: ignore
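
    # Usage sketch: cancellation maps to ``DELETE /sessions/{sessionId}`` and
    # returns ``None`` on success (illustrative):
    #
    #     async def stop_session(client, session_id: int) -> None:
    #         await client.spark_session.cancel_spark_session(session_id)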

    async def reset_spark_session_timeout(
        self,
        session_id: int,
        **kwargs
    ) -> None:
        """Sends a keep alive call to the current session to reset the session timeout.

        :param session_id: Identifier for the session.
        :type session_id: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Construct URL
        url = self.reset_spark_session_timeout.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.put(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        if cls:
            return cls(pipeline_response, None, {})

    reset_spark_session_timeout.metadata = {'url': '/sessions/{sessionId}/reset-timeout'}  # type: ignore
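
    # Usage sketch: a keep-alive loop for a long-lived session (illustrative;
    # the 60-second interval is an arbitrary choice, not a service contract):
    #
    #     import asyncio
    #
    #     async def keep_alive(client, session_id: int, interval: float = 60.0) -> None:
    #         while True:
    #             await client.spark_session.reset_spark_session_timeout(session_id)
    #             await asyncio.sleep(interval)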

    async def get_spark_statements(
        self,
        session_id: int,
        **kwargs
    ) -> "_models.SparkStatementCollection":
        """Gets a list of statements within a spark session.

        :param session_id: Identifier for the session.
        :type session_id: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkStatementCollection, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkStatementCollection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkStatementCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_spark_statements.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkStatementCollection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_spark_statements.metadata = {'url': '/sessions/{sessionId}/statements'}  # type: ignore
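
    # Usage sketch (assumption: the collection's items live on ``statements``;
    # illustrative only):
    #
    #     async def dump_statements(client, session_id: int) -> None:
    #         collection = await client.spark_session.get_spark_statements(session_id)
    #         for statement in collection.statements or []:
    #             print(statement.id, statement.state)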

    async def create_spark_statement(
        self,
        session_id: int,
        spark_statement_options: "_models.SparkStatementOptions",
        **kwargs
    ) -> "_models.SparkStatement":
        """Create a statement within a spark session.

        :param session_id: Identifier for the session.
        :type session_id: int
        :param spark_statement_options: Livy compatible statement request payload.
        :type spark_statement_options: ~azure.synapse.spark.models.SparkStatementOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkStatement, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkStatement
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkStatement"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_spark_statement.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(spark_statement_options, 'SparkStatementOptions')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkStatement', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_spark_statement.metadata = {'url': '/sessions/{sessionId}/statements'}  # type: ignore
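
    # Usage sketch (hedged: the ``kind`` values follow Livy's statement kinds,
    # e.g. "pyspark"; check ``azure.synapse.spark.models`` for the supported
    # set):
    #
    #     from azure.synapse.spark.models import SparkStatementOptions
    #
    #     async def run_code(client, session_id: int):
    #         options = SparkStatementOptions(code="print(1 + 1)", kind="pyspark")
    #         return await client.spark_session.create_spark_statement(session_id, options)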

    async def get_spark_statement(
        self,
        session_id: int,
        statement_id: int,
        **kwargs
    ) -> "_models.SparkStatement":
        """Gets a single statement within a spark session.

        :param session_id: Identifier for the session.
        :type session_id: int
        :param statement_id: Identifier for the statement.
        :type statement_id: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkStatement, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkStatement
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkStatement"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_spark_statement.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
            'statementId': self._serialize.url("statement_id", statement_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkStatement', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_spark_statement.metadata = {'url': '/sessions/{sessionId}/statements/{statementId}'}  # type: ignore
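
    # Usage sketch: polling a statement to completion (illustrative; the state
    # strings follow Livy conventions and are assumptions here):
    #
    #     import asyncio
    #
    #     async def wait_for_statement(client, session_id: int, statement_id: int):
    #         while True:
    #             statement = await client.spark_session.get_spark_statement(session_id, statement_id)
    #             if statement.state not in ("waiting", "running"):
    #                 return statement
    #             await asyncio.sleep(2)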

    async def cancel_spark_statement(
        self,
        session_id: int,
        statement_id: int,
        **kwargs
    ) -> "_models.SparkStatementCancellationResult":
        """Kill a statement within a session.

        :param session_id: Identifier for the session.
        :type session_id: int
        :param statement_id: Identifier for the statement.
        :type statement_id: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SparkStatementCancellationResult, or the result of cls(response)
        :rtype: ~azure.synapse.spark.models.SparkStatementCancellationResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SparkStatementCancellationResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.cancel_spark_statement.metadata['url']  # type: ignore
        path_format_arguments = {
            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
            'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True),
            'sessionId': self._serialize.url("session_id", session_id, 'int'),
            'statementId': self._serialize.url("statement_id", statement_id, 'int'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('SparkStatementCancellationResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    cancel_spark_statement.metadata = {'url': '/sessions/{sessionId}/statements/{statementId}/cancel'}  # type: ignore
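
    # Usage sketch (assumption: the cancellation result carries a ``msg`` field
    # mirroring Livy's ``{"msg": "canceled"}`` response; illustrative only):
    #
    #     async def kill_statement(client, session_id: int, statement_id: int) -> None:
    #         result = await client.spark_session.cancel_spark_statement(session_id, statement_id)
    #         print(result.msg)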