# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING

from azure.core import PipelineClient
from msrest import Deserializer, Serializer

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any

    from azure.core.credentials import TokenCredential

from ._configuration import SparkClientConfiguration
from .operations import SparkBatchOperations
from .operations import SparkSessionOperations
from . import models


class SparkClient(object):
"""SparkClient.
:ivar spark_batch: SparkBatchOperations operations
:vartype spark_batch: azure.synapse.spark.operations.SparkBatchOperations
:ivar spark_session: SparkSessionOperations operations
:vartype spark_session: azure.synapse.spark.operations.SparkSessionOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
:param spark_pool_name: Name of the spark pool.
:type spark_pool_name: str
:param livy_api_version: Valid api-version for the request.
:type livy_api_version: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        endpoint,  # type: str
        spark_pool_name,  # type: str
        livy_api_version="2019-11-01-preview",  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
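        # Note: the base URL below is a template; the generated operations fill
        # in {endpoint}, {livyApiVersion} and {sparkPoolName} from the client
        # configuration when formatting each request URL.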
        base_url = '{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}'
        self._config = SparkClientConfiguration(credential, endpoint, spark_pool_name, livy_api_version, **kwargs)
        self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
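
        # Collect every model class from the generated models module so the
        # serializer and deserializer can resolve types by name.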
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
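
        # Both operation groups share the same pipeline client, configuration
        # and (de)serializers; all HTTP traffic goes through self._client.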
        self.spark_batch = SparkBatchOperations(
            self._client, self._config, self._serialize, self._deserialize)
        self.spark_session = SparkSessionOperations(
            self._client, self._config, self._serialize, self._deserialize)

    def close(self):
        # type: () -> None
        self._client.close()

    def __enter__(self):
        # type: () -> SparkClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
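

# Minimal usage sketch (not part of the generated module): construct the client
# with DefaultAzureCredential from the azure-identity package and list Spark
# batch jobs. The workspace endpoint and pool name are placeholders, and the
# get_spark_batch_jobs call assumes the corresponding generated operation on
# SparkBatchOperations.
if __name__ == "__main__":
    from azure.identity import DefaultAzureCredential

    client = SparkClient(
        credential=DefaultAzureCredential(),
        endpoint="https://myworkspace.dev.azuresynapse.net",
        spark_pool_name="mysparkpool",
    )
    # The client is a context manager, so the pipeline is closed on exit.
    with client:
        batches = client.spark_batch.get_spark_batch_jobs()
        print(batches)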