# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ArtifactsClientConfiguration
from .operations import (
    BigDataPoolsOperations,
    DataFlowDebugSessionOperations,
    DataFlowOperations,
    DatasetOperations,
    IntegrationRuntimesOperations,
    KqlScriptOperations,
    KqlScriptsOperations,
    LibraryOperations,
    LinkedServiceOperations,
    MetastoreOperations,
    NotebookOperationResultOperations,
    NotebookOperations,
    PipelineOperations,
    PipelineRunOperations,
    SparkConfigurationOperations,
    SparkJobDefinitionOperations,
    SqlPoolsOperations,
    SqlScriptOperations,
    TriggerOperations,
    TriggerRunOperations,
    WorkspaceGitRepoManagementOperations,
    WorkspaceOperations,
)

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any

    from azure.core.credentials import TokenCredential
    from azure.core.rest import HttpRequest, HttpResponse


class ArtifactsClient(object):
"""ArtifactsClient.
:ivar kql_scripts: KqlScriptsOperations operations
:vartype kql_scripts: azure.synapse.artifacts.operations.KqlScriptsOperations
:ivar kql_script: KqlScriptOperations operations
:vartype kql_script: azure.synapse.artifacts.operations.KqlScriptOperations
:ivar metastore: MetastoreOperations operations
:vartype metastore: azure.synapse.artifacts.operations.MetastoreOperations
:ivar spark_configuration: SparkConfigurationOperations operations
:vartype spark_configuration: azure.synapse.artifacts.operations.SparkConfigurationOperations
:ivar big_data_pools: BigDataPoolsOperations operations
:vartype big_data_pools: azure.synapse.artifacts.operations.BigDataPoolsOperations
:ivar data_flow: DataFlowOperations operations
:vartype data_flow: azure.synapse.artifacts.operations.DataFlowOperations
:ivar data_flow_debug_session: DataFlowDebugSessionOperations operations
:vartype data_flow_debug_session:
azure.synapse.artifacts.operations.DataFlowDebugSessionOperations
:ivar dataset: DatasetOperations operations
:vartype dataset: azure.synapse.artifacts.operations.DatasetOperations
:ivar workspace_git_repo_management: WorkspaceGitRepoManagementOperations operations
:vartype workspace_git_repo_management:
azure.synapse.artifacts.operations.WorkspaceGitRepoManagementOperations
:ivar integration_runtimes: IntegrationRuntimesOperations operations
:vartype integration_runtimes: azure.synapse.artifacts.operations.IntegrationRuntimesOperations
:ivar library: LibraryOperations operations
:vartype library: azure.synapse.artifacts.operations.LibraryOperations
:ivar linked_service: LinkedServiceOperations operations
:vartype linked_service: azure.synapse.artifacts.operations.LinkedServiceOperations
:ivar notebook: NotebookOperations operations
:vartype notebook: azure.synapse.artifacts.operations.NotebookOperations
:ivar notebook_operation_result: NotebookOperationResultOperations operations
:vartype notebook_operation_result:
azure.synapse.artifacts.operations.NotebookOperationResultOperations
:ivar pipeline: PipelineOperations operations
:vartype pipeline: azure.synapse.artifacts.operations.PipelineOperations
:ivar pipeline_run: PipelineRunOperations operations
:vartype pipeline_run: azure.synapse.artifacts.operations.PipelineRunOperations
:ivar spark_job_definition: SparkJobDefinitionOperations operations
:vartype spark_job_definition: azure.synapse.artifacts.operations.SparkJobDefinitionOperations
:ivar sql_pools: SqlPoolsOperations operations
:vartype sql_pools: azure.synapse.artifacts.operations.SqlPoolsOperations
:ivar sql_script: SqlScriptOperations operations
:vartype sql_script: azure.synapse.artifacts.operations.SqlScriptOperations
:ivar trigger: TriggerOperations operations
:vartype trigger: azure.synapse.artifacts.operations.TriggerOperations
:ivar trigger_run: TriggerRunOperations operations
:vartype trigger_run: azure.synapse.artifacts.operations.TriggerRunOperations
:ivar workspace: WorkspaceOperations operations
:vartype workspace: azure.synapse.artifacts.operations.WorkspaceOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param endpoint: The workspace development endpoint, for example
https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
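
    .. admonition:: Example

        A minimal construction sketch (illustrative only). ``DefaultAzureCredential``
        comes from the separate ``azure-identity`` package and is an assumption here,
        not a dependency of this module::

            from azure.identity import DefaultAzureCredential
            from azure.synapse.artifacts import ArtifactsClient

            with ArtifactsClient(
                credential=DefaultAzureCredential(),
                endpoint="https://myworkspace.dev.azuresynapse.net",
            ) as client:
                # Operation groups hang off the client, e.g. client.notebook,
                # client.pipeline, client.sql_script.
                ...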
"""

    def __init__(
        self,
        credential,  # type: "TokenCredential"
        endpoint,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
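        # The base URL is a template; "{endpoint}" is replaced with the configured
        # workspace endpoint when request URLs are formatted (see _send_request).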
        _base_url = '{endpoint}'
        self._config = ArtifactsClientConfiguration(credential=credential, endpoint=endpoint, **kwargs)
        self._client = PipelineClient(base_url=_base_url, config=self._config, **kwargs)

        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self._serialize.client_side_validation = False
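        # Every operation group shares the pipeline client, configuration, and
        # (de)serializers constructed above.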
        self.kql_scripts = KqlScriptsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.kql_script = KqlScriptOperations(self._client, self._config, self._serialize, self._deserialize)
        self.metastore = MetastoreOperations(self._client, self._config, self._serialize, self._deserialize)
        self.spark_configuration = SparkConfigurationOperations(self._client, self._config, self._serialize, self._deserialize)
        self.big_data_pools = BigDataPoolsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.data_flow = DataFlowOperations(self._client, self._config, self._serialize, self._deserialize)
        self.data_flow_debug_session = DataFlowDebugSessionOperations(self._client, self._config, self._serialize, self._deserialize)
        self.dataset = DatasetOperations(self._client, self._config, self._serialize, self._deserialize)
        self.workspace_git_repo_management = WorkspaceGitRepoManagementOperations(self._client, self._config, self._serialize, self._deserialize)
        self.integration_runtimes = IntegrationRuntimesOperations(self._client, self._config, self._serialize, self._deserialize)
        self.library = LibraryOperations(self._client, self._config, self._serialize, self._deserialize)
        self.linked_service = LinkedServiceOperations(self._client, self._config, self._serialize, self._deserialize)
        self.notebook = NotebookOperations(self._client, self._config, self._serialize, self._deserialize)
        self.notebook_operation_result = NotebookOperationResultOperations(self._client, self._config, self._serialize, self._deserialize)
        self.pipeline = PipelineOperations(self._client, self._config, self._serialize, self._deserialize)
        self.pipeline_run = PipelineRunOperations(self._client, self._config, self._serialize, self._deserialize)
        self.spark_job_definition = SparkJobDefinitionOperations(self._client, self._config, self._serialize, self._deserialize)
        self.sql_pools = SqlPoolsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.sql_script = SqlScriptOperations(self._client, self._config, self._serialize, self._deserialize)
        self.trigger = TriggerOperations(self._client, self._config, self._serialize, self._deserialize)
        self.trigger_run = TriggerRunOperations(self._client, self._config, self._serialize, self._deserialize)
        self.workspace = WorkspaceOperations(self._client, self._config, self._serialize, self._deserialize)

    def _send_request(
        self,
        request,  # type: HttpRequest
        **kwargs  # type: Any
    ):
        # type: (...) -> HttpResponse
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
        request_copy = deepcopy(request)
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }

        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
        return self._client.send_request(request_copy, **kwargs)

    def close(self):
        # type: () -> None
        self._client.close()

    def __enter__(self):
        # type: () -> ArtifactsClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)