Source code for azure.synapse.artifacts.aio.operations._data_flow_operations

# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict

from ... import models as _models
from ..._vendor import _convert_request
from ...operations._data_flow_operations import (
    build_create_or_update_data_flow_request,
    build_delete_data_flow_request,
    build_get_data_flow_request,
    build_get_data_flows_by_workspace_request,
    build_rename_data_flow_request,
)

T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


class DataFlowOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.synapse.artifacts.aio.ArtifactsClient`'s
        :attr:`data_flow` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    async def _create_or_update_data_flow_initial(
        self, data_flow_name: str, properties: _models.DataFlow, if_match: Optional[str] = None, **kwargs: Any
    ) -> Optional[_models.DataFlowResource]:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[Optional[_models.DataFlowResource]]

        _data_flow = _models.DataFlowResource(properties=properties)
        _json = self._serialize.body(_data_flow, "DataFlowResource")

        request = build_create_or_update_data_flow_request(
            data_flow_name=data_flow_name,
            if_match=if_match,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._create_or_update_data_flow_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize("DataFlowResource", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_or_update_data_flow_initial.metadata = {"url": "/dataflows/{dataFlowName}"}  # type: ignore
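    # Usage sketch: per the warning above, this class is reached through the async
    # ArtifactsClient rather than constructed directly. The constructor arguments shown
    # here (credential, workspace endpoint) are assumptions for illustration only.
    #
    #     from azure.identity.aio import DefaultAzureCredential
    #     from azure.synapse.artifacts.aio import ArtifactsClient
    #
    #     client = ArtifactsClient(
    #         credential=DefaultAzureCredential(),
    #         endpoint="https://<workspace-name>.dev.azuresynapse.net",
    #     )
    #     data_flow_ops = client.data_flow  # instance of DataFlowOperations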
    @distributed_trace_async
    async def begin_create_or_update_data_flow(
        self, data_flow_name: str, properties: _models.DataFlow, if_match: Optional[str] = None, **kwargs: Any
    ) -> AsyncLROPoller[_models.DataFlowResource]:
        """Creates or updates a data flow.

        :param data_flow_name: The data flow name. Required.
        :type data_flow_name: str
        :param properties: Data flow properties. Required.
        :type properties: ~azure.synapse.artifacts.models.DataFlow
        :param if_match: ETag of the data flow entity. Should only be specified for update, for which
         it should match the existing entity, or can be * for an unconditional update. Default value
         is None.
        :type if_match: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
         for this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either DataFlowResource or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.synapse.artifacts.models.DataFlowResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.DataFlowResource]
        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._create_or_update_data_flow_initial(  # type: ignore
                data_flow_name=data_flow_name,
                properties=properties,
                if_match=if_match,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize("DataFlowResource", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )  # type: AsyncPollingMethod
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update_data_flow.metadata = {"url": "/dataflows/{dataFlowName}"}  # type: ignore
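    # Usage sketch for the LRO above (names and model values are hypothetical; a concrete
    # DataFlow subtype such as MappingDataFlow is assumed here):
    #
    #     from azure.synapse.artifacts.models import MappingDataFlow
    #
    #     poller = await client.data_flow.begin_create_or_update_data_flow(
    #         data_flow_name="myDataFlow",
    #         properties=MappingDataFlow(description="example data flow"),
    #     )
    #     resource = await poller.result()  # DataFlowResource once the operation completes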
    @distributed_trace_async
    async def get_data_flow(
        self, data_flow_name: str, if_none_match: Optional[str] = None, **kwargs: Any
    ) -> _models.DataFlowResource:
        """Gets a data flow.

        :param data_flow_name: The data flow name. Required.
        :type data_flow_name: str
        :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the
         ETag matches the existing entity tag, or if * was provided, then no content will be returned.
         Default value is None.
        :type if_none_match: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DataFlowResource or the result of cls(response)
        :rtype: ~azure.synapse.artifacts.models.DataFlowResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.DataFlowResource]

        request = build_get_data_flow_request(
            data_flow_name=data_flow_name,
            if_none_match=if_none_match,
            api_version=api_version,
            template_url=self.get_data_flow.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize("DataFlowResource", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_data_flow.metadata = {"url": "/dataflows/{dataFlowName}"}  # type: ignore
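    # Usage sketch (hypothetical names): a plain read, optionally made conditional on an
    # ETag. Note that a 304 response is mapped to ResourceNotModifiedError by the
    # error_map above rather than returning an empty result.
    #
    #     resource = await client.data_flow.get_data_flow("myDataFlow")
    #     etag = resource.etag
    #     try:
    #         refreshed = await client.data_flow.get_data_flow("myDataFlow", if_none_match=etag)
    #     except ResourceNotModifiedError:
    #         refreshed = resource  # entity unchanged since `etag` was observed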
    async def _delete_data_flow_initial(  # pylint: disable=inconsistent-return-statements
        self, data_flow_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_delete_data_flow_request(
            data_flow_name=data_flow_name,
            api_version=api_version,
            template_url=self._delete_data_flow_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_data_flow_initial.metadata = {"url": "/dataflows/{dataFlowName}"}  # type: ignore
    @distributed_trace_async
    async def begin_delete_data_flow(self, data_flow_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
        """Deletes a data flow.

        :param data_flow_name: The data flow name. Required.
        :type data_flow_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
         for this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._delete_data_flow_initial(  # type: ignore
                data_flow_name=data_flow_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )  # type: AsyncPollingMethod
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete_data_flow.metadata = {"url": "/dataflows/{dataFlowName}"}  # type: ignore
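    # Usage sketch (hypothetical name):
    #
    #     poller = await client.data_flow.begin_delete_data_flow("myDataFlow")
    #     await poller.result()  # completes with None once the deletion has finished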
    async def _rename_data_flow_initial(  # pylint: disable=inconsistent-return-statements
        self, data_flow_name: str, new_name: Optional[str] = None, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        _request = _models.ArtifactRenameRequest(new_name=new_name)
        _json = self._serialize.body(_request, "ArtifactRenameRequest")

        request = build_rename_data_flow_request(
            data_flow_name=data_flow_name,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._rename_data_flow_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        request.url = self._client.format_url(request.url, **path_format_arguments)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        if cls:
            return cls(pipeline_response, None, {})

    _rename_data_flow_initial.metadata = {"url": "/dataflows/{dataFlowName}/rename"}  # type: ignore
    @distributed_trace_async
    async def begin_rename_data_flow(
        self, data_flow_name: str, new_name: Optional[str] = None, **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Renames a data flow.

        :param data_flow_name: The data flow name. Required.
        :type data_flow_name: str
        :param new_name: New name of the artifact. Default value is None.
        :type new_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling. Pass in False
         for this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", "application/json"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._rename_data_flow_initial(  # type: ignore
                data_flow_name=data_flow_name,
                new_name=new_name,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        if polling is True:
            polling_method = cast(
                AsyncPollingMethod,
                AsyncLROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs),
            )  # type: AsyncPollingMethod
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_rename_data_flow.metadata = {"url": "/dataflows/{dataFlowName}/rename"}  # type: ignore
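    # Usage sketch (hypothetical names):
    #
    #     poller = await client.data_flow.begin_rename_data_flow(
    #         "myDataFlow", new_name="myRenamedDataFlow"
    #     )
    #     await poller.result()  # completes with None once the rename has finished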
    @distributed_trace
    def get_data_flows_by_workspace(self, **kwargs: Any) -> AsyncIterable["_models.DataFlowResource"]:
        """Lists data flows.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either DataFlowResource or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.synapse.artifacts.models.DataFlowResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2020-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.DataFlowListResponse]

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:
                request = build_get_data_flows_by_workspace_request(
                    api_version=api_version,
                    template_url=self.get_data_flows_by_workspace.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                request.url = self._client.format_url(request.url, **path_format_arguments)  # type: ignore

            else:
                request = HttpRequest("GET", next_link)
                request = _convert_request(request)
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                request.url = self._client.format_url(request.url, **path_format_arguments)  # type: ignore
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("DataFlowListResponse", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)
    get_data_flows_by_workspace.metadata = {"url": "/dataflows"}  # type: ignore
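    # Usage sketch: the pager is created synchronously (no await on the call itself) and
    # consumed asynchronously; each page is fetched lazily by `get_next` above.
    #
    #     async for data_flow in client.data_flow.get_data_flows_by_workspace():
    #         print(data_flow.name)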