Source code for azure.ai.language.conversations._conversation_analysis_client

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from copy import deepcopy
from typing import TYPE_CHECKING

from azure.core import PipelineClient
from msrest import Deserializer, Serializer

from . import models
from ._configuration import ConversationAnalysisClientConfiguration
from .operations import ConversationAnalysisClientOperationsMixin

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Optional

    from azure.core.credentials import AzureKeyCredential
    from azure.core.rest import HttpRequest, HttpResponse

class ConversationAnalysisClient(ConversationAnalysisClientOperationsMixin):
    """This API accepts a request and mediates among multiple language projects, such as LUIS
    Generally Available, Question Answering, Conversation, and then calls the best candidate
    service to handle the request. At last, it returns a response with the candidate service's
    response as a payload.

    In some cases, this API needs to forward requests and responses between the caller and an
    upstream service.

    :param endpoint: Supported Cognitive Services endpoint (e.g.,
     https://:code:`<resource-name>`.api.cognitiveservices.azure.com).
    :type endpoint: str
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.AzureKeyCredential
    """

    def __init__(
        self,
        endpoint,  # type: str
        credential,  # type: AzureKeyCredential
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        _endpoint = '{Endpoint}/language'
        self._config = ConversationAnalysisClientConfiguration(endpoint, credential, **kwargs)
        self._client = PipelineClient(base_url=_endpoint, config=self._config, **kwargs)

        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self._serialize.client_side_validation = False

    def send_request(
        self,
        request,  # type: HttpRequest
        **kwargs  # type: Any
    ):
        # type: (...) -> HttpResponse
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client.send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        request_copy = deepcopy(request)
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
        }

        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
        return self._client.send_request(request_copy, **kwargs)

    def close(self):
        # type: () -> None
        self._client.close()

    def __enter__(self):
        # type: () -> ConversationAnalysisClient
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details):
        # type: (Any) -> None
        self._client.__exit__(*exc_details)
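
A minimal usage sketch (not part of the generated module above): it constructs the client from a placeholder endpoint and key, uses it as a context manager so the underlying PipelineClient is closed on exit, and sends a raw azure.core.rest.HttpRequest through send_request as described in its docstring. The relative URL, query parameters, and JSON body are illustrative assumptions, not a documented contract.

from azure.core.credentials import AzureKeyCredential
from azure.core.rest import HttpRequest

from azure.ai.language.conversations import ConversationAnalysisClient

# Placeholder resource values -- substitute your own Cognitive Services endpoint and key.
endpoint = "https://<resource-name>.api.cognitiveservices.azure.com"
credential = AzureKeyCredential("<api-key>")

# __enter__/__exit__ delegate to the underlying PipelineClient, so the HTTP
# transport is cleaned up when the block exits.
with ConversationAnalysisClient(endpoint, credential) as client:
    # send_request resolves relative URLs against the "{Endpoint}/language" base URL
    # after substituting the configured endpoint. The path and body below are
    # hypothetical placeholders for an analyze-conversations style call.
    request = HttpRequest(
        "POST",
        "/:analyze-conversations?projectName=<project>&deploymentName=<deployment>&api-version=<api-version>",
        json={"query": "How do I reset my password?"},
    )
    response = client.send_request(request)
    print(response.status_code)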