Source code for azure.ai.language.questionanswering.aio._question_answering_client

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from copy import deepcopy
from typing import Any, Awaitable

from azure.core import AsyncPipelineClient
from azure.core.credentials import AzureKeyCredential
from azure.core.rest import AsyncHttpResponse, HttpRequest
from msrest import Deserializer, Serializer

from .. import models
from ._configuration import QuestionAnsweringClientConfiguration
from .operations import QuestionAnsweringClientOperationsMixin


class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin):
    """The language service API is a suite of natural language processing (NLP) skills built with
    best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured
    text for tasks such as sentiment analysis, key phrase extraction, language detection and question
    answering. Further documentation can be found in
    https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview.

    :param endpoint: Supported Cognitive Services endpoint (e.g.,
     https://<resource-name>.api.cognitiveservices.azure.com).
    :type endpoint: str
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.AzureKeyCredential
    """

    def __init__(self, endpoint: str, credential: AzureKeyCredential, **kwargs: Any) -> None:
        base_url = "{Endpoint}/language"
        self._config = QuestionAnsweringClientConfiguration(credential, endpoint, **kwargs)
        self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)

        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self._serialize.client_side_validation = False

    def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
        """Runs the network request through the client's chained policies.

        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.AsyncHttpResponse
        """

        request_copy = deepcopy(request)
        path_format_arguments = {
            "Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }

        request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
        return self._client.send_request(request_copy, **kwargs)

    async def close(self) -> None:
        await self._client.close()

    async def __aenter__(self) -> "QuestionAnsweringClient":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details) -> None:
        await self._client.__aexit__(*exc_details)
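

# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated client above): a minimal example of
# how a caller might construct QuestionAnsweringClient, enter it as an async
# context manager, and push a hand-built request through send_request(). The
# endpoint, API key, URL path, api-version value, and JSON body below are
# illustrative placeholders/assumptions, not values confirmed by this module.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import asyncio

    async def _example() -> None:
        client = QuestionAnsweringClient(
            endpoint="https://<resource-name>.api.cognitiveservices.azure.com",  # placeholder endpoint
            credential=AzureKeyCredential("<api-key>"),  # placeholder key
        )
        # __aenter__/__aexit__ simply delegate to the underlying AsyncPipelineClient.
        async with client:
            # A relative URL is joined with the "{Endpoint}/language" base URL;
            # send_request() fills in "{Endpoint}" from self._config.endpoint via
            # path_format_arguments before the request goes through the pipeline.
            request = HttpRequest(
                method="POST",
                url="/:query-text",  # hypothetical operation path, for illustration only
                params={"api-version": "2021-10-01"},  # assumed API version
                json={
                    "question": "How long does hardware last?",
                    "records": [],
                },
            )
            response = await client.send_request(request)
            # send_request() does no error handling, so the caller checks the status.
            response.raise_for_status()
            print(response.json())

    asyncio.run(_example())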